HHH-14312 - entity graph is ignored for 'padded' and 'dynamic' batch style entity loader

- Adjusted multi-loading to use LoadPlans (and apply entity-graphs) as well.
- All of the loaders/builders in `org.hibernate.loader.entity` are no longer used, superseded by `org.hibernate.loader.entity.plan`. Removed the no-longer-needed code.
- Adjusted multi-load tests that relied on the actual generated SQL *String*; they now explicitly count the number of parameter placeholders and assert against that count.
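
To make the scenario concrete, here is a minimal sketch of the kind of usage the fix targets. The helper names, the generic entity type, and the attribute/id arguments are hypothetical placeholders; the relevant pieces are the `hibernate.batch_fetch_style` setting ('padded' or 'dynamic') and a JPA fetch graph passed to `find()`, which the old batch-style loaders did not apply.

```java
import java.util.Collections;
import java.util.Map;

import javax.persistence.EntityGraph;
import javax.persistence.EntityManager;

import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;

public class FetchGraphWithBatchStyleSketch {

	// Hypothetical: selecting the 'dynamic' batch fetch style (the same applies to 'padded').
	static Configuration configure() {
		return new Configuration()
				.setProperty( AvailableSettings.BATCH_FETCH_STYLE, "dynamic" );
	}

	// Hypothetical helper: load an entity with a fetch graph covering one lazy attribute.
	// Before this change the graph was silently dropped when the padded/dynamic batch-style
	// loader handled the load; the LoadPlan-based loaders now honor it.
	static <T> T loadWithFetchGraph(EntityManager entityManager, Class<T> entityType, Object id, String lazyAttribute) {
		final EntityGraph<T> graph = entityManager.createEntityGraph( entityType );
		graph.addAttributeNodes( lazyAttribute );

		final Map<String, Object> hints =
				Collections.singletonMap( "javax.persistence.fetchgraph", graph );
		return entityManager.find( entityType, id, hints );
	}
}
```

Per the Javadoc touched in this commit, 'padded' uses a fixed number of parameters and sets the unneeded ones to null, while 'dynamic' builds the batch restriction to match the actual number of ids waiting to be fetched; both styles now route through the LoadPlan-based loaders in `org.hibernate.loader.entity.plan`, so the effective entity graph (and other `LoadQueryInfluencers`) is applied.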
Steve Ebersole 2020-11-11 10:40:59 -06:00
parent 0b2fb4e28b
commit b296459851
13 changed files with 568 additions and 938 deletions

View File

@@ -13,11 +13,9 @@ import java.util.List;
import org.hibernate.HibernateException;
import org.hibernate.LockOptions;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.OuterJoinLoader;
import org.hibernate.param.ParameterBinder;
import org.hibernate.persister.entity.OuterJoinLoadable;
import org.hibernate.transform.ResultTransformer;
import org.hibernate.type.Type;

View File

@@ -1,142 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.loader.entity;
import java.io.Serializable;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
import org.hibernate.HibernateException;
import org.hibernate.LockOptions;
import org.hibernate.engine.internal.BatchFetchQueueHelper;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.Loader;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.type.Type;
import org.jboss.logging.Logger;
/**
* The base contract for loaders capable of performing batch-fetch loading of entities using multiple primary key
* values in the SQL <tt>WHERE</tt> clause.
*
* @author Gavin King
* @author Steve Ebersole
*
* @see BatchingEntityLoaderBuilder
* @see UniqueEntityLoader
*/
public abstract class BatchingEntityLoader implements UniqueEntityLoader {
private static final Logger log = Logger.getLogger( BatchingEntityLoader.class );
private final EntityPersister persister;
public BatchingEntityLoader(EntityPersister persister) {
this.persister = persister;
}
public EntityPersister persister() {
return persister;
}
@Override
public Object load(Serializable id, Object optionalObject, SharedSessionContractImplementor session) {
return load( id, optionalObject, session, LockOptions.NONE );
}
@Override
public Object load(
Serializable id,
Object optionalObject,
SharedSessionContractImplementor session,
LockOptions lockOptions,
Boolean readOnly) {
return load( id, optionalObject, session, lockOptions, readOnly );
}
@Override
public Object load(Serializable id, Object optionalObject, SharedSessionContractImplementor session, Boolean readOnly) {
return load( id, optionalObject, session, LockOptions.NONE, readOnly );
}
protected QueryParameters buildQueryParameters(
Serializable id,
Serializable[] ids,
Object optionalObject,
LockOptions lockOptions,
Boolean readOnly) {
Type[] types = new Type[ids.length];
Arrays.fill( types, persister().getIdentifierType() );
QueryParameters qp = new QueryParameters();
qp.setPositionalParameterTypes( types );
qp.setPositionalParameterValues( ids );
qp.setOptionalObject( optionalObject );
qp.setOptionalEntityName( persister().getEntityName() );
qp.setOptionalId( id );
qp.setLockOptions( lockOptions );
if ( readOnly != null ) {
qp.setReadOnly( readOnly );
}
return qp;
}
protected Object getObjectFromList(List results, Serializable id, SharedSessionContractImplementor session) {
for ( Object obj : results ) {
final boolean equal = persister.getIdentifierType().isEqual(
id,
session.getContextEntityIdentifier( obj ),
session.getFactory()
);
if ( equal ) {
return obj;
}
}
return null;
}
protected Object doBatchLoad(
Serializable id,
Loader loaderToUse,
SharedSessionContractImplementor session,
Serializable[] ids,
Object optionalObject,
LockOptions lockOptions,
Boolean readOnly) {
if ( log.isDebugEnabled() ) {
log.debugf( "Batch loading entity: %s", MessageHelper.infoString( persister, ids, session.getFactory() ) );
}
QueryParameters qp = buildQueryParameters( id, ids, optionalObject, lockOptions, readOnly );
try {
final List results = loaderToUse.doQueryAndInitializeNonLazyCollections( session, qp, false );
log.debug( "Done entity batch load" );
// The EntityKey for any entity that is not found will remain in the batch.
// Explicitly remove the EntityKeys for entities that were not found to
// avoid including them in future batches that get executed.
BatchFetchQueueHelper.removeNotFoundBatchLoadableEntityKeys(
ids,
results,
persister(),
session
);
return getObjectFromList(results, id, session);
}
catch ( SQLException sqle ) {
throw session.getJdbcServices().getSqlExceptionHelper().convert(
sqle,
"could not load an entity batch: " + MessageHelper.infoString( persister(), ids, session.getFactory() ),
loaderToUse.getSQLString()
);
}
}
}

View File

@@ -27,16 +27,12 @@ import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.spi.Status;
import org.hibernate.event.internal.AbstractLockUpgradeEventListener;
import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.event.spi.EventSource;
import org.hibernate.event.spi.EventType;
import org.hibernate.event.spi.LoadEvent;
import org.hibernate.event.spi.LoadEventListener;
import org.hibernate.event.spi.PostLoadEvent;
import org.hibernate.event.spi.PostLoadEventListener;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.FastSessionServices;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.proxy.HibernateProxy;

View File

@@ -1,589 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.loader.entity;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.dialect.pagination.LimitHelper;
import org.hibernate.engine.internal.BatchFetchQueueHelper;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.RowSelection;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.Status;
import org.hibernate.event.spi.EventSource;
import org.hibernate.event.spi.LoadEvent;
import org.hibernate.event.spi.LoadEventListener;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.loader.spi.AfterLoadAction;
import org.hibernate.persister.entity.MultiLoadOptions;
import org.hibernate.persister.entity.OuterJoinLoadable;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.type.Type;
import org.jboss.logging.Logger;
/**
* A BatchingEntityLoaderBuilder that builds UniqueEntityLoader instances capable of dynamically building
* its batch-fetch SQL based on the actual number of entity ids waiting to be fetched.
*
* @author Steve Ebersole
*/
public class DynamicBatchingEntityLoaderBuilder extends BatchingEntityLoaderBuilder {
private static final Logger log = Logger.getLogger( DynamicBatchingEntityLoaderBuilder.class );
public static final DynamicBatchingEntityLoaderBuilder INSTANCE = new DynamicBatchingEntityLoaderBuilder();
public List multiLoad(
OuterJoinLoadable persister,
Serializable[] ids,
SharedSessionContractImplementor session,
MultiLoadOptions loadOptions) {
if ( loadOptions.isOrderReturnEnabled() ) {
return performOrderedMultiLoad( persister, ids, session, loadOptions );
}
else {
return performUnorderedMultiLoad( persister, ids, session, loadOptions );
}
}
@SuppressWarnings("unchecked")
private List performOrderedMultiLoad(
OuterJoinLoadable persister,
Serializable[] ids,
SharedSessionContractImplementor session,
MultiLoadOptions loadOptions) {
assert loadOptions.isOrderReturnEnabled();
final List result = CollectionHelper.arrayList( ids.length );
final LockOptions lockOptions = (loadOptions.getLockOptions() == null)
? new LockOptions( LockMode.NONE )
: loadOptions.getLockOptions();
final int maxBatchSize;
if ( loadOptions.getBatchSize() != null && loadOptions.getBatchSize() > 0 ) {
maxBatchSize = loadOptions.getBatchSize();
}
else {
maxBatchSize = session.getJdbcServices().getJdbcEnvironment().getDialect().getDefaultBatchLoadSizingStrategy().determineOptimalBatchLoadSize(
persister.getIdentifierType().getColumnSpan( session.getFactory() ),
ids.length
);
}
final List<Serializable> idsInBatch = new ArrayList<>();
final List<Integer> elementPositionsLoadedByBatch = new ArrayList<>();
for ( int i = 0; i < ids.length; i++ ) {
final Serializable id = ids[i];
final EntityKey entityKey = new EntityKey( id, persister );
if ( loadOptions.isSessionCheckingEnabled() || loadOptions.isSecondLevelCacheCheckingEnabled() ) {
LoadEvent loadEvent = new LoadEvent(
id,
persister.getMappedClass().getName(),
lockOptions,
(EventSource) session,
null
);
Object managedEntity = null;
if ( loadOptions.isSessionCheckingEnabled() ) {
// look for it in the Session first
CacheEntityLoaderHelper.PersistenceContextEntry persistenceContextEntry = CacheEntityLoaderHelper.INSTANCE
.loadFromSessionCache(
loadEvent,
entityKey,
LoadEventListener.GET
);
managedEntity = persistenceContextEntry.getEntity();
if ( managedEntity != null && !loadOptions.isReturnOfDeletedEntitiesEnabled() && !persistenceContextEntry
.isManaged() ) {
// put a null in the result
result.add( i, null );
continue;
}
}
if ( managedEntity == null && loadOptions.isSecondLevelCacheCheckingEnabled() ) {
// look for it in the SessionFactory
managedEntity = CacheEntityLoaderHelper.INSTANCE.loadFromSecondLevelCache(
loadEvent,
persister,
entityKey
);
}
if ( managedEntity != null ) {
result.add( i, managedEntity );
continue;
}
}
// if we did not hit any of the continues above, then we need to batch
// load the entity state.
idsInBatch.add( ids[i] );
if ( idsInBatch.size() >= maxBatchSize ) {
performOrderedBatchLoad( idsInBatch, lockOptions, persister, session );
}
// Save the EntityKey instance for use later!
result.add( i, entityKey );
elementPositionsLoadedByBatch.add( i );
}
if ( !idsInBatch.isEmpty() ) {
performOrderedBatchLoad( idsInBatch, lockOptions, persister, session );
}
final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
for ( Integer position : elementPositionsLoadedByBatch ) {
// the element value at this position in the result List should be
// the EntityKey for that entity; reuse it!
final EntityKey entityKey = (EntityKey) result.get( position );
Object entity = persistenceContext.getEntity( entityKey );
if ( entity != null && !loadOptions.isReturnOfDeletedEntitiesEnabled() ) {
// make sure it is not DELETED
final EntityEntry entry = persistenceContext.getEntry( entity );
if ( entry.getStatus() == Status.DELETED || entry.getStatus() == Status.GONE ) {
// the entity is locally deleted, and the options ask that we not return such entities...
entity = null;
}
}
result.set( position, entity );
}
return result;
}
private void performOrderedBatchLoad(
List<Serializable> idsInBatch,
LockOptions lockOptions,
OuterJoinLoadable persister,
SharedSessionContractImplementor session) {
final int batchSize = idsInBatch.size();
final DynamicEntityLoader batchingLoader = new DynamicEntityLoader(
persister,
batchSize,
lockOptions,
session.getFactory(),
session.getLoadQueryInfluencers()
);
final Serializable[] idsInBatchArray = idsInBatch.toArray( new Serializable[ idsInBatch.size() ] );
QueryParameters qp = buildMultiLoadQueryParameters( persister, idsInBatchArray, lockOptions );
batchingLoader.doEntityBatchFetch( session, qp, idsInBatchArray );
idsInBatch.clear();
}
@SuppressWarnings("unchecked")
protected List performUnorderedMultiLoad(
OuterJoinLoadable persister,
Serializable[] ids,
SharedSessionContractImplementor session,
MultiLoadOptions loadOptions) {
assert !loadOptions.isOrderReturnEnabled();
final List result = CollectionHelper.arrayList( ids.length );
final LockOptions lockOptions = (loadOptions.getLockOptions() == null)
? new LockOptions( LockMode.NONE )
: loadOptions.getLockOptions();
if ( loadOptions.isSessionCheckingEnabled() || loadOptions.isSecondLevelCacheCheckingEnabled() ) {
// the user requested that we exclude ids corresponding to already managed
// entities from the generated load SQL. So here we will iterate all
// incoming id values and see whether it corresponds to an existing
// entity associated with the PC - if it does we add it to the result
// list immediately and remove its id from the group of ids to load.
boolean foundAnyManagedEntities = false;
final List<Serializable> nonManagedIds = new ArrayList<Serializable>();
for ( Serializable id : ids ) {
final EntityKey entityKey = new EntityKey( id, persister );
LoadEvent loadEvent = new LoadEvent(
id,
persister.getMappedClass().getName(),
lockOptions,
(EventSource) session,
null
);
Object managedEntity = null;
// look for it in the Session first
CacheEntityLoaderHelper.PersistenceContextEntry persistenceContextEntry = CacheEntityLoaderHelper.INSTANCE
.loadFromSessionCache(
loadEvent,
entityKey,
LoadEventListener.GET
);
if ( loadOptions.isSessionCheckingEnabled() ) {
managedEntity = persistenceContextEntry.getEntity();
if ( managedEntity != null && !loadOptions.isReturnOfDeletedEntitiesEnabled() && !persistenceContextEntry
.isManaged() ) {
foundAnyManagedEntities = true;
result.add( null );
continue;
}
}
if ( managedEntity == null && loadOptions.isSecondLevelCacheCheckingEnabled() ) {
managedEntity = CacheEntityLoaderHelper.INSTANCE.loadFromSecondLevelCache(
loadEvent,
persister,
entityKey
);
}
if ( managedEntity != null ) {
foundAnyManagedEntities = true;
result.add( managedEntity );
}
else {
nonManagedIds.add( id );
}
}
if ( foundAnyManagedEntities ) {
if ( nonManagedIds.isEmpty() ) {
// all of the given ids were already associated with the Session
return result;
}
else {
// over-write the ids to be loaded with the collection of
// just non-managed ones
ids = nonManagedIds.toArray(
(Serializable[]) Array.newInstance(
ids.getClass().getComponentType(),
nonManagedIds.size()
)
);
}
}
}
int numberOfIdsLeft = ids.length;
final int maxBatchSize;
if ( loadOptions.getBatchSize() != null && loadOptions.getBatchSize() > 0 ) {
maxBatchSize = loadOptions.getBatchSize();
}
else {
maxBatchSize = session.getJdbcServices().getJdbcEnvironment().getDialect().getDefaultBatchLoadSizingStrategy().determineOptimalBatchLoadSize(
persister.getIdentifierType().getColumnSpan( session.getFactory() ),
numberOfIdsLeft
);
}
int idPosition = 0;
while ( numberOfIdsLeft > 0 ) {
int batchSize = Math.min( numberOfIdsLeft, maxBatchSize );
final DynamicEntityLoader batchingLoader = new DynamicEntityLoader(
persister,
batchSize,
lockOptions,
session.getFactory(),
session.getLoadQueryInfluencers()
);
Serializable[] idsInBatch = new Serializable[batchSize];
System.arraycopy( ids, idPosition, idsInBatch, 0, batchSize );
QueryParameters qp = buildMultiLoadQueryParameters( persister, idsInBatch, lockOptions );
result.addAll( batchingLoader.doEntityBatchFetch( session, qp, idsInBatch ) );
numberOfIdsLeft = numberOfIdsLeft - batchSize;
idPosition += batchSize;
}
return result;
}
public static QueryParameters buildMultiLoadQueryParameters(
OuterJoinLoadable persister,
Serializable[] ids,
LockOptions lockOptions) {
Type[] types = new Type[ids.length];
Arrays.fill( types, persister.getIdentifierType() );
QueryParameters qp = new QueryParameters();
qp.setOptionalEntityName( persister.getEntityName() );
qp.setPositionalParameterTypes( types );
qp.setPositionalParameterValues( ids );
qp.setLockOptions( lockOptions );
qp.setOptionalObject( null );
qp.setOptionalId( null );
return qp;
}
@Override
protected UniqueEntityLoader buildBatchingLoader(
OuterJoinLoadable persister,
int batchSize,
LockMode lockMode,
SessionFactoryImplementor factory,
LoadQueryInfluencers influencers) {
return new DynamicBatchingEntityLoader( persister, batchSize, lockMode, factory, influencers );
}
@Override
protected UniqueEntityLoader buildBatchingLoader(
OuterJoinLoadable persister,
int batchSize,
LockOptions lockOptions,
SessionFactoryImplementor factory,
LoadQueryInfluencers influencers) {
return new DynamicBatchingEntityLoader( persister, batchSize, lockOptions, factory, influencers );
}
public static class DynamicBatchingEntityLoader extends BatchingEntityLoader {
private final int maxBatchSize;
private final UniqueEntityLoader singleKeyLoader;
private final DynamicEntityLoader dynamicLoader;
public DynamicBatchingEntityLoader(
OuterJoinLoadable persister,
int maxBatchSize,
LockMode lockMode,
SessionFactoryImplementor factory,
LoadQueryInfluencers loadQueryInfluencers) {
super( persister );
this.maxBatchSize = maxBatchSize;
this.singleKeyLoader = new EntityLoader( persister, 1, lockMode, factory, loadQueryInfluencers );
this.dynamicLoader = new DynamicEntityLoader( persister, maxBatchSize, lockMode, factory, loadQueryInfluencers );
}
public DynamicBatchingEntityLoader(
OuterJoinLoadable persister,
int maxBatchSize,
LockOptions lockOptions,
SessionFactoryImplementor factory,
LoadQueryInfluencers loadQueryInfluencers) {
super( persister );
this.maxBatchSize = maxBatchSize;
this.singleKeyLoader = new EntityLoader( persister, 1, lockOptions, factory, loadQueryInfluencers );
this.dynamicLoader = new DynamicEntityLoader( persister, maxBatchSize, lockOptions, factory, loadQueryInfluencers );
}
@Override
public Object load(
Serializable id,
Object optionalObject,
SharedSessionContractImplementor session,
LockOptions lockOptions) {
return load (id, optionalObject, session, lockOptions, null );
}
@Override
public Object load(
Serializable id,
Object optionalObject,
SharedSessionContractImplementor session,
LockOptions lockOptions,
Boolean readOnly) {
final Serializable[] batch = session.getPersistenceContextInternal()
.getBatchFetchQueue()
.getEntityBatch( persister(), id, maxBatchSize, persister().getEntityMode() );
final int numberOfIds = ArrayHelper.countNonNull( batch );
if ( numberOfIds <= 1 ) {
final Object result = singleKeyLoader.load( id, optionalObject, session );
if ( result == null ) {
// There was no entity with the specified ID. Make sure the EntityKey does not remain
// in the batch to avoid including it in future batches that get executed.
BatchFetchQueueHelper.removeBatchLoadableEntityKey( id, persister(), session );
}
return result;
}
final Serializable[] idsToLoad = new Serializable[numberOfIds];
System.arraycopy( batch, 0, idsToLoad, 0, numberOfIds );
if ( log.isDebugEnabled() ) {
log.debugf( "Batch loading entity: %s", MessageHelper.infoString( persister(), idsToLoad, session.getFactory() ) );
}
QueryParameters qp = buildQueryParameters( id, idsToLoad, optionalObject, lockOptions, readOnly );
List results = dynamicLoader.doEntityBatchFetch( session, qp, idsToLoad );
// The EntityKey for any entity that is not found will remain in the batch.
// Explicitly remove the EntityKeys for entities that were not found to
// avoid including them in future batches that get executed.
BatchFetchQueueHelper.removeNotFoundBatchLoadableEntityKeys( idsToLoad, results, persister(), session );
return getObjectFromList( results, id, session );
}
}
private static class DynamicEntityLoader extends EntityLoader {
// todo : see the discussion on org.hibernate.loader.collection.DynamicBatchingCollectionInitializerBuilder.DynamicBatchingCollectionLoader
private final String sqlTemplate;
private final String alias;
public DynamicEntityLoader(
OuterJoinLoadable persister,
int maxBatchSize,
LockOptions lockOptions,
SessionFactoryImplementor factory,
LoadQueryInfluencers loadQueryInfluencers) {
this( persister, maxBatchSize, lockOptions.getLockMode(), factory, loadQueryInfluencers );
}
public DynamicEntityLoader(
OuterJoinLoadable persister,
int maxBatchSize,
LockMode lockMode,
SessionFactoryImplementor factory,
LoadQueryInfluencers loadQueryInfluencers) {
super( persister, -1, lockMode, factory, loadQueryInfluencers );
EntityJoinWalker walker = new EntityJoinWalker(
persister,
persister.getIdentifierColumnNames(),
-1,
lockMode,
factory,
loadQueryInfluencers) {
@Override
protected StringBuilder whereString(String alias, String[] columnNames, int batchSize) {
return StringHelper.buildBatchFetchRestrictionFragment(
alias,
columnNames,
getFactory().getDialect()
);
}
};
initFromWalker( walker );
this.sqlTemplate = walker.getSQLString();
this.alias = walker.getAlias();
postInstantiate();
if ( LOG.isDebugEnabled() ) {
LOG.debugf(
"SQL-template for dynamic entity [%s] batch-fetching [%s] : %s",
entityName,
lockMode,
sqlTemplate
);
}
}
@Override
protected boolean isSingleRowLoader() {
return false;
}
@Override
protected boolean isSubselectLoadingEnabled() {
return persister.hasSubselectLoadableCollections();
}
public List doEntityBatchFetch(
SharedSessionContractImplementor session,
QueryParameters queryParameters,
Serializable[] ids) {
final JdbcServices jdbcServices = session.getJdbcServices();
final String sql = StringHelper.expandBatchIdPlaceholder(
sqlTemplate,
ids,
alias,
persister.getKeyColumnNames(),
jdbcServices.getJdbcEnvironment().getDialect()
);
try {
final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
boolean defaultReadOnlyOrig = persistenceContext.isDefaultReadOnly();
if ( queryParameters.isReadOnlyInitialized() ) {
// The read-only/modifiable mode for the query was explicitly set.
// Temporarily set the default read-only/modifiable setting to the query's setting.
persistenceContext.setDefaultReadOnly( queryParameters.isReadOnly() );
}
else {
// The read-only/modifiable setting for the query was not initialized.
// Use the default read-only/modifiable from the persistence context instead.
queryParameters.setReadOnly( persistenceContext.isDefaultReadOnly() );
}
persistenceContext.beforeLoad();
List results;
try {
try {
results = doTheLoad( sql, queryParameters, session );
}
finally {
persistenceContext.afterLoad();
}
persistenceContext.initializeNonLazyCollections();
log.debug( "Done batch load" );
return results;
}
finally {
// Restore the original default
persistenceContext.setDefaultReadOnly( defaultReadOnlyOrig );
}
}
catch ( SQLException sqle ) {
throw jdbcServices.getSqlExceptionHelper().convert(
sqle,
"could not load an entity batch: " + MessageHelper.infoString(
getEntityPersisters()[0],
ids,
session.getFactory()
),
sql
);
}
}
private List doTheLoad(String sql, QueryParameters queryParameters, SharedSessionContractImplementor session) throws SQLException {
final RowSelection selection = queryParameters.getRowSelection();
final int maxRows = LimitHelper.hasMaxRows( selection ) ?
selection.getMaxRows() :
Integer.MAX_VALUE;
final List<AfterLoadAction> afterLoadActions = new ArrayList<>();
final SqlStatementWrapper wrapper = executeQueryStatement( sql, queryParameters, false, afterLoadActions, session );
final ResultSet rs = wrapper.getResultSet();
final Statement st = wrapper.getStatement();
try {
return processResultSet( rs, queryParameters, session, false, null, maxRows, afterLoadActions );
}
finally {
final JdbcCoordinator jdbcCoordinator = session.getJdbcCoordinator();
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( st );
jdbcCoordinator.afterStatementExecution();
}
}
}
}

View File

@@ -153,6 +153,10 @@ public abstract class AbstractLoadPlanBasedEntityLoader extends AbstractLoadPlan
);
}
public OuterJoinLoadable getEntityPersister() {
return entityPersister;
}
@Override
protected LoadQueryDetails getStaticLoadQuery() {
return staticLoadQuery;
@@ -162,6 +166,25 @@ public abstract class AbstractLoadPlanBasedEntityLoader extends AbstractLoadPlan
return entityName;
}
public List<?> loadEntityBatch(
Serializable[] idsInBatch,
OuterJoinLoadable persister,
LockOptions lockOptions,
SharedSessionContractImplementor session) {
final Type idType = persister.getIdentifierType();
return loadEntityBatch(
session,
idsInBatch,
persister.getIdentifierType(),
null,
null,
null,
persister,
lockOptions
);
}
/**
* Called by wrappers that batch load entities
* @param persister only needed for logging

View File

@@ -21,7 +21,7 @@ import org.hibernate.pretty.MessageHelper;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
* Batching entity loader using dynamic where-clause
*/
public class DynamicBatchingEntityLoader extends BatchingEntityLoader {
private static final Logger log = Logger.getLogger( DynamicBatchingEntityLoader.class );

View File

@@ -14,7 +14,9 @@ import org.hibernate.loader.entity.UniqueEntityLoader;
import org.hibernate.persister.entity.OuterJoinLoadable;
/**
* @author Steve Ebersole
* A walking/plan based BatchingEntityLoaderBuilder that builds entity-loader instances
* capable of dynamically building its batch-fetch SQL based on the actual number of
* entity ids waiting to be batch fetched.
*/
public class DynamicBatchingEntityLoaderBuilder extends AbstractBatchingEntityLoaderBuilder {
/**

View File

@@ -17,19 +17,11 @@ import org.hibernate.loader.plan.exec.query.internal.QueryBuildingParametersImpl
import org.hibernate.loader.plan.exec.query.spi.QueryBuildingParameters;
import org.hibernate.persister.entity.OuterJoinLoadable;
import org.hibernate.type.Type;
import org.jboss.logging.Logger;
/**
* UniqueEntityLoader implementation that is the main functionality for LoadPlan-based Entity loading.
* <p/>
* Can handle batch-loading as well as non-pk, unique-key loading,
* <p/>
* Much is ultimately delegated to its superclass, AbstractLoadPlanBasedEntityLoader. However:
*
* Loads an entity instance using outerjoin fetching to fetch associated entities.
* <br>
* The <tt>EntityPersister</tt> must implement <tt>Loadable</tt>. For other entities,
* create a customized subclass of <tt>Loader</tt>.
*
* @author Gavin King
* @author Steve Ebersole
@@ -176,4 +168,5 @@ public class EntityLoader extends AbstractLoadPlanBasedEntityLoader {
protected EntityLoadQueryDetails getStaticLoadQuery() {
return (EntityLoadQueryDetails) super.getStaticLoadQuery();
}
}

View File

@@ -0,0 +1,325 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.loader.entity.plan;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.Status;
import org.hibernate.event.spi.EventSource;
import org.hibernate.event.spi.LoadEvent;
import org.hibernate.event.spi.LoadEventListener;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.loader.entity.CacheEntityLoaderHelper;
import org.hibernate.persister.entity.MultiLoadOptions;
import org.hibernate.persister.entity.OuterJoinLoadable;
import org.hibernate.type.Type;
/**
* @author Steve Ebersole
*/
public class MultiEntityLoadingSupport {
public static List<?> multiLoad(
OuterJoinLoadable persister,
Serializable[] ids,
SharedSessionContractImplementor session,
MultiLoadOptions loadOptions) {
if ( loadOptions.isOrderReturnEnabled() ) {
return performOrderedMultiLoad( persister, ids, session, loadOptions );
}
else {
return performUnorderedMultiLoad( persister, ids, session, loadOptions );
}
}
@SuppressWarnings("unchecked")
private static List performOrderedMultiLoad(
OuterJoinLoadable persister,
Serializable[] ids,
SharedSessionContractImplementor session,
MultiLoadOptions loadOptions) {
assert loadOptions.isOrderReturnEnabled();
final List result = CollectionHelper.arrayList( ids.length );
final LockOptions lockOptions = (loadOptions.getLockOptions() == null)
? new LockOptions( LockMode.NONE )
: loadOptions.getLockOptions();
final int maxBatchSize;
if ( loadOptions.getBatchSize() != null && loadOptions.getBatchSize() > 0 ) {
maxBatchSize = loadOptions.getBatchSize();
}
else {
maxBatchSize = session.getJdbcServices().getJdbcEnvironment().getDialect().getDefaultBatchLoadSizingStrategy().determineOptimalBatchLoadSize(
persister.getIdentifierType().getColumnSpan( session.getFactory() ),
ids.length
);
}
final List<Serializable> idsInBatch = new ArrayList<>();
final List<Integer> elementPositionsLoadedByBatch = new ArrayList<>();
for ( int i = 0; i < ids.length; i++ ) {
final Serializable id = ids[i];
final EntityKey entityKey = new EntityKey( id, persister );
if ( loadOptions.isSessionCheckingEnabled() || loadOptions.isSecondLevelCacheCheckingEnabled() ) {
LoadEvent loadEvent = new LoadEvent(
id,
persister.getMappedClass().getName(),
lockOptions,
(EventSource) session,
null
);
Object managedEntity = null;
if ( loadOptions.isSessionCheckingEnabled() ) {
// look for it in the Session first
CacheEntityLoaderHelper.PersistenceContextEntry persistenceContextEntry = CacheEntityLoaderHelper.INSTANCE
.loadFromSessionCache(
loadEvent,
entityKey,
LoadEventListener.GET
);
managedEntity = persistenceContextEntry.getEntity();
if ( managedEntity != null && !loadOptions.isReturnOfDeletedEntitiesEnabled() && !persistenceContextEntry
.isManaged() ) {
// put a null in the result
result.add( i, null );
continue;
}
}
if ( managedEntity == null && loadOptions.isSecondLevelCacheCheckingEnabled() ) {
// look for it in the SessionFactory
managedEntity = CacheEntityLoaderHelper.INSTANCE.loadFromSecondLevelCache(
loadEvent,
persister,
entityKey
);
}
if ( managedEntity != null ) {
result.add( i, managedEntity );
continue;
}
}
// if we did not hit any of the continues above, then we need to batch
// load the entity state.
idsInBatch.add( ids[i] );
if ( idsInBatch.size() >= maxBatchSize ) {
performOrderedBatchLoad( idsInBatch, lockOptions, persister, session );
}
// Save the EntityKey instance for use later!
result.add( i, entityKey );
elementPositionsLoadedByBatch.add( i );
}
if ( !idsInBatch.isEmpty() ) {
performOrderedBatchLoad( idsInBatch, lockOptions, persister, session );
}
final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
for ( Integer position : elementPositionsLoadedByBatch ) {
// the element value at this position in the result List should be
// the EntityKey for that entity; reuse it!
final EntityKey entityKey = (EntityKey) result.get( position );
Object entity = persistenceContext.getEntity( entityKey );
if ( entity != null && !loadOptions.isReturnOfDeletedEntitiesEnabled() ) {
// make sure it is not DELETED
final EntityEntry entry = persistenceContext.getEntry( entity );
if ( entry.getStatus() == Status.DELETED || entry.getStatus() == Status.GONE ) {
// the entity is locally deleted, and the options ask that we not return such entities...
entity = null;
}
}
result.set( position, entity );
}
return result;
}
private static void performOrderedBatchLoad(
List<Serializable> idsInBatch,
LockOptions lockOptions,
OuterJoinLoadable persister,
SharedSessionContractImplementor session) {
final EntityLoader entityLoader = EntityLoader.forEntity( persister )
.withInfluencers( session.getLoadQueryInfluencers() )
.withLockOptions( lockOptions )
.withBatchSize( idsInBatch.size() ).byPrimaryKey();
entityLoader.loadEntityBatch(
idsInBatch.toArray( new Serializable[0] ),
persister,
lockOptions,
session
);
idsInBatch.clear();
}
@SuppressWarnings("unchecked")
protected static List performUnorderedMultiLoad(
OuterJoinLoadable persister,
Serializable[] ids,
SharedSessionContractImplementor session,
MultiLoadOptions loadOptions) {
assert !loadOptions.isOrderReturnEnabled();
final List result = CollectionHelper.arrayList( ids.length );
final LockOptions lockOptions = (loadOptions.getLockOptions() == null)
? new LockOptions( LockMode.NONE )
: loadOptions.getLockOptions();
if ( loadOptions.isSessionCheckingEnabled() || loadOptions.isSecondLevelCacheCheckingEnabled() ) {
// the user requested that we exclude ids corresponding to already managed
// entities from the generated load SQL. So here we will iterate all
// incoming id values and see whether it corresponds to an existing
// entity associated with the PC - if it does we add it to the result
// list immediately and remove its id from the group of ids to load.
boolean foundAnyManagedEntities = false;
final List<Serializable> nonManagedIds = new ArrayList<Serializable>();
for ( Serializable id : ids ) {
final EntityKey entityKey = new EntityKey( id, persister );
LoadEvent loadEvent = new LoadEvent(
id,
persister.getMappedClass().getName(),
lockOptions,
(EventSource) session,
null
);
Object managedEntity = null;
// look for it in the Session first
CacheEntityLoaderHelper.PersistenceContextEntry persistenceContextEntry = CacheEntityLoaderHelper.INSTANCE
.loadFromSessionCache(
loadEvent,
entityKey,
LoadEventListener.GET
);
if ( loadOptions.isSessionCheckingEnabled() ) {
managedEntity = persistenceContextEntry.getEntity();
if ( managedEntity != null && !loadOptions.isReturnOfDeletedEntitiesEnabled() && !persistenceContextEntry
.isManaged() ) {
foundAnyManagedEntities = true;
result.add( null );
continue;
}
}
if ( managedEntity == null && loadOptions.isSecondLevelCacheCheckingEnabled() ) {
managedEntity = CacheEntityLoaderHelper.INSTANCE.loadFromSecondLevelCache(
loadEvent,
persister,
entityKey
);
}
if ( managedEntity != null ) {
foundAnyManagedEntities = true;
result.add( managedEntity );
}
else {
nonManagedIds.add( id );
}
}
if ( foundAnyManagedEntities ) {
if ( nonManagedIds.isEmpty() ) {
// all of the given ids were already associated with the Session
return result;
}
else {
// over-write the ids to be loaded with the collection of
// just non-managed ones
ids = nonManagedIds.toArray(
(Serializable[]) Array.newInstance(
ids.getClass().getComponentType(),
nonManagedIds.size()
)
);
}
}
}
int numberOfIdsLeft = ids.length;
final int maxBatchSize;
if ( loadOptions.getBatchSize() != null && loadOptions.getBatchSize() > 0 ) {
maxBatchSize = loadOptions.getBatchSize();
}
else {
maxBatchSize = session.getJdbcServices().getJdbcEnvironment().getDialect().getDefaultBatchLoadSizingStrategy().determineOptimalBatchLoadSize(
persister.getIdentifierType().getColumnSpan( session.getFactory() ),
numberOfIdsLeft
);
}
int idPosition = 0;
while ( numberOfIdsLeft > 0 ) {
int batchSize = Math.min( numberOfIdsLeft, maxBatchSize );
final EntityLoader entityLoader = EntityLoader.forEntity( persister )
.withInfluencers( session.getLoadQueryInfluencers() )
.withLockOptions( lockOptions )
.withBatchSize( batchSize ).byPrimaryKey();
Serializable[] idsInBatch = new Serializable[batchSize];
System.arraycopy( ids, idPosition, idsInBatch, 0, batchSize );
final List<?> batchResults = entityLoader.loadEntityBatch(
idsInBatch,
persister,
lockOptions,
session
);
result.addAll( batchResults );
numberOfIdsLeft = numberOfIdsLeft - batchSize;
idPosition += batchSize;
}
return result;
}
public static QueryParameters buildMultiLoadQueryParameters(
OuterJoinLoadable persister,
Serializable[] ids,
LockOptions lockOptions) {
Type[] types = new Type[ids.length];
Arrays.fill( types, persister.getIdentifierType() );
QueryParameters qp = new QueryParameters();
qp.setOptionalEntityName( persister.getEntityName() );
qp.setPositionalParameterTypes( types );
qp.setPositionalParameterValues( ids );
qp.setLockOptions( lockOptions );
qp.setOptionalObject( null );
qp.setOptionalId( null );
return qp;
}
}

View File

@@ -20,7 +20,7 @@ import org.hibernate.loader.entity.UniqueEntityLoader;
import org.hibernate.persister.entity.OuterJoinLoadable;
/**
* @author Steve Ebersole
* Batching entity loader using padded where-clause
*/
public class PaddedBatchingEntityLoader extends BatchingEntityLoader {
private final int[] batchSizes;

View File

@@ -14,7 +14,9 @@ import org.hibernate.loader.entity.UniqueEntityLoader;
import org.hibernate.persister.entity.OuterJoinLoadable;
/**
* @author Steve Ebersole
* A walking/plan based BatchingEntityLoaderBuilder that builds entity-loader instances
* building its batch-fetch SQL based on padding - using a set number of parameters, but
* setting "unneeded ones" to null.
*/
public class PaddedBatchingEntityLoaderBuilder extends AbstractBatchingEntityLoaderBuilder {
/**

View File

@@ -104,9 +104,10 @@ import org.hibernate.loader.custom.sql.SQLQueryParser;
import org.hibernate.loader.entity.BatchingEntityLoaderBuilder;
import org.hibernate.loader.entity.CacheEntityLoaderHelper;
import org.hibernate.loader.entity.CascadeEntityLoader;
import org.hibernate.loader.entity.DynamicBatchingEntityLoaderBuilder;
import org.hibernate.loader.entity.plan.DynamicBatchingEntityLoaderBuilder;
import org.hibernate.loader.entity.EntityLoader;
import org.hibernate.loader.entity.UniqueEntityLoader;
import org.hibernate.loader.entity.plan.MultiEntityLoadingSupport;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.Formula;
@@ -4461,7 +4462,7 @@ public abstract class AbstractEntityPersister
@Override
public List multiLoad(Serializable[] ids, SharedSessionContractImplementor session, MultiLoadOptions loadOptions) {
return DynamicBatchingEntityLoaderBuilder.INSTANCE.multiLoad(
return MultiEntityLoadingSupport.multiLoad(
this,
ids,
session,

View File

@@ -15,7 +15,6 @@ import javax.persistence.SharedCacheMode;
import javax.persistence.Table;
import org.hibernate.CacheMode;
import org.hibernate.Session;
import org.hibernate.annotations.BatchSize;
import org.hibernate.boot.MetadataBuilder;
import org.hibernate.boot.SessionFactoryBuilder;
@@ -27,6 +26,7 @@ import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.Status;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.stat.Statistics;
import org.hibernate.testing.TestForIssue;
@@ -36,12 +36,13 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.hibernate.testing.transaction.TransactionUtil.doInHibernate;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
@@ -78,36 +79,36 @@ public class MultiLoadTest extends BaseNonConfigCoreFunctionalTestCase {
@Before
public void before() {
Session session = sessionFactory().openSession();
session.getTransaction().begin();
session.setCacheMode( CacheMode.IGNORE );
for ( int i = 1; i <= 60; i++ ) {
session.save( new SimpleEntity( i, "Entity #" + i ) );
}
session.getTransaction().commit();
session.close();
inTransaction(
session -> {
session.setCacheMode( CacheMode.IGNORE );
for ( int i = 1; i <= 60; i++ ) {
session.save( new SimpleEntity( i, "Entity #" + i ) );
}
}
);
}
@After
public void after() {
Session session = sessionFactory().openSession();
session.getTransaction().begin();
session.createQuery( "delete SimpleEntity" ).executeUpdate();
session.getTransaction().commit();
session.close();
inTransaction(
session -> {
session.createQuery( "delete SimpleEntity" ).executeUpdate();
}
);
}
@Test
public void testBasicMultiLoad() {
doInHibernate(
this::sessionFactory, session -> {
inTransaction(
session -> {
sqlStatementInterceptor.getSqlQueries().clear();
List<SimpleEntity> list = session.byMultipleIds( SimpleEntity.class ).multiLoad( ids( 5 ) );
assertEquals( 5, list.size() );
assertTrue( sqlStatementInterceptor.getSqlQueries().getFirst().endsWith( "id in (?,?,?,?,?)" ) );
final int paramCount = StringHelper.countUnquoted( sqlStatementInterceptor.getSqlQueries().getFirst(), '?' );
assertThat( paramCount, is( 5 ) );
}
);
}
@@ -115,8 +116,8 @@ public class MultiLoadTest extends BaseNonConfigCoreFunctionalTestCase {
@Test
@TestForIssue( jiraKey = "HHH-10984" )
public void testUnflushedDeleteAndThenMultiLoad() {
doInHibernate(
this::sessionFactory, session -> {
inTransaction(
session -> {
// delete one of them (but do not flush)...
session.delete( session.load( SimpleEntity.class, 5 ) );
@@ -138,8 +139,8 @@ public class MultiLoadTest extends BaseNonConfigCoreFunctionalTestCase {
@Test
@TestForIssue( jiraKey = "HHH-10617" )
public void testDuplicatedRequestedIds() {
doInHibernate(
this::sessionFactory, session -> {
inTransaction(
session -> {
// ordered multiLoad
List<SimpleEntity> list = session.byMultipleIds( SimpleEntity.class ).multiLoad( 1, 2, 3, 2, 2 );
assertEquals( 5, list.size() );
@@ -156,8 +157,8 @@ public class MultiLoadTest extends BaseNonConfigCoreFunctionalTestCase {
@Test
@TestForIssue( jiraKey = "HHH-10617" )
public void testNonExistentIdRequest() {
doInHibernate(
this::sessionFactory, session -> {
inTransaction(
session -> {
// ordered multiLoad
List<SimpleEntity> list = session.byMultipleIds( SimpleEntity.class ).multiLoad( 1, 699, 2 );
assertEquals( 3, list.size() );
@@ -172,258 +173,278 @@ public class MultiLoadTest extends BaseNonConfigCoreFunctionalTestCase {
@Test
public void testBasicMultiLoadWithManagedAndNoChecking() {
Session session = openSession();
session.getTransaction().begin();
SimpleEntity first = session.byId( SimpleEntity.class ).load( 1 );
List<SimpleEntity> list = session.byMultipleIds( SimpleEntity.class ).multiLoad( ids(56) );
assertEquals( 56, list.size() );
// this check is HIGHLY specific to implementation in the batch loader
// which puts existing managed entities first...
assertSame( first, list.get( 0 ) );
session.getTransaction().commit();
session.close();
inTransaction(
session -> {
SimpleEntity first = session.byId( SimpleEntity.class ).load( 1 );
List<SimpleEntity> list = session.byMultipleIds( SimpleEntity.class ).multiLoad( ids(56) );
assertEquals( 56, list.size() );
// this check is HIGHLY specific to implementation in the batch loader
// which puts existing managed entities first...
assertSame( first, list.get( 0 ) );
}
);
}
@Test
public void testBasicMultiLoadWithManagedAndChecking() {
Session session = openSession();
session.getTransaction().begin();
SimpleEntity first = session.byId( SimpleEntity.class ).load( 1 );
List<SimpleEntity> list = session.byMultipleIds( SimpleEntity.class ).enableSessionCheck( true ).multiLoad( ids(56) );
assertEquals( 56, list.size() );
// this check is HIGHLY specific to implementation in the batch loader
// which puts existing managed entities first...
assertSame( first, list.get( 0 ) );
session.getTransaction().commit();
session.close();
inTransaction(
session -> {
SimpleEntity first = session.byId( SimpleEntity.class ).load( 1 );
List<SimpleEntity> list = session.byMultipleIds( SimpleEntity.class ).enableSessionCheck( true ).multiLoad( ids(56) );
assertEquals( 56, list.size() );
// this check is HIGHLY specific to implementation in the batch loader
// which puts existing managed entities first...
assertSame( first, list.get( 0 ) );
}
);
}
@Test
@TestForIssue(jiraKey = "HHH-12944")
public void testMultiLoadFrom2ndLevelCache() {
Statistics statistics = sessionFactory().getStatistics();
sessionFactory().getCache().evictAll();
final Statistics statistics = sessionFactory().getStatistics();
statistics.clear();
doInHibernate( this::sessionFactory, session -> {
// Load 1 of the items directly
SimpleEntity entity = session.get( SimpleEntity.class, 2 );
assertNotNull( entity );
inTransaction(
session -> {
// Load 1 of the items directly
SimpleEntity entity = session.get( SimpleEntity.class, 2 );
assertNotNull( entity );
assertEquals( 1, statistics.getSecondLevelCacheMissCount() );
assertEquals( 0, statistics.getSecondLevelCacheHitCount() );
assertEquals( 1, statistics.getSecondLevelCachePutCount() );
assertTrue( session.getSessionFactory().getCache().containsEntity( SimpleEntity.class, 2 ) );
} );
assertEquals( 1, statistics.getSecondLevelCacheMissCount() );
assertEquals( 0, statistics.getSecondLevelCacheHitCount() );
assertEquals( 1, statistics.getSecondLevelCachePutCount() );
assertTrue( session.getSessionFactory().getCache().containsEntity( SimpleEntity.class, 2 ) );
}
);
statistics.clear();
doInHibernate( this::sessionFactory, session -> {
// Validate that the entity is still in the Level 2 cache
assertTrue( session.getSessionFactory().getCache().containsEntity( SimpleEntity.class, 2 ) );
inTransaction(
session -> {
// Validate that the entity is still in the Level 2 cache
assertTrue( session.getSessionFactory().getCache().containsEntity( SimpleEntity.class, 2 ) );
sqlStatementInterceptor.getSqlQueries().clear();
sqlStatementInterceptor.getSqlQueries().clear();
// Multiload 3 items and ensure that multiload pulls 2 from the database & 1 from the cache.
List<SimpleEntity> entities = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.multiLoad( ids( 3 ) );
assertEquals( 3, entities.size() );
assertEquals( 1, statistics.getSecondLevelCacheHitCount() );
// Multiload 3 items and ensure that multiload pulls 2 from the database & 1 from the cache.
List<SimpleEntity> entities = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.multiLoad( ids( 3 ) );
for(SimpleEntity entity: entities) {
assertTrue( session.contains( entity ) );
}
assertEquals( 3, entities.size() );
assertEquals( 1, statistics.getSecondLevelCacheHitCount() );
assertTrue( sqlStatementInterceptor.getSqlQueries().getFirst().endsWith( "id in (?,?)" ) );
} );
for ( SimpleEntity entity: entities ) {
assertTrue( session.contains( entity ) );
}
final int paramCount = StringHelper.countUnquoted( sqlStatementInterceptor.getSqlQueries().getFirst(), '?' );
assertThat( paramCount, is( 2 ) );
}
);
}
@Test
@TestForIssue(jiraKey = "HHH-12944")
public void testUnorderedMultiLoadFrom2ndLevelCache() {
Statistics statistics = sessionFactory().getStatistics();
sessionFactory().getCache().evictAll();
final Statistics statistics = sessionFactory().getStatistics();
statistics.clear();
doInHibernate( this::sessionFactory, session -> {
// Load 1 of the items directly
SimpleEntity entity = session.get( SimpleEntity.class, 2 );
assertNotNull( entity );
inTransaction(
session -> {
// Load 1 of the items directly
final SimpleEntity entity = session.get( SimpleEntity.class, 2 );
assertNotNull( entity );
assertEquals( 1, statistics.getSecondLevelCacheMissCount() );
assertEquals( 0, statistics.getSecondLevelCacheHitCount() );
assertEquals( 1, statistics.getSecondLevelCachePutCount() );
assertTrue( session.getSessionFactory().getCache().containsEntity( SimpleEntity.class, 2 ) );
} );
assertEquals( 1, statistics.getSecondLevelCacheMissCount() );
assertEquals( 0, statistics.getSecondLevelCacheHitCount() );
assertEquals( 1, statistics.getSecondLevelCachePutCount() );
assertTrue( session.getSessionFactory().getCache().containsEntity( SimpleEntity.class, 2 ) );
}
);
statistics.clear();
doInHibernate( this::sessionFactory, session -> {
// Validate that the entity is still in the Level 2 cache
assertTrue( session.getSessionFactory().getCache().containsEntity( SimpleEntity.class, 2 ) );
inTransaction(
session -> {
// Validate that the entity is still in the Level 2 cache
assertTrue( session.getSessionFactory().getCache().containsEntity( SimpleEntity.class, 2 ) );
sqlStatementInterceptor.getSqlQueries().clear();
sqlStatementInterceptor.getSqlQueries().clear();
// Multiload 3 items and ensure that multiload pulls 2 from the database & 1 from the cache.
List<SimpleEntity> entities = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.enableOrderedReturn( false )
.multiLoad( ids( 3 ) );
assertEquals( 3, entities.size() );
assertEquals( 1, statistics.getSecondLevelCacheHitCount() );
// Multiload 3 items and ensure that multiload pulls 2 from the database & 1 from the cache.
List<SimpleEntity> entities = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.enableOrderedReturn( false )
.multiLoad( ids( 3 ) );
assertEquals( 3, entities.size() );
assertEquals( 1, statistics.getSecondLevelCacheHitCount() );
for(SimpleEntity entity: entities) {
assertTrue( session.contains( entity ) );
}
for(SimpleEntity entity: entities) {
assertTrue( session.contains( entity ) );
}
assertTrue( sqlStatementInterceptor.getSqlQueries().getFirst().endsWith( "id in (?,?)" ) );
} );
final int paramCount = StringHelper.countUnquoted( sqlStatementInterceptor.getSqlQueries().getFirst(), '?' );
assertThat( paramCount, is( 2 ) );
}
);
}
@Test
@TestForIssue(jiraKey = "HHH-12944")
public void testOrderedMultiLoadFrom2ndLevelCachePendingDelete() {
inTransaction(
session -> {
session.remove( session.find( SimpleEntity.class, 2 ) );
doInHibernate( this::sessionFactory, session -> {
session.remove( session.find( SimpleEntity.class, 2 ) );
sqlStatementInterceptor.getSqlQueries().clear();
sqlStatementInterceptor.getSqlQueries().clear();
// Multi-load 3 items and ensure that it pulls 2 from the database & 1 from the cache.
List<SimpleEntity> entities = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.enableOrderedReturn( true )
.multiLoad( ids( 3 ) );
assertEquals( 3, entities.size() );
// Multiload 3 items and ensure that multiload pulls 2 from the database & 1 from the cache.
List<SimpleEntity> entities = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.enableOrderedReturn( true )
.multiLoad( ids( 3 ) );
assertEquals( 3, entities.size() );
assertNull( entities.get(1) );
assertNull( entities.get(1) );
assertTrue( sqlStatementInterceptor.getSqlQueries().getFirst().endsWith( "id in (?,?)" ) );
} );
final int paramCount = StringHelper.countUnquoted( sqlStatementInterceptor.getSqlQueries().getFirst(), '?' );
assertThat( paramCount, is( 2 ) );
}
);
}
@Test
@TestForIssue(jiraKey = "HHH-12944")
public void testOrderedMultiLoadFrom2ndLevelCachePendingDeleteReturnRemoved() {
inTransaction(
session -> {
session.remove( session.find( SimpleEntity.class, 2 ) );
doInHibernate( this::sessionFactory, session -> {
session.remove( session.find( SimpleEntity.class, 2 ) );
sqlStatementInterceptor.getSqlQueries().clear();
sqlStatementInterceptor.getSqlQueries().clear();
// Multiload 3 items and ensure that multiload pulls 2 from the database & 1 from the cache.
List<SimpleEntity> entities = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.enableOrderedReturn( true )
.enableReturnOfDeletedEntities( true )
.multiLoad( ids( 3 ) );
assertEquals( 3, entities.size() );
// Multiload 3 items and ensure that multiload pulls 2 from the database & 1 from the cache.
List<SimpleEntity> entities = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.enableOrderedReturn( true )
.enableReturnOfDeletedEntities( true )
.multiLoad( ids( 3 ) );
assertEquals( 3, entities.size() );
SimpleEntity deletedEntity = entities.get(1);
assertNotNull( deletedEntity );
SimpleEntity deletedEntity = entities.get(1);
assertNotNull( deletedEntity );
final EntityEntry entry = ((SharedSessionContractImplementor) session).getPersistenceContext().getEntry( deletedEntity );
assertTrue( entry.getStatus() == Status.DELETED || entry.getStatus() == Status.GONE );
final EntityEntry entry = ((SharedSessionContractImplementor) session).getPersistenceContext().getEntry( deletedEntity );
assertTrue( entry.getStatus() == Status.DELETED || entry.getStatus() == Status.GONE );
assertTrue( sqlStatementInterceptor.getSqlQueries().getFirst().endsWith( "id in (?,?)" ) );
} );
final int paramCount = StringHelper.countUnquoted( sqlStatementInterceptor.getSqlQueries().getFirst(), '?' );
assertThat( paramCount, is( 2 ) );
}
);
}
@Test
@TestForIssue(jiraKey = "HHH-12944")
public void testUnorderedMultiLoadFrom2ndLevelCachePendingDelete() {
inTransaction(
session -> {
session.remove( session.find( SimpleEntity.class, 2 ) );
doInHibernate( this::sessionFactory, session -> {
session.remove( session.find( SimpleEntity.class, 2 ) );
sqlStatementInterceptor.getSqlQueries().clear();
sqlStatementInterceptor.getSqlQueries().clear();
// Multiload 3 items and ensure that multiload pulls 2 from the database & 1 from the cache.
List<SimpleEntity> entities = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.enableOrderedReturn( false )
.multiLoad( ids( 3 ) );
assertEquals( 3, entities.size() );
// Multiload 3 items and ensure that multiload pulls 2 from the database & 1 from the cache.
List<SimpleEntity> entities = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.enableOrderedReturn( false )
.multiLoad( ids( 3 ) );
assertEquals( 3, entities.size() );
assertTrue( entities.stream().anyMatch( Objects::isNull ) );
assertTrue( entities.stream().anyMatch( Objects::isNull ) );
assertTrue( sqlStatementInterceptor.getSqlQueries().getFirst().endsWith( "id in (?,?)" ) );
} );
final int paramCount = StringHelper.countUnquoted( sqlStatementInterceptor.getSqlQueries().getFirst(), '?' );
assertThat( paramCount, is( 2 ) );
}
);
}
@Test
@TestForIssue(jiraKey = "HHH-12944")
public void testUnorderedMultiLoadFrom2ndLevelCachePendingDeleteReturnRemoved() {
inTransaction(
session -> {
session.remove( session.find( SimpleEntity.class, 2 ) );
doInHibernate( this::sessionFactory, session -> {
session.remove( session.find( SimpleEntity.class, 2 ) );
sqlStatementInterceptor.getSqlQueries().clear();
sqlStatementInterceptor.getSqlQueries().clear();
// Multiload 3 items and ensure that multiload pulls 2 from the database & 1 from the cache.
List<SimpleEntity> entities = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.enableOrderedReturn( false )
.enableReturnOfDeletedEntities( true )
.multiLoad( ids( 3 ) );
assertEquals( 3, entities.size() );
// Multiload 3 items and ensure that multiload pulls 2 from the database & 1 from the cache.
List<SimpleEntity> entities = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.NORMAL )
.enableSessionCheck( true )
.enableOrderedReturn( false )
.enableReturnOfDeletedEntities( true )
.multiLoad( ids( 3 ) );
assertEquals( 3, entities.size() );
SimpleEntity deletedEntity = entities.stream().filter( simpleEntity -> simpleEntity.getId().equals( 2 ) ).findAny().orElse( null );
assertNotNull( deletedEntity );
SimpleEntity deletedEntity = entities.stream().filter( simpleEntity -> simpleEntity.getId().equals( 2 ) ).findAny().orElse( null );
assertNotNull( deletedEntity );
final EntityEntry entry = ((SharedSessionContractImplementor) session).getPersistenceContext().getEntry( deletedEntity );
assertTrue( entry.getStatus() == Status.DELETED || entry.getStatus() == Status.GONE );
final EntityEntry entry = ((SharedSessionContractImplementor) session).getPersistenceContext().getEntry( deletedEntity );
assertTrue( entry.getStatus() == Status.DELETED || entry.getStatus() == Status.GONE );
assertTrue( sqlStatementInterceptor.getSqlQueries().getFirst().endsWith( "id in (?,?)" ) );
} );
final int paramCount = StringHelper.countUnquoted( sqlStatementInterceptor.getSqlQueries().getFirst(), '?' );
assertThat( paramCount, is( 2 ) );
}
);
}
@Test
public void testMultiLoadWithCacheModeIgnore() {
// do the multi-load, telling Hibernate to IGNORE the L2 cache -
// the end result should be that the cache is (still) empty afterwards
Session session = openSession();
session.getTransaction().begin();
List<SimpleEntity> list = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.IGNORE )
.multiLoad( ids(56) );
session.getTransaction().commit();
session.close();
inTransaction(
session -> {
final List<SimpleEntity> list = session.byMultipleIds( SimpleEntity.class )
.with( CacheMode.IGNORE )
.multiLoad( ids(56) );
assertEquals( 56, list.size() );
for ( SimpleEntity entity : list ) {
assertFalse( sessionFactory().getCache().containsEntity( SimpleEntity.class, entity.getId() ) );
}
assertEquals( 56, list.size() );
for ( SimpleEntity entity : list ) {
assertFalse( sessionFactory().getCache().containsEntity( SimpleEntity.class, entity.getId() ) );
}
}
);
}
@Test
public void testMultiLoadClearsBatchFetchQueue() {
final EntityKey entityKey = new EntityKey(
1,
sessionFactory().getEntityPersister( SimpleEntity.class.getName() )
sessionFactory().getMetamodel().entityPersister( SimpleEntity.class.getName() )
);
Session session = openSession();
session.getTransaction().begin();
// create a proxy, which should add an entry to the BatchFetchQueue
SimpleEntity first = session.byId( SimpleEntity.class ).getReference( 1 );
assertTrue( ( (SessionImplementor) session ).getPersistenceContext().getBatchFetchQueue().containsEntityKey( entityKey ) );
inTransaction(
session -> {
// create a proxy, which should add an entry to the BatchFetchQueue
SimpleEntity first = session.byId( SimpleEntity.class ).getReference( 1 );
assertTrue( ( (SessionImplementor) session ).getPersistenceContext().getBatchFetchQueue().containsEntityKey( entityKey ) );
// now bulk load, which should clean up the BatchFetchQueue entry
List<SimpleEntity> list = session.byMultipleIds( SimpleEntity.class ).enableSessionCheck( true ).multiLoad( ids(56) );
// now bulk load, which should clean up the BatchFetchQueue entry
List<SimpleEntity> list = session.byMultipleIds( SimpleEntity.class ).enableSessionCheck( true ).multiLoad( ids(56) );
assertEquals( 56, list.size() );
assertFalse( ( (SessionImplementor) session ).getPersistenceContext().getBatchFetchQueue().containsEntityKey( entityKey ) );
session.getTransaction().commit();
session.close();
assertEquals( 56, list.size() );
assertFalse( ( (SessionImplementor) session ).getPersistenceContext().getBatchFetchQueue().containsEntityKey( entityKey ) );
}
);
}
private Integer[] ids(int count) {