HHH-8629: Integrate LoadPlans into collection initializers

Gail Badner 2013-10-25 12:54:59 -07:00
parent 0e4c2a9ed5
commit e1eef18d62
44 changed files with 2321 additions and 1221 deletions

View File

@ -44,7 +44,8 @@ public abstract class BatchingCollectionInitializerBuilder {
return DynamicBatchingCollectionInitializerBuilder.INSTANCE;
}
default: {
return LegacyBatchingCollectionInitializerBuilder.INSTANCE;
return org.hibernate.loader.collection.plan.LegacyBatchingCollectionInitializerBuilder.INSTANCE;
//return LegacyBatchingCollectionInitializerBuilder.INSTANCE;
}
}
}
@ -67,7 +68,7 @@ public abstract class BatchingCollectionInitializerBuilder {
LoadQueryInfluencers influencers) {
if ( maxBatchSize <= 1 ) {
// no batching
return new BasicCollectionLoader( persister, factory, influencers );
return buildNonBatchingLoader( persister, factory, influencers );
}
return createRealBatchingCollectionInitializer( persister, maxBatchSize, factory, influencers );
@ -98,7 +99,7 @@ public abstract class BatchingCollectionInitializerBuilder {
LoadQueryInfluencers influencers) {
if ( maxBatchSize <= 1 ) {
// no batching
return new OneToManyLoader( persister, factory, influencers );
return buildNonBatchingLoader( persister, factory, influencers );
}
return createRealBatchingOneToManyInitializer( persister, maxBatchSize, factory, influencers );
@ -109,4 +110,13 @@ public abstract class BatchingCollectionInitializerBuilder {
int maxBatchSize,
SessionFactoryImplementor factory,
LoadQueryInfluencers influencers);
protected CollectionInitializer buildNonBatchingLoader(
QueryableCollection persister,
SessionFactoryImplementor factory,
LoadQueryInfluencers influencers) {
return persister.isOneToMany() ?
new OneToManyLoader( persister, factory, influencers ) :
new BasicCollectionLoader( persister, factory, influencers );
}
}
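
For reference, a minimal usage sketch of the refactored template method (the builder/persister variables are placeholders, not part of this commit): with maxBatchSize <= 1, both createBatchingCollectionInitializer and createBatchingOneToManyInitializer now fall through to the overridable buildNonBatchingLoader, so a LoadPlan-based subclass can substitute its own non-batching loader without touching the batching dispatch.

// Sketch only; assumes the persister, factory and influencers are already in hand.
BatchingCollectionInitializerBuilder builder =
		BatchingCollectionInitializerBuilder.getBuilder( sessionFactory );
CollectionInitializer initializer = builder.createBatchingCollectionInitializer(
		collectionPersister,      // QueryableCollection
		1,                        // maxBatchSize <= 1 takes the buildNonBatchingLoader() path
		sessionFactory,
		LoadQueryInfluencers.NONE
);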

View File

@ -41,7 +41,7 @@ public class LegacyBatchingCollectionInitializerBuilder extends BatchingCollecti
public static final LegacyBatchingCollectionInitializerBuilder INSTANCE = new LegacyBatchingCollectionInitializerBuilder();
@Override
public CollectionInitializer createRealBatchingCollectionInitializer(
protected CollectionInitializer createRealBatchingCollectionInitializer(
QueryableCollection persister,
int maxBatchSize,
SessionFactoryImplementor factory,
@ -55,7 +55,7 @@ public class LegacyBatchingCollectionInitializerBuilder extends BatchingCollecti
}
@Override
public CollectionInitializer createRealBatchingOneToManyInitializer(
protected CollectionInitializer createRealBatchingOneToManyInitializer(
QueryableCollection persister,
int maxBatchSize,
SessionFactoryImplementor factory,

View File

@ -0,0 +1,47 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.loader.collection.plan;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.loader.collection.BatchingCollectionInitializerBuilder;
import org.hibernate.loader.collection.CollectionInitializer;
import org.hibernate.persister.collection.QueryableCollection;
/**
* Base class for LoadPlan-based BatchingCollectionInitializerBuilder implementations. Mainly we handle the common
* "no batching" case here to use the LoadPlan-based CollectionLoader
*
* @author Gail Badner
*/
public abstract class AbstractBatchingCollectionInitializerBuilder extends BatchingCollectionInitializerBuilder {
@Override
protected CollectionInitializer buildNonBatchingLoader(
QueryableCollection persister,
SessionFactoryImplementor factory,
LoadQueryInfluencers influencers) {
return CollectionLoader.forCollection( persister ).withInfluencers( influencers ).byKey();
}
}

View File

@ -0,0 +1,147 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.loader.collection.plan;
import java.io.Serializable;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.loader.collection.CollectionInitializer;
import org.hibernate.loader.plan2.build.internal.FetchStyleLoadPlanBuildingAssociationVisitationStrategy;
import org.hibernate.loader.plan2.build.spi.MetamodelDrivenLoadPlanBuilder;
import org.hibernate.loader.plan2.exec.internal.AbstractLoadPlanBasedLoader;
import org.hibernate.loader.plan2.exec.query.spi.QueryBuildingParameters;
import org.hibernate.loader.plan2.exec.spi.BasicCollectionLoadQueryDetails;
import org.hibernate.loader.plan2.exec.spi.CollectionLoadQueryDetails;
import org.hibernate.loader.plan2.exec.spi.OneToManyLoadQueryDetails;
import org.hibernate.loader.plan2.spi.LoadPlan;
import org.hibernate.persister.collection.QueryableCollection;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.type.Type;
/**
 * A CollectionInitializer implementation based on LoadPlans.
*
* @author Gail Badner
*/
public abstract class AbstractLoadPlanBasedCollectionInitializer
extends AbstractLoadPlanBasedLoader implements CollectionInitializer {
private static final CoreMessageLogger log = CoreLogging.messageLogger( AbstractLoadPlanBasedCollectionInitializer.class );
private final QueryableCollection collectionPersister;
private final LoadPlan plan;
private final CollectionLoadQueryDetails staticLoadQuery;
public AbstractLoadPlanBasedCollectionInitializer(
QueryableCollection collectionPersister,
QueryBuildingParameters buildingParameters) {
super( collectionPersister.getFactory() );
this.collectionPersister = collectionPersister;
final FetchStyleLoadPlanBuildingAssociationVisitationStrategy strategy =
new FetchStyleLoadPlanBuildingAssociationVisitationStrategy(
collectionPersister.getFactory(),
buildingParameters.getQueryInfluencers(),
buildingParameters.getLockMode() != null
? buildingParameters.getLockMode()
: buildingParameters.getLockOptions().getLockMode()
);
this.plan = MetamodelDrivenLoadPlanBuilder.buildRootCollectionLoadPlan( strategy, collectionPersister );
this.staticLoadQuery = collectionPersister.isOneToMany() ?
OneToManyLoadQueryDetails.makeForBatching(
plan,
buildingParameters,
collectionPersister.getFactory()
) :
BasicCollectionLoadQueryDetails.makeForBatching(
plan,
buildingParameters,
collectionPersister.getFactory()
);
}
@Override
public void initialize(Serializable id, SessionImplementor session)
throws HibernateException {
if ( log.isDebugEnabled() ) {
log.debugf( "Loading collection: %s",
MessageHelper.collectionInfoString( collectionPersister, id, getFactory() ) );
}
final Serializable[] ids = new Serializable[]{id};
try {
final QueryParameters qp = new QueryParameters();
qp.setPositionalParameterTypes( new Type[]{ collectionPersister.getKeyType() } );
qp.setPositionalParameterValues( ids );
qp.setCollectionKeys( ids );
executeLoad(
session,
qp,
staticLoadQuery,
true,
null
);
}
catch ( SQLException sqle ) {
throw getFactory().getSQLExceptionHelper().convert(
sqle,
"could not initialize a collection: " +
MessageHelper.collectionInfoString( collectionPersister, id, getFactory() ),
staticLoadQuery.getSqlStatement()
);
}
log.debug( "Done loading collection" );
}
protected QueryableCollection collectionPersister() {
return collectionPersister;
}
@Override
protected CollectionLoadQueryDetails getStaticLoadQuery() {
return staticLoadQuery;
}
@Override
protected int[] getNamedParameterLocs(String name) {
throw new AssertionFailure("no named parameters");
}
@Override
protected void autoDiscoverTypes(ResultSet rs) {
throw new AssertionFailure("Auto discover types not supported in this loader");
}
}
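
A hedged sketch of how such an initializer is driven (the method and variable names below are illustrative): the only contract method is CollectionInitializer#initialize(Serializable, SessionImplementor); everything else, including the SQL, is fixed by the LoadPlan built in the constructor above.

// Not part of the commit; shows the call shape against the interface used above.
static void initializeCollectionFor(
		CollectionInitializer initializer,
		Serializable ownerKey,
		SessionImplementor session) {
	// wraps ownerKey in QueryParameters (key type, positional value, collection key)
	// and runs the static LoadPlan-generated SQL via executeLoad(...)
	initializer.initialize( ownerKey, session );
}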

View File

@ -0,0 +1,51 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.loader.collection.plan;
import org.hibernate.loader.collection.CollectionInitializer;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.collection.QueryableCollection;
/**
* The base contract for loaders capable of performing batch-fetch loading of collections using multiple foreign key
* values in the SQL <tt>WHERE</tt> clause.
*
* @author Gavin King
* @author Steve Ebersole
*
* @see org.hibernate.loader.collection.BatchingCollectionInitializerBuilder
* @see org.hibernate.loader.collection.BasicCollectionLoader
* @see org.hibernate.loader.collection.OneToManyLoader
*/
public abstract class BatchingCollectionInitializer implements CollectionInitializer {
private final QueryableCollection collectionPersister;
public BatchingCollectionInitializer(QueryableCollection collectionPersister) {
this.collectionPersister = collectionPersister;
}
public CollectionPersister getCollectionPersister() {
return collectionPersister;
}
}
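
To make the javadoc's "multiple foreign key values in the SQL WHERE clause" concrete, a short sketch (the SQL is schematic and the variable names are placeholders): the batch of owner keys comes from the session's BatchFetchQueue and is bound as an IN-list, so several collections are initialized by a single statement.

// Schematic SQL a batching initializer ultimately issues:
//   select ... from child c where c.parent_fk in (?, ?, ?, ?)
// The keys bound to that IN-list are gathered like this (see LegacyBatchingCollectionInitializer below):
Serializable[] batchOfOwnerKeys = session.getPersistenceContext()
		.getBatchFetchQueue()
		.getCollectionBatch( collectionPersister, ownerKey, maxBatchSize );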

View File

@ -0,0 +1,129 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.loader.collection.plan;
import java.sql.ResultSet;
import org.jboss.logging.Logger;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.internal.CoreLogging;
import org.hibernate.loader.plan2.exec.query.spi.QueryBuildingParameters;
import org.hibernate.persister.collection.QueryableCollection;
import org.hibernate.type.Type;
/**
 * Superclass for loaders that initialize collections.
*
* @see org.hibernate.loader.collection.OneToManyLoader
* @see org.hibernate.loader.collection.BasicCollectionLoader
* @author Gavin King
* @author Gail Badner
*/
public class CollectionLoader extends AbstractLoadPlanBasedCollectionInitializer {
private static final Logger log = CoreLogging.logger( CollectionLoader.class );
public static Builder forCollection(QueryableCollection collectionPersister) {
return new Builder( collectionPersister );
}
@Override
protected int[] getNamedParameterLocs(String name) {
return new int[0]; // no named parameters are used by this loader
}
@Override
protected void autoDiscoverTypes(ResultSet rs) {
// nothing to do: this loader does not auto-discover result set types
}
protected static class Builder {
private final QueryableCollection collectionPersister;
private int batchSize = 1;
private LoadQueryInfluencers influencers = LoadQueryInfluencers.NONE;
private Builder(QueryableCollection collectionPersister) {
this.collectionPersister = collectionPersister;
}
public Builder withBatchSize(int batchSize) {
this.batchSize = batchSize;
return this;
}
public Builder withInfluencers(LoadQueryInfluencers influencers) {
this.influencers = influencers;
return this;
}
public CollectionLoader byKey() {
final QueryBuildingParameters buildingParameters = new QueryBuildingParameters() {
@Override
public LoadQueryInfluencers getQueryInfluencers() {
return influencers;
}
@Override
public int getBatchSize() {
return batchSize;
}
@Override
public LockMode getLockMode() {
return LockMode.NONE;
}
@Override
public LockOptions getLockOptions() {
return null;
}
};
return new CollectionLoader( collectionPersister, buildingParameters ) ;
}
}
public CollectionLoader(
QueryableCollection collectionPersister,
QueryBuildingParameters buildingParameters) {
super( collectionPersister, buildingParameters );
if ( log.isDebugEnabled() ) {
log.debugf(
"Static select for collection %s: %s",
collectionPersister.getRole(),
getStaticLoadQuery().getSqlStatement()
);
}
}
protected Type getKeyType() {
return collectionPersister().getKeyType();
}
public String toString() {
return getClass().getName() + '(' + collectionPersister().getRole() + ')';
}
}
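
For comparison with the legacy loaders, a sketch of the fluent construction used by AbstractBatchingCollectionInitializerBuilder above (the variables are placeholders; note that Builder is protected, so this shape is only available to subclasses and same-package code):

CollectionInitializer loader = CollectionLoader.forCollection( collectionPersister )
		.withBatchSize( 1 )                      // the default, kept explicit here
		.withInfluencers( loadQueryInfluencers )
		.byKey();                                // builds the LoadPlan and the static SQL
loader.initialize( ownerKey, session );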

View File

@ -0,0 +1,108 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.loader.collection.plan;
import java.io.Serializable;
import org.hibernate.HibernateException;
import org.hibernate.MappingException;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.loader.Loader;
import org.hibernate.loader.collection.BasicCollectionLoader;
import org.hibernate.loader.collection.CollectionInitializer;
import org.hibernate.loader.collection.OneToManyLoader;
import org.hibernate.persister.collection.QueryableCollection;
/**
 * LoadPlan-based implementation of the legacy batch collection initializer.
*
* @author Steve Ebersole
*/
public class LegacyBatchingCollectionInitializerBuilder extends AbstractBatchingCollectionInitializerBuilder {
public static final LegacyBatchingCollectionInitializerBuilder INSTANCE = new LegacyBatchingCollectionInitializerBuilder();
@Override
public CollectionInitializer createRealBatchingCollectionInitializer(
QueryableCollection persister,
int maxBatchSize,
SessionFactoryImplementor factory,
LoadQueryInfluencers loadQueryInfluencers) throws MappingException {
int[] batchSizes = ArrayHelper.getBatchSizes( maxBatchSize );
Loader[] loaders = new Loader[ batchSizes.length ];
for ( int i = 0; i < batchSizes.length; i++ ) {
loaders[i] = new BasicCollectionLoader( persister, batchSizes[i], factory, loadQueryInfluencers );
}
return new LegacyBatchingCollectionInitializer( persister, batchSizes, loaders );
}
@Override
public CollectionInitializer createRealBatchingOneToManyInitializer(
QueryableCollection persister,
int maxBatchSize,
SessionFactoryImplementor factory,
LoadQueryInfluencers loadQueryInfluencers) throws MappingException {
final int[] batchSizes = ArrayHelper.getBatchSizes( maxBatchSize );
final Loader[] loaders = new Loader[ batchSizes.length ];
for ( int i = 0; i < batchSizes.length; i++ ) {
loaders[i] = new OneToManyLoader( persister, batchSizes[i], factory, loadQueryInfluencers );
}
return new LegacyBatchingCollectionInitializer( persister, batchSizes, loaders );
}
public static class LegacyBatchingCollectionInitializer extends BatchingCollectionInitializer {
private final int[] batchSizes;
private final Loader[] loaders;
public LegacyBatchingCollectionInitializer(
QueryableCollection persister,
int[] batchSizes,
Loader[] loaders) {
super( persister );
this.batchSizes = batchSizes;
this.loaders = loaders;
}
@Override
public void initialize(Serializable id, SessionImplementor session) throws HibernateException {
Serializable[] batch = session.getPersistenceContext().getBatchFetchQueue()
.getCollectionBatch( getCollectionPersister(), id, batchSizes[0] );
for ( int i=0; i<batchSizes.length-1; i++) {
final int smallBatchSize = batchSizes[i];
if ( batch[smallBatchSize-1]!=null ) {
Serializable[] smallBatch = new Serializable[smallBatchSize];
System.arraycopy(batch, 0, smallBatch, 0, smallBatchSize);
loaders[i].loadCollectionBatch( session, smallBatch, getCollectionPersister().getKeyType() );
return; //EARLY EXIT!
}
}
loaders[batchSizes.length-1].loadCollection( session, id, getCollectionPersister().getKeyType() );
}
}
}
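
To make the fall-through in LegacyBatchingCollectionInitializer#initialize explicit, a self-contained restatement of the same descending batch-size selection (a toy method with plain Object keys, not part of the commit): the first batch size whose slot in the queued-key array is filled wins; otherwise the size-1 loader performs a plain single-key load.

static int chooseBatchSize(int[] batchSizes, Object[] queuedKeys) {
	for ( int i = 0; i < batchSizes.length - 1; i++ ) {
		final int smallBatchSize = batchSizes[i];
		if ( queuedKeys[smallBatchSize - 1] != null ) {
			// enough uninitialized collection keys queued to fill this batch
			return smallBatchSize;
		}
	}
	// nothing queued beyond the requested key: fall back to the single-key loader
	return batchSizes[batchSizes.length - 1];
}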

View File

@ -1,37 +1,48 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.loader.entity.plan;
import java.io.Serializable;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.LockOptions;
import org.hibernate.ScrollMode;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.pagination.LimitHandler;
import org.hibernate.dialect.pagination.LimitHelper;
import org.hibernate.dialect.pagination.NoopLimitHandler;
import org.hibernate.engine.jdbc.ColumnNameCache;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.RowSelection;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.spi.TypedValue;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.loader.entity.UniqueEntityLoader;
import org.hibernate.loader.plan2.build.internal.FetchStyleLoadPlanBuildingAssociationVisitationStrategy;
import org.hibernate.loader.plan2.build.spi.MetamodelDrivenLoadPlanBuilder;
import org.hibernate.loader.plan2.exec.internal.AbstractLoadPlanBasedLoader;
import org.hibernate.loader.plan2.exec.query.spi.NamedParameterContext;
import org.hibernate.loader.plan2.exec.query.spi.QueryBuildingParameters;
import org.hibernate.loader.plan2.exec.spi.EntityLoadQueryDetails;
@ -48,10 +59,9 @@ import org.hibernate.type.Type;
*
* @author Steve Ebersole
*/
public abstract class AbstractLoadPlanBasedEntityLoader implements UniqueEntityLoader {
public abstract class AbstractLoadPlanBasedEntityLoader extends AbstractLoadPlanBasedLoader implements UniqueEntityLoader {
private static final CoreMessageLogger log = CoreLogging.messageLogger( AbstractLoadPlanBasedEntityLoader.class );
private final SessionFactoryImplementor factory;
private final OuterJoinLoadable entityPersister;
private final Type uniqueKeyType;
private final String entityName;
@ -59,22 +69,21 @@ public abstract class AbstractLoadPlanBasedEntityLoader implements UniqueEntityL
private final LoadPlan plan;
private final EntityLoadQueryDetails staticLoadQuery;
private ColumnNameCache columnNameCache;
public AbstractLoadPlanBasedEntityLoader(
OuterJoinLoadable entityPersister,
SessionFactoryImplementor factory,
String[] uniqueKeyColumnNames,
Type uniqueKeyType,
QueryBuildingParameters buildingParameters) {
super( factory );
this.entityPersister = entityPersister;
this.factory = factory;
this.uniqueKeyType = uniqueKeyType;
this.entityName = entityPersister.getEntityName();
final FetchStyleLoadPlanBuildingAssociationVisitationStrategy strategy = new FetchStyleLoadPlanBuildingAssociationVisitationStrategy(
factory,
buildingParameters.getQueryInfluencers()
buildingParameters.getQueryInfluencers(),
buildingParameters.getLockMode()
);
this.plan = MetamodelDrivenLoadPlanBuilder.buildRootEntityLoadPlan( strategy, entityPersister );
@ -86,10 +95,7 @@ public abstract class AbstractLoadPlanBasedEntityLoader implements UniqueEntityL
);
}
protected SessionFactoryImplementor getFactory() {
return factory;
}
@Override
protected EntityLoadQueryDetails getStaticLoadQuery() {
return staticLoadQuery;
}
@ -135,7 +141,7 @@ public abstract class AbstractLoadPlanBasedEntityLoader implements UniqueEntityL
);
}
catch ( SQLException sqle ) {
throw factory.getSQLExceptionHelper().convert(
throw getFactory().getSQLExceptionHelper().convert(
sqle,
"could not load an entity batch: " + MessageHelper.infoString( entityPersister, ids, getFactory() ),
staticLoadQuery.getSqlStatement()
@ -156,8 +162,8 @@ public abstract class AbstractLoadPlanBasedEntityLoader implements UniqueEntityL
@Override
public Object load(Serializable id, Object optionalObject, SessionImplementor session, LockOptions lockOptions) {
Object result = null;
final Object result;
try {
final QueryParameters qp = new QueryParameters();
qp.setPositionalParameterTypes( new Type[] { entityPersister.getIdentifierType() } );
@ -177,13 +183,13 @@ public abstract class AbstractLoadPlanBasedEntityLoader implements UniqueEntityL
result = extractEntityResult( results );
}
catch ( SQLException sqle ) {
throw factory.getSQLExceptionHelper().convert(
throw getFactory().getSQLExceptionHelper().convert(
sqle,
"could not load an entity: " + MessageHelper.infoString(
entityPersister,
id,
entityPersister.getIdentifierType(),
factory
getFactory()
),
staticLoadQuery.getSqlStatement()
);
@ -193,83 +199,6 @@ public abstract class AbstractLoadPlanBasedEntityLoader implements UniqueEntityL
return result;
}
protected List executeLoad(
SessionImplementor session,
QueryParameters queryParameters,
EntityLoadQueryDetails loadQueryDetails,
boolean returnProxies,
ResultTransformer forcedResultTransformer) throws SQLException {
final List<AfterLoadAction> afterLoadActions = new ArrayList<AfterLoadAction>();
return executeLoad(
session,
queryParameters,
loadQueryDetails,
returnProxies,
forcedResultTransformer,
afterLoadActions
);
}
protected List executeLoad(
SessionImplementor session,
QueryParameters queryParameters,
EntityLoadQueryDetails loadQueryDetails,
boolean returnProxies,
ResultTransformer forcedResultTransformer,
List<AfterLoadAction> afterLoadActions) throws SQLException {
final PersistenceContext persistenceContext = session.getPersistenceContext();
final boolean defaultReadOnlyOrig = persistenceContext.isDefaultReadOnly();
if ( queryParameters.isReadOnlyInitialized() ) {
// The read-only/modifiable mode for the query was explicitly set.
// Temporarily set the default read-only/modifiable setting to the query's setting.
persistenceContext.setDefaultReadOnly( queryParameters.isReadOnly() );
}
else {
// The read-only/modifiable setting for the query was not initialized.
// Use the default read-only/modifiable from the persistence context instead.
queryParameters.setReadOnly( persistenceContext.isDefaultReadOnly() );
}
persistenceContext.beforeLoad();
try {
List results = null;
final String sql = loadQueryDetails.getSqlStatement();
SqlStatementWrapper wrapper = null;
try {
wrapper = executeQueryStatement( sql, queryParameters, false, afterLoadActions, session );
results = loadQueryDetails.getResultSetProcessor().extractResults(
wrapper.getResultSet(),
session,
queryParameters,
new NamedParameterContext() {
@Override
public int[] getNamedParameterLocations(String name) {
return AbstractLoadPlanBasedEntityLoader.this.getNamedParameterLocs( name );
}
},
returnProxies,
queryParameters.isReadOnly(),
forcedResultTransformer,
afterLoadActions
);
}
finally {
if ( wrapper != null ) {
session.getTransactionCoordinator().getJdbcCoordinator().release(
wrapper.getResultSet(),
wrapper.getStatement()
);
}
persistenceContext.afterLoad();
}
persistenceContext.initializeNonLazyCollections();
return results;
}
finally {
// Restore the original default
persistenceContext.setDefaultReadOnly( defaultReadOnlyOrig );
}
}
protected Object extractEntityResult(List results) {
if ( results.size() == 0 ) {
return null;
@ -352,378 +281,11 @@ public abstract class AbstractLoadPlanBasedEntityLoader implements UniqueEntityL
}
}
protected SqlStatementWrapper executeQueryStatement(
final QueryParameters queryParameters,
final boolean scroll,
List<AfterLoadAction> afterLoadActions,
final SessionImplementor session) throws SQLException {
return executeQueryStatement( staticLoadQuery.getSqlStatement(), queryParameters, scroll, afterLoadActions, session );
}
protected SqlStatementWrapper executeQueryStatement(
String sqlStatement,
QueryParameters queryParameters,
boolean scroll,
List<AfterLoadAction> afterLoadActions,
SessionImplementor session) throws SQLException {
// Processing query filters.
queryParameters.processFilters( sqlStatement, session );
// Applying LIMIT clause.
final LimitHandler limitHandler = getLimitHandler(
queryParameters.getFilteredSQL(),
queryParameters.getRowSelection()
);
String sql = limitHandler.getProcessedSql();
// Adding locks and comments.
sql = preprocessSQL( sql, queryParameters, getFactory().getDialect(), afterLoadActions );
final PreparedStatement st = prepareQueryStatement( sql, queryParameters, limitHandler, scroll, session );
return new SqlStatementWrapper( st, getResultSet( st, queryParameters.getRowSelection(), limitHandler, queryParameters.hasAutoDiscoverScalarTypes(), session ) );
}
/**
* Build LIMIT clause handler applicable for given selection criteria. Returns {@link org.hibernate.dialect.pagination.NoopLimitHandler} delegate
* if dialect does not support LIMIT expression or processed query does not use pagination.
*
* @param sql Query string.
* @param selection Selection criteria.
* @return LIMIT clause delegate.
*/
protected LimitHandler getLimitHandler(String sql, RowSelection selection) {
final LimitHandler limitHandler = getFactory().getDialect().buildLimitHandler( sql, selection );
return LimitHelper.useLimit( limitHandler, selection ) ? limitHandler : new NoopLimitHandler( sql, selection );
}
private String preprocessSQL(
String sql,
QueryParameters queryParameters,
Dialect dialect,
List<AfterLoadAction> afterLoadActions) {
return getFactory().getSettings().isCommentsEnabled()
? prependComment( sql, queryParameters )
: sql;
}
private String prependComment(String sql, QueryParameters parameters) {
final String comment = parameters.getComment();
if ( comment == null ) {
return sql;
}
else {
return "/* " + comment + " */ " + sql;
}
}
/**
* Obtain a <tt>PreparedStatement</tt> with all parameters pre-bound.
* Bind JDBC-style <tt>?</tt> parameters, named parameters, and
* limit parameters.
*/
protected final PreparedStatement prepareQueryStatement(
final String sql,
final QueryParameters queryParameters,
final LimitHandler limitHandler,
final boolean scroll,
final SessionImplementor session) throws SQLException, HibernateException {
final Dialect dialect = getFactory().getDialect();
final RowSelection selection = queryParameters.getRowSelection();
final boolean useLimit = LimitHelper.useLimit( limitHandler, selection );
final boolean hasFirstRow = LimitHelper.hasFirstRow( selection );
final boolean useLimitOffset = hasFirstRow && useLimit && limitHandler.supportsLimitOffset();
final boolean callable = queryParameters.isCallable();
final ScrollMode scrollMode = getScrollMode( scroll, hasFirstRow, useLimitOffset, queryParameters );
final PreparedStatement st = session.getTransactionCoordinator().getJdbcCoordinator()
.getStatementPreparer().prepareQueryStatement( sql, callable, scrollMode );
try {
int col = 1;
//TODO: can we limit stored procedures ?!
col += limitHandler.bindLimitParametersAtStartOfQuery( st, col );
if (callable) {
col = dialect.registerResultSetOutParameter( (CallableStatement)st, col );
}
col += bindParameterValues( st, queryParameters, col, session );
col += limitHandler.bindLimitParametersAtEndOfQuery( st, col );
limitHandler.setMaxRows( st );
if ( selection != null ) {
if ( selection.getTimeout() != null ) {
st.setQueryTimeout( selection.getTimeout() );
}
if ( selection.getFetchSize() != null ) {
st.setFetchSize( selection.getFetchSize() );
}
}
// handle lock timeout...
final LockOptions lockOptions = queryParameters.getLockOptions();
if ( lockOptions != null ) {
if ( lockOptions.getTimeOut() != LockOptions.WAIT_FOREVER ) {
if ( !dialect.supportsLockTimeouts() ) {
if ( log.isDebugEnabled() ) {
log.debugf(
"Lock timeout [%s] requested but dialect reported to not support lock timeouts",
lockOptions.getTimeOut()
);
}
}
else if ( dialect.isLockTimeoutParameterized() ) {
st.setInt( col++, lockOptions.getTimeOut() );
}
}
}
if ( log.isTraceEnabled() ) {
log.tracev( "Bound [{0}] parameters total", col );
}
}
catch ( SQLException sqle ) {
session.getTransactionCoordinator().getJdbcCoordinator().release( st );
throw sqle;
}
catch ( HibernateException he ) {
session.getTransactionCoordinator().getJdbcCoordinator().release( st );
throw he;
}
return st;
}
protected ScrollMode getScrollMode(boolean scroll, boolean hasFirstRow, boolean useLimitOffSet, QueryParameters queryParameters) {
final boolean canScroll = getFactory().getSettings().isScrollableResultSetsEnabled();
if ( canScroll ) {
if ( scroll ) {
return queryParameters.getScrollMode();
}
if ( hasFirstRow && !useLimitOffSet ) {
return ScrollMode.SCROLL_INSENSITIVE;
}
}
return null;
}
/**
* Bind all parameter values into the prepared statement in preparation
* for execution.
*
* @param statement The JDBC prepared statement
* @param queryParameters The encapsulation of the parameter values to be bound.
* @param startIndex The position from which to start binding parameter values.
* @param session The originating session.
* @return The number of JDBC bind positions actually bound during this method execution.
* @throws SQLException Indicates problems performing the binding.
*/
protected int bindParameterValues(
PreparedStatement statement,
QueryParameters queryParameters,
int startIndex,
SessionImplementor session) throws SQLException {
int span = 0;
span += bindPositionalParameters( statement, queryParameters, startIndex, session );
span += bindNamedParameters( statement, queryParameters.getNamedParameters(), startIndex + span, session );
return span;
}
/**
* Bind positional parameter values to the JDBC prepared statement.
* <p/>
* Positional parameters are those specified by JDBC-style ? parameters
* in the source query. It is (currently) expected that these come
* before any named parameters in the source query.
*
* @param statement The JDBC prepared statement
* @param queryParameters The encapsulation of the parameter values to be bound.
* @param startIndex The position from which to start binding parameter values.
* @param session The originating session.
* @return The number of JDBC bind positions actually bound during this method execution.
* @throws SQLException Indicates problems performing the binding.
* @throws org.hibernate.HibernateException Indicates problems delegating binding to the types.
*/
protected int bindPositionalParameters(
final PreparedStatement statement,
final QueryParameters queryParameters,
final int startIndex,
final SessionImplementor session) throws SQLException, HibernateException {
final Object[] values = queryParameters.getFilteredPositionalParameterValues();
final Type[] types = queryParameters.getFilteredPositionalParameterTypes();
int span = 0;
for ( int i = 0; i < values.length; i++ ) {
types[i].nullSafeSet( statement, values[i], startIndex + span, session );
span += types[i].getColumnSpan( getFactory() );
}
return span;
}
/**
* Bind named parameters to the JDBC prepared statement.
* <p/>
* This is a generic implementation, the problem being that in the
* general case we do not know enough information about the named
* parameters to perform this in a complete manner here. Thus this
* is generally overridden on subclasses allowing named parameters to
* apply the specific behavior. The most usual limitation here is that
* we need to assume the type span is always one...
*
* @param statement The JDBC prepared statement
* @param namedParams A map of parameter names to values
* @param startIndex The position from which to start binding parameter values.
* @param session The originating session.
* @return The number of JDBC bind positions actually bound during this method execution.
* @throws SQLException Indicates problems performing the binding.
* @throws org.hibernate.HibernateException Indicates problems delegating binding to the types.
*/
protected int bindNamedParameters(
final PreparedStatement statement,
final Map namedParams,
final int startIndex,
final SessionImplementor session) throws SQLException, HibernateException {
if ( namedParams != null ) {
// assumes that types are all of span 1
final Iterator itr = namedParams.entrySet().iterator();
final boolean debugEnabled = log.isDebugEnabled();
int result = 0;
while ( itr.hasNext() ) {
final Map.Entry e = (Map.Entry) itr.next();
final String name = (String) e.getKey();
final TypedValue typedval = (TypedValue) e.getValue();
final int[] locs = getNamedParameterLocs( name );
for ( int loc : locs ) {
if ( debugEnabled ) {
log.debugf(
"bindNamedParameters() %s -> %s [%s]",
typedval.getValue(),
name,
loc + startIndex
);
}
typedval.getType().nullSafeSet( statement, typedval.getValue(), loc + startIndex, session );
}
result += locs.length;
}
return result;
}
else {
return 0;
}
}
public int[] getNamedParameterLocs(String name) {
protected int[] getNamedParameterLocs(String name) {
throw new AssertionFailure("no named parameters");
}
/**
* Execute given <tt>PreparedStatement</tt>, advance to the first result and return SQL <tt>ResultSet</tt>.
*/
protected final ResultSet getResultSet(
final PreparedStatement st,
final RowSelection selection,
final LimitHandler limitHandler,
final boolean autodiscovertypes,
final SessionImplementor session)
throws SQLException, HibernateException {
try {
ResultSet rs = session.getTransactionCoordinator().getJdbcCoordinator().getResultSetReturn().extract( st );
rs = wrapResultSetIfEnabled( rs , session );
if ( !limitHandler.supportsLimitOffset() || !LimitHelper.useLimit( limitHandler, selection ) ) {
advance( rs, selection );
}
if ( autodiscovertypes ) {
autoDiscoverTypes( rs );
}
return rs;
}
catch ( SQLException sqle ) {
session.getTransactionCoordinator().getJdbcCoordinator().release( st );
throw sqle;
}
}
/**
* Advance the cursor to the first required row of the <tt>ResultSet</tt>
*/
protected void advance(final ResultSet rs, final RowSelection selection) throws SQLException {
final int firstRow = LimitHelper.getFirstRow( selection );
if ( firstRow != 0 ) {
if ( getFactory().getSettings().isScrollableResultSetsEnabled() ) {
// we can go straight to the first required row
rs.absolute( firstRow );
}
else {
// we need to step through the rows one row at a time (slow)
for ( int m = 0; m < firstRow; m++ ) {
rs.next();
}
}
}
}
protected void autoDiscoverTypes(ResultSet rs) {
throw new AssertionFailure("Auto discover types not supported in this loader");
}
private synchronized ResultSet wrapResultSetIfEnabled(final ResultSet rs, final SessionImplementor session) {
// synchronized to avoid multi-thread access issues; defined as method synch to avoid
// potential deadlock issues due to nature of code.
if ( session.getFactory().getSettings().isWrapResultSetsEnabled() ) {
try {
if ( log.isDebugEnabled() ) {
log.debugf( "Wrapping result set [%s]", rs );
}
return session.getFactory()
.getJdbcServices()
.getResultSetWrapper().wrap( rs, retreiveColumnNameToIndexCache( rs ) );
}
catch(SQLException e) {
log.unableToWrapResultSet( e );
return rs;
}
}
else {
return rs;
}
}
private ColumnNameCache retreiveColumnNameToIndexCache(ResultSet rs) throws SQLException {
if ( columnNameCache == null ) {
log.trace( "Building columnName->columnIndex cache" );
columnNameCache = new ColumnNameCache( rs.getMetaData().getColumnCount() );
}
return columnNameCache;
}
/**
* Wrapper class for {@link java.sql.Statement} and associated {@link ResultSet}.
*/
protected static class SqlStatementWrapper {
private final Statement statement;
private final ResultSet resultSet;
private SqlStatementWrapper(Statement statement, ResultSet resultSet) {
this.resultSet = resultSet;
this.statement = statement;
}
public ResultSet getResultSet() {
return resultSet;
}
public Statement getStatement() {
return statement;
}
}
}
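
Most of the JDBC plumbing removed above now lives in AbstractLoadPlanBasedLoader. For readers of the slimmed-down class, a toy restatement of the bind-position arithmetic it relied on (statement, values, types, startIndex and session are placeholders; nullSafeSet and getColumnSpan are the existing Type contract calls): positional parameters advance the JDBC index by each type's column span, while named parameters are assumed to span a single column.

int span = 0;
for ( int i = 0; i < values.length; i++ ) {
	// write the value starting at the current JDBC position
	types[i].nullSafeSet( statement, values[i], startIndex + span, session );
	// a composite type with getColumnSpan() == 2 consumes two '?' positions
	span += types[i].getColumnSpan( sessionFactory );
}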

View File

@ -42,7 +42,6 @@ import org.hibernate.type.Type;
* Can handle batch-loading as well as non-pk, unique-key loading,
* <p/>
* Much is ultimately delegated to its superclass, AbstractLoadPlanBasedEntityLoader. However:
* todo How much of AbstractLoadPlanBasedEntityLoader is actually needed?
*
* Loads an entity instance using outerjoin fetching to fetch associated entities.
* <br>

View File

@ -23,6 +23,7 @@
*/
package org.hibernate.loader.plan2.build.internal;
import org.hibernate.LockMode;
import org.hibernate.engine.FetchStrategy;
import org.hibernate.engine.FetchStyle;
import org.hibernate.engine.FetchTiming;
@ -47,8 +48,9 @@ public class CascadeStyleLoadPlanBuildingAssociationVisitationStrategy
public CascadeStyleLoadPlanBuildingAssociationVisitationStrategy(
CascadingAction cascadeActionToMatch,
SessionFactoryImplementor sessionFactory,
LoadQueryInfluencers loadQueryInfluencers) {
super( sessionFactory, loadQueryInfluencers );
LoadQueryInfluencers loadQueryInfluencers,
LockMode lockMode) {
super( sessionFactory, loadQueryInfluencers, lockMode );
this.cascadeActionToMatch = cascadeActionToMatch;
}

View File

@ -26,6 +26,7 @@ package org.hibernate.loader.plan2.build.internal;
import org.jboss.logging.Logger;
import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.engine.FetchStrategy;
import org.hibernate.engine.FetchStyle;
import org.hibernate.engine.FetchTiming;
@ -50,14 +51,17 @@ public class FetchStyleLoadPlanBuildingAssociationVisitationStrategy
private static final Logger log = CoreLogging.logger( FetchStyleLoadPlanBuildingAssociationVisitationStrategy.class );
private final LoadQueryInfluencers loadQueryInfluencers;
private final LockMode lockMode;
private Return rootReturn;
public FetchStyleLoadPlanBuildingAssociationVisitationStrategy(
SessionFactoryImplementor sessionFactory,
LoadQueryInfluencers loadQueryInfluencers) {
LoadQueryInfluencers loadQueryInfluencers,
LockMode lockMode) {
super( sessionFactory );
this.loadQueryInfluencers = loadQueryInfluencers;
this.lockMode = lockMode;
}
@Override
@ -106,6 +110,10 @@ public class FetchStyleLoadPlanBuildingAssociationVisitationStrategy
protected FetchStrategy adjustJoinFetchIfNeeded(
AssociationAttributeDefinition attributeDefinition,
FetchStrategy fetchStrategy) {
if ( lockMode.greaterThan( LockMode.READ ) ) {
return new FetchStrategy( fetchStrategy.getTiming(), FetchStyle.SELECT );
}
final Integer maxFetchDepth = sessionFactory().getSettings().getMaximumFetchDepth();
if ( maxFetchDepth != null && currentDepth() > maxFetchDepth ) {
return new FetchStrategy( fetchStrategy.getTiming(), FetchStyle.SELECT );
@ -121,7 +129,7 @@ public class FetchStyleLoadPlanBuildingAssociationVisitationStrategy
@Override
protected boolean isTooManyCollections() {
return false;
return CollectionReturn.class.isInstance( rootReturn );
}
// @Override
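
A standalone restatement of the guard order now used by adjustJoinFetchIfNeeded (a toy method, not part of the commit): an upgrade lock on the root, or exceeding the configured maximum fetch depth, demotes a join fetch to a select fetch while keeping its timing.

static FetchStyle resolveJoinFetchStyle(
		LockMode lockMode,
		int currentDepth,
		Integer maxFetchDepth,
		FetchStyle requestedStyle) {
	if ( lockMode.greaterThan( LockMode.READ ) ) {
		// pessimistic lock requested: lock only the root table, fetch associations separately
		return FetchStyle.SELECT;
	}
	if ( maxFetchDepth != null && currentDepth > maxFetchDepth ) {
		// fetch-depth limit reached
		return FetchStyle.SELECT;
	}
	return requestedStyle;
}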

View File

@ -35,6 +35,7 @@ import org.hibernate.loader.plan2.spi.EntityReference;
import org.hibernate.loader.plan2.spi.FetchSource;
import org.hibernate.persister.walking.spi.AssociationAttributeDefinition;
import org.hibernate.persister.walking.spi.AttributeDefinition;
import org.hibernate.persister.walking.spi.CompositionDefinition;
import org.hibernate.persister.walking.spi.WalkingException;
import org.hibernate.type.CompositeType;
import org.hibernate.type.Type;
@ -89,14 +90,14 @@ public abstract class AbstractCompositeFetch extends AbstractExpandingFetchSourc
}
protected CompositeFetch createCompositeFetch(
CompositeType compositeType,
CompositionDefinition compositionDefinition,
ExpandingCompositeQuerySpace compositeQuerySpace) {
return new NestedCompositeFetchImpl(
this,
compositeType,
compositionDefinition.getType(),
compositeQuerySpace,
allowCollectionFetches,
getPropertyPath()
getPropertyPath().append( compositionDefinition.getName() )
);
}

View File

@ -31,6 +31,7 @@ import org.hibernate.loader.plan2.spi.EntityIdentifierDescription;
import org.hibernate.loader.plan2.spi.EntityReference;
import org.hibernate.loader.plan2.spi.Join;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.walking.spi.CompositionDefinition;
import org.hibernate.persister.walking.spi.EncapsulatedEntityIdentifierDefinition;
import org.hibernate.persister.walking.spi.EntityIdentifierDefinition;
import org.hibernate.type.CompositeType;
@ -112,14 +113,14 @@ public abstract class AbstractEntityReference extends AbstractExpandingFetchSour
}
protected CompositeFetch createCompositeFetch(
CompositeType compositeType,
CompositionDefinition compositionDefinition,
ExpandingCompositeQuerySpace compositeQuerySpace) {
return new CompositeFetchImpl(
this,
compositeType,
compositionDefinition.getType(),
compositeQuerySpace,
true,
getPropertyPath()
getPropertyPath().append( compositionDefinition.getName() )
);
}
}

View File

@ -174,7 +174,7 @@ public abstract class AbstractExpandingFetchSource implements ExpandingFetchSour
}
protected abstract CompositeFetch createCompositeFetch(
CompositeType compositeType,
CompositionDefinition compositeType,
ExpandingCompositeQuerySpace compositeQuerySpace);
@Override
@ -187,7 +187,7 @@ public abstract class AbstractExpandingFetchSource implements ExpandingFetchSour
loadPlanBuildingContext.getQuerySpaces().generateImplicitUid()
);
final CompositeFetch fetch = createCompositeFetch(
attributeDefinition.getType(),
attributeDefinition,
(ExpandingCompositeQuerySpace) join.getRightHandSide()
);
addFetch( fetch );

View File

@ -23,22 +23,28 @@
*/
package org.hibernate.loader.plan2.build.internal.spaces;
import org.hibernate.engine.internal.JoinHelper;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.loader.plan2.build.spi.AbstractQuerySpace;
import org.hibernate.loader.plan2.build.spi.LoadPlanBuildingContext;
import org.hibernate.loader.plan2.spi.CollectionQuerySpace;
import org.hibernate.loader.plan2.spi.Join;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.collection.CollectionPropertyMapping;
import org.hibernate.persister.collection.CollectionPropertyNames;
import org.hibernate.persister.collection.QueryableCollection;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.PropertyMapping;
import org.hibernate.persister.entity.Queryable;
import org.hibernate.type.CompositeType;
import org.hibernate.type.EntityType;
/**
* @author Steve Ebersole
*/
public class CollectionQuerySpaceImpl extends AbstractQuerySpace implements CollectionQuerySpace {
private final CollectionPersister persister;
private final CollectionPropertyMapping propertyMapping;
public CollectionQuerySpaceImpl(
CollectionPersister persister,
@ -48,6 +54,7 @@ public class CollectionQuerySpaceImpl extends AbstractQuerySpace implements Coll
SessionFactoryImplementor sessionFactory) {
super( uid, Disposition.COLLECTION, querySpaces, canJoinsBeRequired, sessionFactory );
this.persister = persister;
this.propertyMapping = new CollectionPropertyMapping( (QueryableCollection) persister );
}
@Override
@ -57,7 +64,7 @@ public class CollectionQuerySpaceImpl extends AbstractQuerySpace implements Coll
@Override
public PropertyMapping getPropertyMapping() {
return (PropertyMapping) persister;
return propertyMapping;
}
public JoinImpl addIndexEntityJoin(
@ -76,11 +83,10 @@ public class CollectionQuerySpaceImpl extends AbstractQuerySpace implements Coll
final JoinImpl join = new JoinImpl(
this,
"index",
CollectionPropertyNames.COLLECTION_INDICES,
entityQuerySpace,
// not sure this 'rhsColumnNames' bit is correct...
( (Queryable) indexPersister ).getKeyColumnNames(),
null,
Helper.INSTANCE.determineRhsColumnNames( (EntityType) persister.getIndexType(), sessionFactory() ),
persister.getIndexType(),
required
);
internalGetJoins().add( join );
@ -107,10 +113,10 @@ public class CollectionQuerySpaceImpl extends AbstractQuerySpace implements Coll
final JoinImpl join = new JoinImpl(
this,
"index",
CollectionPropertyNames.COLLECTION_INDICES,
compositeQuerySpace,
null,
null,
( (QueryableCollection) persister ).getIndexColumnNames(),
persister.getIndexType(),
canJoinsBeRequired()
);
internalGetJoins().add( join );
@ -134,10 +140,10 @@ public class CollectionQuerySpaceImpl extends AbstractQuerySpace implements Coll
final JoinImpl join = new JoinImpl(
this,
// collection persister maps its elements (through its PropertyMapping contract) as non-prefixed
"id",
CollectionPropertyNames.COLLECTION_ELEMENTS,
entityQuerySpace,
( (Queryable) elementPersister ).getKeyColumnNames(),
null,
Helper.INSTANCE.determineRhsColumnNames( (EntityType) persister.getElementType(), sessionFactory() ),
persister.getElementType(),
canJoinsBeRequired()
);
internalGetJoins().add( join );
@ -166,10 +172,10 @@ public class CollectionQuerySpaceImpl extends AbstractQuerySpace implements Coll
final JoinImpl join = new JoinImpl(
this,
// collection persister maps its elements (through its PropertyMapping contract) as non-prefixed
"elements",
CollectionPropertyNames.COLLECTION_ELEMENTS,
compositeQuerySpace,
null,
null,
( (QueryableCollection) persister ).getElementColumnNames(),
compositeType,
canJoinsBeRequired()
);
internalGetJoins().add( join );

View File

@ -91,11 +91,11 @@ public class CompositePropertyMapping implements PropertyMapping {
throw new NullPointerException( "Provided property name cannot be null" );
}
if ( propertyName.contains( "." ) ) {
throw new IllegalArgumentException(
"Provided property name cannot contain paths (dots) [" + propertyName + "]"
);
}
//if ( propertyName.contains( "." ) ) {
// throw new IllegalArgumentException(
// "Provided property name cannot contain paths (dots) [" + propertyName + "]"
// );
//}
}
/**

View File

@ -78,7 +78,11 @@ public class CompositeQuerySpaceImpl extends AbstractQuerySpace implements Expan
final boolean required = canJoinsBeRequired() && !compositionDefinition.isNullable();
final CompositeQuerySpaceImpl rhs = new CompositeQuerySpaceImpl(
new CompositePropertyMapping(
compositionDefinition.getType(),
compositeSubPropertyMapping,
compositionDefinition.getName()
),
querySpaceUid,
getQuerySpaces(),
required,
@ -90,8 +94,8 @@ public class CompositeQuerySpaceImpl extends AbstractQuerySpace implements Expan
this,
propertyPath,
rhs,
null,
null,
getPropertyMapping().toColumns( compositionDefinition.getName() ),
compositionDefinition.getType(),
required
);
internalGetJoins().add( join );
@ -123,11 +127,8 @@ public class CompositeQuerySpaceImpl extends AbstractQuerySpace implements Expan
this,
propertyPath,
rhs,
Helper.INSTANCE.determineRhsColumnNames(
(EntityType) attributeDefinition.getType(),
sessionFactory()
),
(EntityType) attributeDefinition.getType(),
Helper.INSTANCE.determineRhsColumnNames( (EntityType) attributeDefinition.getType(), sessionFactory() ),
attributeDefinition.getType(),
required
);
internalGetJoins().add( join );
@ -156,7 +157,7 @@ public class CompositeQuerySpaceImpl extends AbstractQuerySpace implements Expan
attributeDefinition.getName(),
rhs,
( (CollectionType) attributeDefinition.getType() ).getAssociatedJoinable( sessionFactory() ).getKeyColumnNames(),
(AssociationType) attributeDefinition.getType(),
attributeDefinition.getType(),
required
);
internalGetJoins().add( join );

View File

@ -29,6 +29,7 @@ import org.hibernate.loader.plan2.build.spi.ExpandingEntityQuerySpace;
import org.hibernate.loader.plan2.spi.Join;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.Loadable;
import org.hibernate.persister.entity.PropertyMapping;
import org.hibernate.persister.entity.Queryable;
import org.hibernate.persister.walking.spi.AttributeDefinition;
@ -92,8 +93,8 @@ public class EntityQuerySpaceImpl extends AbstractQuerySpace implements Expandin
this,
compositionDefinition.getName(),
rhs,
null,
null,
( (PropertyMapping) persister ).toColumns( compositionDefinition.getName() ),
compositionDefinition.getType(),
required
);
internalGetJoins().add( join );
@ -127,7 +128,7 @@ public class EntityQuerySpaceImpl extends AbstractQuerySpace implements Expandin
(EntityType) attribute.getType(),
sessionFactory()
),
(AssociationType) attribute.getType(),
attribute.getType(),
required
);
internalGetJoins().add( join );
@ -156,7 +157,7 @@ public class EntityQuerySpaceImpl extends AbstractQuerySpace implements Expandin
attributeDefinition.getName(),
rhs,
( (CollectionType) attributeDefinition.getType() ).getAssociatedJoinable( sessionFactory() ).getKeyColumnNames(),
(AssociationType) attributeDefinition.getType(),
attributeDefinition.getType(),
required
);
internalGetJoins().add( join );
@ -181,9 +182,9 @@ public class EntityQuerySpaceImpl extends AbstractQuerySpace implements Expandin
final JoinImpl join = new JoinImpl(
this,
"id",
EntityPersister.ENTITY_ID,
rhs,
null,
( (Loadable) persister ).getIdentifierColumnNames(),
null,
canJoinsBeRequired()
);

View File

@ -26,7 +26,7 @@ package org.hibernate.loader.plan2.build.internal.spaces;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.loader.plan2.spi.JoinDefinedByMetadata;
import org.hibernate.loader.plan2.spi.QuerySpace;
import org.hibernate.type.AssociationType;
import org.hibernate.type.Type;
/**
* @author Steve Ebersole
@ -38,22 +38,22 @@ public class JoinImpl implements JoinDefinedByMetadata {
private final String lhsPropertyName;
private final String[] rhsColumnNames;
private final boolean rightHandSideOptional;
private final AssociationType joinedAssociationPropertyType;
private final boolean rightHandSideRequired;
private final Type joinedPropertyType;
public JoinImpl(
QuerySpace leftHandSide,
String lhsPropertyName,
QuerySpace rightHandSide,
String[] rhsColumnNames,
AssociationType attributeType,
boolean rightHandSideOptional) {
Type propertyType,
boolean rightHandSideRequired) {
this.leftHandSide = leftHandSide;
this.lhsPropertyName = lhsPropertyName;
this.rightHandSide = rightHandSide;
this.rhsColumnNames = rhsColumnNames;
this.rightHandSideOptional = rightHandSideOptional;
this.joinedAssociationPropertyType = attributeType;
this.rightHandSideRequired = rightHandSideRequired;
this.joinedPropertyType = propertyType;
if ( StringHelper.isEmpty( lhsPropertyName ) ) {
throw new IllegalArgumentException( "Incoming 'lhsPropertyName' parameter was empty" );
}
@ -71,12 +71,12 @@ public class JoinImpl implements JoinDefinedByMetadata {
@Override
public boolean isRightHandSideRequired() {
return rightHandSideOptional;
return rightHandSideRequired;
}
@Override
public String[] resolveAliasedLeftHandSideJoinConditionColumns(String leftHandSideTableAlias) {
return getLeftHandSide().getPropertyMapping().toColumns( leftHandSideTableAlias, getJoinedAssociationPropertyName() );
return getLeftHandSide().getPropertyMapping().toColumns( leftHandSideTableAlias, getJoinedPropertyName() );
}
@Override
@ -99,12 +99,12 @@ public class JoinImpl implements JoinDefinedByMetadata {
}
@Override
public String getJoinedAssociationPropertyName() {
public String getJoinedPropertyName() {
return lhsPropertyName;
}
@Override
public AssociationType getJoinedAssociationPropertyType() {
return joinedAssociationPropertyType;
public Type getJoinedPropertyType() {
return joinedPropertyType;
}
}
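
A construction sketch against the widened JoinImpl contract above (all arguments are placeholders modeled on the CollectionQuerySpaceImpl and EntityQuerySpaceImpl call sites in this commit): joins are no longer limited to AssociationType attributes, so composite, index and identifier joins can carry their real right-hand-side columns and property Type.

JoinDefinedByMetadata elementJoin = new JoinImpl(
		collectionQuerySpace,                          // left-hand side
		CollectionPropertyNames.COLLECTION_ELEMENTS,   // joined property name
		elementEntityQuerySpace,                       // right-hand side
		elementKeyColumnNames,                         // rhs columns, no longer passed as null
		collectionPersister.getElementType(),          // plain Type rather than AssociationType
		true                                           // right-hand side required
);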

View File

@ -339,8 +339,18 @@ public abstract class AbstractLoadPlanBuildingAssociationVisitationStrategy
pushToCollectionStack( collectionReturn );
addRootReturn( collectionReturn );
//if ( collectionDefinition.getCollectionPersister().isOneToMany() ) {
associationKeyRegistered(
new AssociationKey(
( (Joinable) collectionDefinition.getCollectionPersister() ).getTableName(),
( (Joinable) collectionDefinition.getCollectionPersister() ).getKeyColumnNames()
)
);
//}
// also add an AssociationKey for the root so we can later on recognize circular references back to the root.
// for a collection, the circularity would always be to an entity element...
/*
if ( collectionReturn.getElementGraph() != null ) {
if ( EntityReference.class.isInstance( collectionReturn.getElementGraph() ) ) {
final EntityReference entityReference = (EntityReference) collectionReturn.getElementGraph();
@ -350,6 +360,7 @@ public abstract class AbstractLoadPlanBuildingAssociationVisitationStrategy
);
}
}
*/
}
protected boolean supportsRootCollectionReturns() {
@ -375,7 +386,7 @@ public abstract class AbstractLoadPlanBuildingAssociationVisitationStrategy
public void startingCollectionIndex(CollectionIndexDefinition indexDefinition) {
final Type indexType = indexDefinition.getType();
if ( indexType.isAnyType() ) {
throw new WalkingException( "CollectionIndexDefinition reported any-type mapping as map index" );
return;
}
log.tracef(
@ -409,6 +420,11 @@ public abstract class AbstractLoadPlanBuildingAssociationVisitationStrategy
@Override
public void finishingCollectionIndex(CollectionIndexDefinition indexDefinition) {
final Type indexType = indexDefinition.getType();
if ( indexType.isAnyType() ) {
return;
}
if ( indexType.isComponentType() ) {
// todo : validate the stack?
popFromStack();
@ -463,6 +479,10 @@ public abstract class AbstractLoadPlanBuildingAssociationVisitationStrategy
@Override
public void finishingCollectionElements(CollectionElementDefinition elementDefinition) {
final Type elementType = elementDefinition.getType();
if ( elementType.isAnyType() ) {
return;
}
if ( elementType.isComponentType() ) {
// pop it from the stack
final ExpandingFetchSource popped = popFromStack();
@ -597,7 +617,8 @@ public abstract class AbstractLoadPlanBuildingAssociationVisitationStrategy
// TODO: AFAICT, to avoid an overflow, the associated entity must already be loaded into the session, or
// it must be loaded when the ID for the dependent entity is resolved. Is there some other way to
// deal with this???
if ( ! associationKey.equals( currentEntityReferenceAssociationKey ) ) {
final FetchSource registeredFetchSource = registeredFetchSource( associationKey );
if ( registeredFetchSource != null && ! associationKey.equals( currentEntityReferenceAssociationKey ) ) {
currentSource().buildBidirectionalEntityReference(
attributeDefinition,
fetchStrategy,

View File

@ -220,7 +220,7 @@ public class QuerySpaceTreePrinter {
private String determineJoinType(Join join) {
if ( JoinDefinedByMetadata.class.isInstance( join ) ) {
return "JoinDefinedByMetadata(" + ( (JoinDefinedByMetadata) join ).getJoinedAssociationPropertyName() + ")";
return "JoinDefinedByMetadata(" + ( (JoinDefinedByMetadata) join ).getJoinedPropertyName() + ")";
}
return join.getClass().getSimpleName();

View File

@ -0,0 +1,527 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.loader.plan2.exec.internal;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.hibernate.HibernateException;
import org.hibernate.LockOptions;
import org.hibernate.ScrollMode;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.pagination.LimitHandler;
import org.hibernate.dialect.pagination.LimitHelper;
import org.hibernate.dialect.pagination.NoopLimitHandler;
import org.hibernate.engine.jdbc.ColumnNameCache;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.RowSelection;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.spi.TypedValue;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.loader.plan2.exec.query.spi.NamedParameterContext;
import org.hibernate.loader.plan2.exec.spi.LoadQueryDetails;
import org.hibernate.loader.spi.AfterLoadAction;
import org.hibernate.transform.ResultTransformer;
import org.hibernate.type.Type;
/**
* todo How much of AbstractLoadPlanBasedEntityLoader is actually needed?
* @author Gail Badner
*/
public abstract class AbstractLoadPlanBasedLoader {
private static final CoreMessageLogger log = CoreLogging.messageLogger( AbstractLoadPlanBasedLoader.class );
private final SessionFactoryImplementor factory;
private ColumnNameCache columnNameCache;
public AbstractLoadPlanBasedLoader(
SessionFactoryImplementor factory) {
this.factory = factory;
}
protected SessionFactoryImplementor getFactory() {
return factory;
}
protected abstract LoadQueryDetails getStaticLoadQuery();
protected abstract int[] getNamedParameterLocs(String name);
protected abstract void autoDiscoverTypes(ResultSet rs);
protected List executeLoad(
SessionImplementor session,
QueryParameters queryParameters,
LoadQueryDetails loadQueryDetails,
boolean returnProxies,
ResultTransformer forcedResultTransformer) throws SQLException {
final List<AfterLoadAction> afterLoadActions = new ArrayList<AfterLoadAction>();
return executeLoad(
session,
queryParameters,
loadQueryDetails,
returnProxies,
forcedResultTransformer,
afterLoadActions
);
}
protected List executeLoad(
SessionImplementor session,
QueryParameters queryParameters,
LoadQueryDetails loadQueryDetails,
boolean returnProxies,
ResultTransformer forcedResultTransformer,
List<AfterLoadAction> afterLoadActions) throws SQLException {
final PersistenceContext persistenceContext = session.getPersistenceContext();
final boolean defaultReadOnlyOrig = persistenceContext.isDefaultReadOnly();
if ( queryParameters.isReadOnlyInitialized() ) {
// The read-only/modifiable mode for the query was explicitly set.
// Temporarily set the default read-only/modifiable setting to the query's setting.
persistenceContext.setDefaultReadOnly( queryParameters.isReadOnly() );
}
else {
// The read-only/modifiable setting for the query was not initialized.
// Use the default read-only/modifiable from the persistence context instead.
queryParameters.setReadOnly( persistenceContext.isDefaultReadOnly() );
}
persistenceContext.beforeLoad();
try {
List results = null;
final String sql = loadQueryDetails.getSqlStatement();
SqlStatementWrapper wrapper = null;
try {
wrapper = executeQueryStatement( sql, queryParameters, false, afterLoadActions, session );
results = loadQueryDetails.getResultSetProcessor().extractResults(
wrapper.getResultSet(),
session,
queryParameters,
new NamedParameterContext() {
@Override
public int[] getNamedParameterLocations(String name) {
return AbstractLoadPlanBasedLoader.this.getNamedParameterLocs( name );
}
},
returnProxies,
queryParameters.isReadOnly(),
forcedResultTransformer,
afterLoadActions
);
}
finally {
if ( wrapper != null ) {
session.getTransactionCoordinator().getJdbcCoordinator().release(
wrapper.getResultSet(),
wrapper.getStatement()
);
}
persistenceContext.afterLoad();
}
persistenceContext.initializeNonLazyCollections();
return results;
}
finally {
// Restore the original default
persistenceContext.setDefaultReadOnly( defaultReadOnlyOrig );
}
}
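The read-only handling above follows a save/override/restore pattern: the persistence context's default is captured, optionally overridden by the query's explicit setting, and unconditionally restored in the outer finally block. A minimal standalone sketch of that pattern; ReadOnlyScope and Context are hypothetical names used only for illustration, not Hibernate's PersistenceContext:
final class ReadOnlyScope {
	static final class Context {
		private boolean defaultReadOnly;
		boolean isDefaultReadOnly() { return defaultReadOnly; }
		void setDefaultReadOnly(boolean readOnly) { this.defaultReadOnly = readOnly; }
	}

	static <T> T withReadOnly(Context context, Boolean requestedReadOnly, java.util.concurrent.Callable<T> work) throws Exception {
		final boolean original = context.isDefaultReadOnly();
		if ( requestedReadOnly != null ) {
			// an explicit request wins for the duration of the work
			context.setDefaultReadOnly( requestedReadOnly );
		}
		try {
			return work.call();
		}
		finally {
			// always restore the original default, even if the work throws
			context.setDefaultReadOnly( original );
		}
	}
}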
protected SqlStatementWrapper executeQueryStatement(
final QueryParameters queryParameters,
final boolean scroll,
List<AfterLoadAction> afterLoadActions,
final SessionImplementor session) throws SQLException {
return executeQueryStatement( getStaticLoadQuery().getSqlStatement(), queryParameters, scroll, afterLoadActions, session );
}
protected SqlStatementWrapper executeQueryStatement(
String sqlStatement,
QueryParameters queryParameters,
boolean scroll,
List<AfterLoadAction> afterLoadActions,
SessionImplementor session) throws SQLException {
// Processing query filters.
queryParameters.processFilters( sqlStatement, session );
// Applying LIMIT clause.
final LimitHandler limitHandler = getLimitHandler(
queryParameters.getFilteredSQL(),
queryParameters.getRowSelection()
);
String sql = limitHandler.getProcessedSql();
// Adding locks and comments.
sql = preprocessSQL( sql, queryParameters, getFactory().getDialect(), afterLoadActions );
final PreparedStatement st = prepareQueryStatement( sql, queryParameters, limitHandler, scroll, session );
return new SqlStatementWrapper( st, getResultSet( st, queryParameters.getRowSelection(), limitHandler, queryParameters.hasAutoDiscoverScalarTypes(), session ) );
}
/**
* Build a LIMIT clause handler applicable to the given selection criteria. Returns a {@link org.hibernate.dialect.pagination.NoopLimitHandler} delegate
* if the dialect does not support LIMIT expressions or the processed query does not use pagination.
*
* @param sql Query string.
* @param selection Selection criteria.
* @return LIMIT clause delegate.
*/
protected LimitHandler getLimitHandler(String sql, RowSelection selection) {
final LimitHandler limitHandler = getFactory().getDialect().buildLimitHandler( sql, selection );
return LimitHelper.useLimit( limitHandler, selection ) ? limitHandler : new NoopLimitHandler( sql, selection );
}
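In other words, the dialect-built handler is only used when LimitHelper judges the row selection to actually require pagination; otherwise a pass-through NoopLimitHandler leaves the SQL untouched. A standalone sketch of that "real handler or no-op" choice, with hypothetical types standing in for the LimitHandler SPI:
final class LimitChoiceSketch {
	interface SqlLimiter {
		String processedSql();
	}

	// stand-ins for the dialect-built handler and for NoopLimitHandler, respectively
	static SqlLimiter choose(final String sql, final Integer maxRows, final boolean dialectSupportsLimit) {
		if ( dialectSupportsLimit && maxRows != null ) {
			return new SqlLimiter() {
				public String processedSql() { return sql + " limit " + maxRows; }	// dialect-specific syntax in reality
			};
		}
		return new SqlLimiter() {
			public String processedSql() { return sql; }	// pagination not applicable; pass through
		};
	}

	public static void main(String[] args) {
		System.out.println( choose( "select * from t", 10, true ).processedSql() );
		System.out.println( choose( "select * from t", null, true ).processedSql() );
	}
}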
private String preprocessSQL(
String sql,
QueryParameters queryParameters,
Dialect dialect,
List<AfterLoadAction> afterLoadActions) {
return getFactory().getSettings().isCommentsEnabled()
? prependComment( sql, queryParameters )
: sql;
}
private String prependComment(String sql, QueryParameters parameters) {
final String comment = parameters.getComment();
if ( comment == null ) {
return sql;
}
else {
return "/* " + comment + " */ " + sql;
}
}
/**
* Obtain a <tt>PreparedStatement</tt> with all parameters pre-bound.
* Bind JDBC-style <tt>?</tt> parameters, named parameters, and
* limit parameters.
*/
protected final PreparedStatement prepareQueryStatement(
final String sql,
final QueryParameters queryParameters,
final LimitHandler limitHandler,
final boolean scroll,
final SessionImplementor session) throws SQLException, HibernateException {
final Dialect dialect = getFactory().getDialect();
final RowSelection selection = queryParameters.getRowSelection();
final boolean useLimit = LimitHelper.useLimit( limitHandler, selection );
final boolean hasFirstRow = LimitHelper.hasFirstRow( selection );
final boolean useLimitOffset = hasFirstRow && useLimit && limitHandler.supportsLimitOffset();
final boolean callable = queryParameters.isCallable();
final ScrollMode scrollMode = getScrollMode( scroll, hasFirstRow, useLimitOffset, queryParameters );
final PreparedStatement st = session.getTransactionCoordinator().getJdbcCoordinator()
.getStatementPreparer().prepareQueryStatement( sql, callable, scrollMode );
try {
int col = 1;
//TODO: can we limit stored procedures ?!
col += limitHandler.bindLimitParametersAtStartOfQuery( st, col );
if (callable) {
col = dialect.registerResultSetOutParameter( (CallableStatement)st, col );
}
col += bindParameterValues( st, queryParameters, col, session );
col += limitHandler.bindLimitParametersAtEndOfQuery( st, col );
limitHandler.setMaxRows( st );
if ( selection != null ) {
if ( selection.getTimeout() != null ) {
st.setQueryTimeout( selection.getTimeout() );
}
if ( selection.getFetchSize() != null ) {
st.setFetchSize( selection.getFetchSize() );
}
}
// handle lock timeout...
final LockOptions lockOptions = queryParameters.getLockOptions();
if ( lockOptions != null ) {
if ( lockOptions.getTimeOut() != LockOptions.WAIT_FOREVER ) {
if ( !dialect.supportsLockTimeouts() ) {
if ( log.isDebugEnabled() ) {
log.debugf(
"Lock timeout [%s] requested but dialect reported to not support lock timeouts",
lockOptions.getTimeOut()
);
}
}
else if ( dialect.isLockTimeoutParameterized() ) {
st.setInt( col++, lockOptions.getTimeOut() );
}
}
}
if ( log.isTraceEnabled() ) {
log.tracev( "Bound [{0}] parameters total", col );
}
}
catch ( SQLException sqle ) {
session.getTransactionCoordinator().getJdbcCoordinator().release( st );
throw sqle;
}
catch ( HibernateException he ) {
session.getTransactionCoordinator().getJdbcCoordinator().release( st );
throw he;
}
return st;
}
protected ScrollMode getScrollMode(boolean scroll, boolean hasFirstRow, boolean useLimitOffSet, QueryParameters queryParameters) {
final boolean canScroll = getFactory().getSettings().isScrollableResultSetsEnabled();
if ( canScroll ) {
if ( scroll ) {
return queryParameters.getScrollMode();
}
if ( hasFirstRow && !useLimitOffSet ) {
return ScrollMode.SCROLL_INSENSITIVE;
}
}
return null;
}
/**
* Bind all parameter values into the prepared statement in preparation
* for execution.
*
* @param statement The JDBC prepared statement
* @param queryParameters The encapsulation of the parameter values to be bound.
* @param startIndex The position from which to start binding parameter values.
* @param session The originating session.
* @return The number of JDBC bind positions actually bound during this method execution.
* @throws SQLException Indicates problems performing the binding.
*/
protected int bindParameterValues(
PreparedStatement statement,
QueryParameters queryParameters,
int startIndex,
SessionImplementor session) throws SQLException {
int span = 0;
span += bindPositionalParameters( statement, queryParameters, startIndex, session );
span += bindNamedParameters( statement, queryParameters.getNamedParameters(), startIndex + span, session );
return span;
}
/**
* Bind positional parameter values to the JDBC prepared statement.
* <p/>
* Positional parameters are those specified by JDBC-style ? parameters
* in the source query. It is (currently) expected that these come
* before any named parameters in the source query.
*
* @param statement The JDBC prepared statement
* @param queryParameters The encapsulation of the parameter values to be bound.
* @param startIndex The position from which to start binding parameter values.
* @param session The originating session.
* @return The number of JDBC bind positions actually bound during this method execution.
* @throws SQLException Indicates problems performing the binding.
* @throws org.hibernate.HibernateException Indicates problems delegating binding to the types.
*/
protected int bindPositionalParameters(
final PreparedStatement statement,
final QueryParameters queryParameters,
final int startIndex,
final SessionImplementor session) throws SQLException, HibernateException {
final Object[] values = queryParameters.getFilteredPositionalParameterValues();
final Type[] types = queryParameters.getFilteredPositionalParameterTypes();
int span = 0;
for ( int i = 0; i < values.length; i++ ) {
types[i].nullSafeSet( statement, values[i], startIndex + span, session );
span += types[i].getColumnSpan( getFactory() );
}
return span;
}
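The span bookkeeping above matters because a type may occupy more than one JDBC '?' (its column span), so each bound value shifts the positions of the values that follow. A standalone sketch of that arithmetic, with plain ints standing in for the column spans reported by the types:
final class PositionalBindingSketch {
	static int bindAll(int[] columnSpans, int startIndex) {
		int span = 0;
		for ( int i = 0; i < columnSpans.length; i++ ) {
			final int firstJdbcPosition = startIndex + span;	// where this value's first '?' lands
			// a real implementation would bind the value here, starting at firstJdbcPosition
			System.out.println( "value #" + i + " binds JDBC positions " + firstJdbcPosition
					+ ".." + ( firstJdbcPosition + columnSpans[i] - 1 ) );
			span += columnSpans[i];
		}
		return span;	// number of positions consumed
	}

	public static void main(String[] args) {
		// e.g. a single-column type followed by a two-column composite: position 1, then 2..3
		bindAll( new int[] { 1, 2 }, 1 );
	}
}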
/**
* Bind named parameters to the JDBC prepared statement.
* <p/>
* This is a generic implementation, the problem being that in the
* general case we do not know enough information about the named
* parameters to perform this in a complete manner here. Thus this
* is generally overridden by subclasses that know enough about their named
* parameters to apply the specific behavior. The most usual limitation here is that
* we need to assume the type span is always one...
*
* @param statement The JDBC prepared statement
* @param namedParams A map of parameter names to values
* @param startIndex The position from which to start binding parameter values.
* @param session The originating session.
* @return The number of JDBC bind positions actually bound during this method execution.
* @throws SQLException Indicates problems performing the binding.
* @throws org.hibernate.HibernateException Indicates problems delegating binding to the types.
*/
protected int bindNamedParameters(
final PreparedStatement statement,
final Map namedParams,
final int startIndex,
final SessionImplementor session) throws SQLException, HibernateException {
if ( namedParams != null ) {
// assumes that types are all of span 1
final Iterator itr = namedParams.entrySet().iterator();
final boolean debugEnabled = log.isDebugEnabled();
int result = 0;
while ( itr.hasNext() ) {
final Map.Entry e = (Map.Entry) itr.next();
final String name = (String) e.getKey();
final TypedValue typedval = (TypedValue) e.getValue();
final int[] locs = getNamedParameterLocs( name );
for ( int loc : locs ) {
if ( debugEnabled ) {
log.debugf(
"bindNamedParameters() %s -> %s [%s]",
typedval.getValue(),
name,
loc + startIndex
);
}
typedval.getType().nullSafeSet( statement, typedval.getValue(), loc + startIndex, session );
}
result += locs.length;
}
return result;
}
else {
return 0;
}
}
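The named-parameter case works from locations rather than order: getNamedParameterLocs(name) reports every offset at which a named parameter occurs, and each offset is shifted by startIndex before binding. A standalone sketch with an illustrative location map; the names and offsets here are made up:
final class NamedBindingSketch {
	public static void main(String[] args) {
		final java.util.Map<String, int[]> locationsByName = new java.util.HashMap<String, int[]>();
		locationsByName.put( "status", new int[] { 0, 3 } );	// ":status" appears twice in the SQL
		locationsByName.put( "owner", new int[] { 1 } );

		final int startIndex = 2;	// position 1 already used (e.g. a limit parameter)
		int bound = 0;
		for ( java.util.Map.Entry<String, int[]> entry : locationsByName.entrySet() ) {
			for ( int loc : entry.getValue() ) {
				System.out.println( "bind :" + entry.getKey() + " at JDBC position " + ( loc + startIndex ) );
			}
			bound += entry.getValue().length;
		}
		System.out.println( "total positions bound: " + bound );
	}
}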
/**
* Execute the given <tt>PreparedStatement</tt>, advance to the first result and return the SQL <tt>ResultSet</tt>.
*/
protected final ResultSet getResultSet(
final PreparedStatement st,
final RowSelection selection,
final LimitHandler limitHandler,
final boolean autodiscovertypes,
final SessionImplementor session)
throws SQLException, HibernateException {
try {
ResultSet rs = session.getTransactionCoordinator().getJdbcCoordinator().getResultSetReturn().extract( st );
rs = wrapResultSetIfEnabled( rs , session );
if ( !limitHandler.supportsLimitOffset() || !LimitHelper.useLimit( limitHandler, selection ) ) {
advance( rs, selection );
}
if ( autodiscovertypes ) {
autoDiscoverTypes( rs );
}
return rs;
}
catch ( SQLException sqle ) {
session.getTransactionCoordinator().getJdbcCoordinator().release( st );
throw sqle;
}
}
/**
* Advance the cursor to the first required row of the <tt>ResultSet</tt>
*/
protected void advance(final ResultSet rs, final RowSelection selection) throws SQLException {
final int firstRow = LimitHelper.getFirstRow( selection );
if ( firstRow != 0 ) {
if ( getFactory().getSettings().isScrollableResultSetsEnabled() ) {
// we can go straight to the first required row
rs.absolute( firstRow );
}
else {
// we need to step through the rows one row at a time (slow)
for ( int m = 0; m < firstRow; m++ ) {
rs.next();
}
}
}
}
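As the comments above note, the fast path relies on scrollable result sets, while the fallback walks forward one row at a time. A minimal JDBC-only sketch of the same two strategies; advanceTo is a hypothetical helper, not part of this class:
final class ResultSetAdvanceSketch {
	static void advanceTo(java.sql.ResultSet rs, int firstRow, boolean scrollable) throws java.sql.SQLException {
		if ( firstRow == 0 ) {
			return;	// nothing to skip
		}
		if ( scrollable ) {
			rs.absolute( firstRow );	// jump straight to the first required row
		}
		else {
			for ( int i = 0; i < firstRow; i++ ) {
				rs.next();	// step through the rows one at a time (slow)
			}
		}
	}
}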
private synchronized ResultSet wrapResultSetIfEnabled(final ResultSet rs, final SessionImplementor session) {
// synchronized to avoid multi-thread access issues; defined as method synch to avoid
// potential deadlock issues due to nature of code.
if ( session.getFactory().getSettings().isWrapResultSetsEnabled() ) {
try {
if ( log.isDebugEnabled() ) {
log.debugf( "Wrapping result set [%s]", rs );
}
return session.getFactory()
.getJdbcServices()
.getResultSetWrapper().wrap( rs, retreiveColumnNameToIndexCache( rs ) );
}
catch(SQLException e) {
log.unableToWrapResultSet( e );
return rs;
}
}
else {
return rs;
}
}
private ColumnNameCache retreiveColumnNameToIndexCache(ResultSet rs) throws SQLException {
if ( columnNameCache == null ) {
log.trace( "Building columnName->columnIndex cache" );
columnNameCache = new ColumnNameCache( rs.getMetaData().getColumnCount() );
}
return columnNameCache;
}
/**
* Wrapper class for {@link java.sql.Statement} and associated {@link java.sql.ResultSet}.
*/
protected static class SqlStatementWrapper {
private final Statement statement;
private final ResultSet resultSet;
private SqlStatementWrapper(Statement statement, ResultSet resultSet) {
this.resultSet = resultSet;
this.statement = statement;
}
public ResultSet getResultSet() {
return resultSet;
}
public Statement getStatement() {
return statement;
}
}
}

View File

@ -129,8 +129,17 @@ public class AliasResolutionContextImpl implements AliasResolutionContext {
}
public CollectionReferenceAliases generateCollectionReferenceAliases(String uid, CollectionPersister persister) {
final String manyToManyTableAlias = persister.isManyToMany()? createTableAlias( persister.getRole() ) : null;
final String tableAlias = createTableAlias( persister.getRole() );
final String manyToManyTableAlias;
final String tableAlias;
if ( persister.isManyToMany() ) {
manyToManyTableAlias = createTableAlias( persister.getRole() );
tableAlias = createTableAlias( persister.getElementDefinition().toEntityDefinition().getEntityPersister() );
}
else {
manyToManyTableAlias = null;
tableAlias = createTableAlias( persister.getRole() );
}
final CollectionReferenceAliasesImpl aliases = new CollectionReferenceAliasesImpl(
tableAlias,
manyToManyTableAlias,

View File

@ -30,12 +30,12 @@ import org.hibernate.engine.FetchTiming;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.loader.EntityAliases;
import org.hibernate.loader.plan2.exec.process.internal.CollectionReferenceInitializerImpl;
import org.hibernate.loader.plan2.exec.process.internal.EntityReferenceInitializerImpl;
import org.hibernate.loader.plan2.exec.process.spi.ReaderCollector;
import org.hibernate.loader.plan2.exec.query.internal.SelectStatementBuilder;
import org.hibernate.loader.plan2.exec.query.spi.QueryBuildingParameters;
import org.hibernate.loader.plan2.exec.spi.AliasResolutionContext;
import org.hibernate.loader.plan2.exec.spi.CollectionReferenceAliases;
import org.hibernate.loader.plan2.exec.spi.EntityReferenceAliases;
import org.hibernate.loader.plan2.spi.CollectionFetch;
@ -50,13 +50,16 @@ import org.hibernate.loader.plan2.spi.Join;
import org.hibernate.loader.plan2.spi.JoinDefinedByMetadata;
import org.hibernate.loader.plan2.spi.QuerySpace;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.collection.CollectionPropertyNames;
import org.hibernate.persister.collection.QueryableCollection;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.Joinable;
import org.hibernate.persister.entity.OuterJoinLoadable;
import org.hibernate.persister.walking.internal.FetchStrategyHelper;
import org.hibernate.sql.JoinFragment;
import org.hibernate.sql.JoinType;
import org.hibernate.type.AssociationType;
import org.hibernate.type.Type;
/**
* Helper for implementors of entity and collection based query building based on LoadPlans providing common
@ -64,7 +67,7 @@ import org.hibernate.type.AssociationType;
* <p/>
* Exposes 2 main methods:<ol>
* <li>{@link #processQuerySpaceJoins(QuerySpace, SelectStatementBuilder)}</li>
* <li>{@link #processFetches(FetchSource, SelectStatementBuilder, ReaderCollector)}li>
* <li>{@link #processFetches(FetchSource, SelectStatementBuilder, org.hibernate.loader.plan2.exec.process.spi.ReaderCollector)}</li>
* </ol>
*
* @author Steve Ebersole
@ -92,6 +95,18 @@ public class LoadQueryJoinAndFetchProcessor {
this.factory = factory;
}
public AliasResolutionContext getAliasResolutionContext() {
return aliasResolutionContext;
}
public QueryBuildingParameters getQueryBuildingParameters() {
return buildingParameters;
}
public SessionFactoryImplementor getSessionFactory() {
return factory;
}
public void processQuerySpaceJoins(QuerySpace querySpace, SelectStatementBuilder selectStatementBuilder) {
LOG.debug( "processing queryspace " + querySpace.getUid() );
final JoinFragment joinFragment = factory.getDialect().createOuterJoinFragment();
@ -130,17 +145,16 @@ public class LoadQueryJoinAndFetchProcessor {
else if ( EntityQuerySpace.class.isInstance( join.getRightHandSide() ) ) {
// do not render the entity join for a one-to-many association, since the collection join
// already joins to the associated entity table (see doc in renderCollectionJoin()).
if ( join.getLeftHandSide().getDisposition() == QuerySpace.Disposition.COLLECTION &&
CollectionQuerySpace.class.cast( join.getLeftHandSide() ).getCollectionPersister().isManyToMany() ) {
final CollectionQuerySpace leftHandSide = (CollectionQuerySpace) join.getLeftHandSide();
final CollectionReferenceAliases aliases = aliasResolutionContext.resolveCollectionReferenceAliases(
leftHandSide.getUid()
);
renderManyToManyJoin(aliases, leftHandSide, join, joinFragment );
if ( join.getLeftHandSide().getDisposition() == QuerySpace.Disposition.COLLECTION ) {
if ( CollectionQuerySpace.class.cast( join.getLeftHandSide() ).getCollectionPersister().isManyToMany() ) {
renderManyToManyJoin( join, joinFragment );
}
else if ( join.getLeftHandSide().getDisposition() != QuerySpace.Disposition.COLLECTION ||
! CollectionQuerySpace.class.cast( join.getLeftHandSide() ).getCollectionPersister().isOneToMany() ) {
else if ( JoinDefinedByMetadata.class.isInstance( join ) &&
CollectionPropertyNames.COLLECTION_INDICES.equals( JoinDefinedByMetadata.class.cast( join ).getJoinedPropertyName() ) ) {
renderManyToManyJoin( join, joinFragment );
}
}
else {
renderEntityJoin( join, joinFragment );
}
}
@ -164,47 +178,36 @@ public class LoadQueryJoinAndFetchProcessor {
}
private void renderEntityJoin(Join join, JoinFragment joinFragment) {
final String leftHandSideUid = join.getLeftHandSide().getUid();
final String leftHandSideTableAlias = aliasResolutionContext.resolveSqlTableAliasFromQuerySpaceUid( leftHandSideUid );
if ( leftHandSideTableAlias == null ) {
throw new IllegalStateException( "QuerySpace with that UID was not yet registered in the AliasResolutionContext" );
}
final String[] aliasedLhsColumnNames = join.resolveAliasedLeftHandSideJoinConditionColumns( leftHandSideTableAlias );
final EntityQuerySpace rightHandSide = (EntityQuerySpace) join.getRightHandSide();
// see if there is already aliases registered for this entity query space (collection joins)
EntityReferenceAliases aliases = aliasResolutionContext.resolveEntityReferenceAliases( rightHandSide.getUid() );
if ( aliases == null ) {
aliases = aliasResolutionContext.generateEntityReferenceAliases(
aliasResolutionContext.generateEntityReferenceAliases(
rightHandSide.getUid(),
rightHandSide.getEntityPersister()
);
}
final String[] rhsColumnNames = join.resolveNonAliasedRightHandSideJoinConditionColumns();
final String rhsTableAlias = aliases.getTableAlias();
final AssociationType associationType = join instanceof JoinDefinedByMetadata ? ((JoinDefinedByMetadata)join).getJoinedAssociationPropertyType() : null;
final String additionalJoinConditions = resolveAdditionalJoinCondition(
rhsTableAlias,
join.getAnyAdditionalJoinConditions( rhsTableAlias ),
(Joinable) rightHandSide.getEntityPersister(),
associationType
);
final Joinable joinable = (Joinable) rightHandSide.getEntityPersister();
addJoins(
join,
joinFragment,
joinable,
join.isRightHandSideRequired() ? JoinType.INNER_JOIN : JoinType.LEFT_OUTER_JOIN,
aliases.getTableAlias(),
rhsColumnNames,
aliasedLhsColumnNames,
additionalJoinConditions
joinable
);
}
private AssociationType getJoinedAssociationTypeOrNull(Join join) {
if ( !JoinDefinedByMetadata.class.isInstance( join ) ) {
return null;
}
final Type joinedType = ( (JoinDefinedByMetadata) join ).getJoinedPropertyType();
return joinedType.isAssociationType()
? (AssociationType) joinedType
: null;
}
private String resolveAdditionalJoinCondition(String rhsTableAlias, String withClause, Joinable joinable, AssociationType associationType) {
// turns out that the call to AssociationType#getOnCondition in the initial code really just translates to
// calls to the Joinable.filterFragment() method where the Joinable is either the entity or
@ -228,41 +231,48 @@ public class LoadQueryJoinAndFetchProcessor {
}
}
private static void addJoins(
private void addJoins(
Join join,
JoinFragment joinFragment,
Joinable joinable,
JoinType joinType,
String rhsAlias,
String[] rhsColumnNames,
String[] aliasedLhsColumnNames,
String additionalJoinConditions) {
// somewhere, one of these being empty is causing trouble...
if ( StringHelper.isEmpty( rhsAlias ) ) {
Joinable joinable) {
final String rhsTableAlias = aliasResolutionContext.resolveSqlTableAliasFromQuerySpaceUid(
join.getRightHandSide().getUid()
);
if ( StringHelper.isEmpty( rhsTableAlias ) ) {
throw new IllegalStateException( "Join's RHS table alias cannot be empty" );
}
final String lhsTableAlias = aliasResolutionContext.resolveSqlTableAliasFromQuerySpaceUid(
join.getLeftHandSide().getUid()
);
if ( lhsTableAlias == null ) {
throw new IllegalStateException( "QuerySpace with that UID was not yet registered in the AliasResolutionContext" );
}
// add join fragments from the collection table -> element entity table ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
final String additionalJoinConditions = resolveAdditionalJoinCondition(
rhsTableAlias,
join.getAnyAdditionalJoinConditions( rhsTableAlias ),
joinable,
getJoinedAssociationTypeOrNull( join )
);
joinFragment.addJoin(
joinable.getTableName(),
rhsAlias,
aliasedLhsColumnNames,
rhsColumnNames,
joinType,
rhsTableAlias,
join.resolveAliasedLeftHandSideJoinConditionColumns( lhsTableAlias ),
join.resolveNonAliasedRightHandSideJoinConditionColumns(),
join.isRightHandSideRequired() ? JoinType.INNER_JOIN : JoinType.LEFT_OUTER_JOIN,
additionalJoinConditions
);
joinFragment.addJoins(
joinable.fromJoinFragment( rhsAlias, false, true ),
joinable.whereJoinFragment( rhsAlias, false, true )
joinable.fromJoinFragment( rhsTableAlias, false, true ),
joinable.whereJoinFragment( rhsTableAlias, false, true )
);
}
private void renderCollectionJoin(Join join, JoinFragment joinFragment) {
final String leftHandSideUid = join.getLeftHandSide().getUid();
final String leftHandSideTableAlias = aliasResolutionContext.resolveSqlTableAliasFromQuerySpaceUid( leftHandSideUid );
if ( leftHandSideTableAlias == null ) {
throw new IllegalStateException( "QuerySpace with that UID was not yet registered in the AliasResolutionContext" );
}
final String[] aliasedLhsColumnNames = join.resolveAliasedLeftHandSideJoinConditionColumns( leftHandSideTableAlias );
final CollectionQuerySpace rightHandSide = (CollectionQuerySpace) join.getRightHandSide();
final CollectionReferenceAliases aliases = aliasResolutionContext.generateCollectionReferenceAliases(
rightHandSide.getUid(),
@ -318,82 +328,16 @@ public class LoadQueryJoinAndFetchProcessor {
);
}
renderSqlJoinToCollectionTable(
aliases,
rightHandSide,
aliasedLhsColumnNames,
addJoins(
join,
joinFragment
);
// if ( rightHandSide.getCollectionPersister().isManyToMany() ) {
// renderManyToManyJoin(
// aliases,
// rightHandSide,
// aliasedLhsColumnNames,
// join,
// joinFragment
// );
// }
// else if ( rightHandSide.getCollectionPersister().isOneToMany() ) {
// renderOneToManyJoin(
// aliases,
// rightHandSide,
// aliasedLhsColumnNames,
// join,
// joinFragment
// );
// }
// else {
// renderBasicCollectionJoin(
// aliases,
// rightHandSide,
// aliasedLhsColumnNames,
// join,
// joinFragment
// );
// }
}
private void renderSqlJoinToCollectionTable(
CollectionReferenceAliases aliases,
CollectionQuerySpace rightHandSide,
String[] aliasedLhsColumnNames,
Join join,
JoinFragment joinFragment) {
final String collectionTableAlias = aliases.getCollectionTableAlias();
final CollectionPersister persister = rightHandSide.getCollectionPersister();
final QueryableCollection queryableCollection = (QueryableCollection) persister;
// add join fragments from the owner table -> collection table ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
final String filterFragment = queryableCollection.filterFragment(
collectionTableAlias,
buildingParameters.getQueryInfluencers().getEnabledFilters()
);
joinFragment.addJoin(
queryableCollection.getTableName(),
collectionTableAlias,
aliasedLhsColumnNames,
queryableCollection.getKeyColumnNames(),
JoinType.LEFT_OUTER_JOIN,
filterFragment
);
joinFragment.addJoins(
queryableCollection.fromJoinFragment( collectionTableAlias, false, true ),
queryableCollection.whereJoinFragment( collectionTableAlias, false, true )
joinFragment,
(Joinable) rightHandSide.getCollectionPersister()
);
}
private void renderManyToManyJoin(
CollectionReferenceAliases aliases,
CollectionQuerySpace rightHandSide,
// String[] aliasedLhsColumnNames,
Join join,
JoinFragment joinFragment) {
final CollectionPersister persister = rightHandSide.getCollectionPersister();
final QueryableCollection queryableCollection = (QueryableCollection) persister;
// for many-to-many we have 3 table aliases. By way of example, consider a normal m-n: User<->Role
// where User is the FetchOwner and Role (User.roles) is the Fetch. We'd have:
@ -402,141 +346,32 @@ public class LoadQueryJoinAndFetchProcessor {
// columns here (aliasedLhsColumnNames)
//final String ownerTableAlias = ...;
// 2) the m-n table : user_role
final String collectionTableAlias = aliases.getCollectionTableAlias();
// 3) the element table : role
final String elementTableAlias = aliases.getElementTableAlias();
final EntityPersister entityPersister = ( (EntityQuerySpace) join.getRightHandSide() ).getEntityPersister();
final String entityTableAlias = aliasResolutionContext.resolveSqlTableAliasFromQuerySpaceUid(
join.getRightHandSide().getUid()
);
// somewhere, one of these being empty is causing trouble...
if ( StringHelper.isEmpty( collectionTableAlias ) ) {
throw new IllegalStateException( "Collection table alias cannot be empty" );
}
if ( StringHelper.isEmpty( elementTableAlias ) ) {
if ( StringHelper.isEmpty( entityTableAlias ) ) {
throw new IllegalStateException( "Collection element (many-to-many) table alias cannot be empty" );
}
//
// {
// // add join fragments from the owner table -> collection table ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// final String filterFragment = queryableCollection.filterFragment(
// collectionTableAlias,
// buildingParameters.getQueryInfluencers().getEnabledFilters()
// );
//
// joinFragment.addJoin(
// queryableCollection.getTableName(),
// collectionTableAlias,
// aliasedLhsColumnNames,
// queryableCollection.getKeyColumnNames(),
// JoinType.LEFT_OUTER_JOIN,
// filterFragment
// );
// joinFragment.addJoins(
// queryableCollection.fromJoinFragment( collectionTableAlias, false, true ),
// queryableCollection.whereJoinFragment( collectionTableAlias, false, true )
// );
// }
{
final AssociationType associationType = join instanceof JoinDefinedByMetadata ? ((JoinDefinedByMetadata)join).getJoinedAssociationPropertyType() : null;
// add join fragments from the collection table -> element entity table ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
final String additionalJoinConditions = resolveAdditionalJoinCondition(
elementTableAlias,
join.getAnyAdditionalJoinConditions( elementTableAlias ),
queryableCollection,
associationType
);
if ( JoinDefinedByMetadata.class.isInstance( join ) &&
CollectionPropertyNames.COLLECTION_ELEMENTS.equals( ( (JoinDefinedByMetadata) join ).getJoinedPropertyName() ) ) {
final CollectionQuerySpace leftHandSide = (CollectionQuerySpace) join.getLeftHandSide();
final CollectionPersister persister = leftHandSide.getCollectionPersister();
final String manyToManyFilter = persister.getManyToManyFilterFragment(
elementTableAlias,
entityTableAlias,
buildingParameters.getQueryInfluencers().getEnabledFilters()
);
final String condition;
if ( StringHelper.isEmpty( manyToManyFilter ) ) {
condition = additionalJoinConditions;
joinFragment.addCondition( manyToManyFilter );
}
else if ( StringHelper.isEmpty( additionalJoinConditions ) ) {
condition = manyToManyFilter;
}
else {
condition = additionalJoinConditions + " and " + manyToManyFilter;
}
final OuterJoinLoadable elementPersister = (OuterJoinLoadable) queryableCollection.getElementPersister();
addJoins(
join,
joinFragment,
elementPersister,
JoinType.LEFT_OUTER_JOIN,
elementTableAlias,
elementPersister.getIdentifierColumnNames(),
StringHelper.qualify( collectionTableAlias, queryableCollection.getElementColumnNames() ),
condition
(Joinable) entityPersister
);
}
}
// private void renderOneToManyJoin(
// CollectionReferenceAliases aliases,
// CollectionQuerySpace rightHandSide,
// String[] aliasedLhsColumnNames,
// Join join,
// JoinFragment joinFragment) {
// final QueryableCollection queryableCollection = (QueryableCollection) rightHandSide.getCollectionPersister();
//
// final String rhsTableAlias = aliases.getElementTableAlias();
// final String[] rhsColumnNames = join.resolveNonAliasedRightHandSideJoinConditionColumns();
//
// final String on = resolveAdditionalJoinCondition(
// rhsTableAlias,
// join.getAnyAdditionalJoinConditions( rhsTableAlias ),
// queryableCollection
// );
//
// addJoins(
// joinFragment,
// queryableCollection,
// JoinType.LEFT_OUTER_JOIN,
// rhsTableAlias,
// rhsColumnNames,
// aliasedLhsColumnNames,
// on
// );
// }
//
// private void renderBasicCollectionJoin(
// CollectionReferenceAliases aliases,
// CollectionQuerySpace rightHandSide,
// String[] aliasedLhsColumnNames,
// Join join,
// JoinFragment joinFragment) {
// final QueryableCollection queryableCollection = (QueryableCollection) rightHandSide.getCollectionPersister();
//
// final String rhsTableAlias = aliases.getElementTableAlias();
// final String[] rhsColumnNames = join.resolveNonAliasedRightHandSideJoinConditionColumns();
//
// final String on = resolveAdditionalJoinCondition(
// rhsTableAlias,
// join.getAnyAdditionalJoinConditions( rhsTableAlias ),
// queryableCollection
// );
//
// addJoins(
// joinFragment,
// queryableCollection,
// JoinType.LEFT_OUTER_JOIN,
// rhsTableAlias,
// rhsColumnNames,
// aliasedLhsColumnNames,
// on
// );
// }
public FetchStats processFetches(
FetchSource fetchSource,
@ -644,9 +479,10 @@ public class LoadQueryJoinAndFetchProcessor {
// First write out the SQL SELECT fragments
final Joinable joinable = (Joinable) fetch.getEntityPersister();
final EntityReferenceAliases aliases = aliasResolutionContext.resolveEntityReferenceAliases(
EntityReferenceAliases aliases = aliasResolutionContext.resolveEntityReferenceAliases(
fetch.getQuerySpaceUid()
);
// the null arguments here relate to many-to-many fetches
selectStatementBuilder.appendSelectClauseFragment(
joinable.selectFragment(
@ -659,22 +495,19 @@ public class LoadQueryJoinAndFetchProcessor {
)
);
// // process its identifier fetches first (building EntityReferenceInitializers for them if needed)
// if ( EntityReference.class.isInstance( fetchSource ) ) {
// final EntityReference fetchOwnerAsEntityReference = (EntityReference) fetchSource;
// if ( fetchOwnerAsEntityReference.getIdentifierDescription().hasFetches() ) {
// final FetchSource entityIdentifierAsFetchSource = (FetchSource) fetchOwnerAsEntityReference.getIdentifierDescription();
// for ( Fetch identifierFetch : entityIdentifierAsFetchSource.getFetches() ) {
// processFetch(
// selectStatementBuilder,
// fetchSource,
// identifierFetch,
// readerCollector,
// fetchStats
// );
// }
// }
// }
// process its identifier fetches first (building EntityReferenceInitializers for them if needed)
if ( fetch.getIdentifierDescription().hasFetches() ) {
final FetchSource entityIdentifierAsFetchSource = (FetchSource) fetch.getIdentifierDescription();
for ( Fetch identifierFetch : entityIdentifierAsFetchSource.getFetches() ) {
processFetch(
selectStatementBuilder,
fetch,
identifierFetch,
readerCollector,
fetchStats
);
}
}
// build an EntityReferenceInitializers for the incoming fetch itself
readerCollector.add( new EntityReferenceInitializerImpl( fetch, aliases ) );
@ -691,7 +524,9 @@ public class LoadQueryJoinAndFetchProcessor {
FetchStatsImpl fetchStats) {
fetchStats.processingFetch( fetch );
final CollectionReferenceAliases aliases = aliasResolutionContext.resolveCollectionReferenceAliases( fetch.getQuerySpaceUid() );
final CollectionReferenceAliases aliases = aliasResolutionContext.resolveCollectionReferenceAliases(
fetch.getQuerySpaceUid()
);
final QueryableCollection queryableCollection = (QueryableCollection) fetch.getCollectionPersister();
final Joinable joinableCollection = (Joinable) fetch.getCollectionPersister();
@ -746,17 +581,10 @@ public class LoadQueryJoinAndFetchProcessor {
}
// add an EntityReferenceInitializer for the collection elements (keys also?)
final EntityReferenceAliases entityReferenceAliases = new EntityReferenceAliases() {
@Override
public String getTableAlias() {
return aliases.getCollectionTableAlias();
}
@Override
public EntityAliases getColumnAliases() {
return aliases.getEntityElementColumnAliases();
}
};
final EntityReferenceAliases entityReferenceAliases = new EntityReferenceAliasesImpl(
aliases.getCollectionTableAlias(),
aliases.getEntityElementColumnAliases()
);
aliasResolutionContext.registerQuerySpaceAliases( fetch.getQuerySpaceUid(), entityReferenceAliases );
readerCollector.add(
new EntityReferenceInitializerImpl(
@ -766,12 +594,10 @@ public class LoadQueryJoinAndFetchProcessor {
);
}
else {
final String rhsTableAlias = aliases.getElementTableAlias();
// select the "collection columns"
selectStatementBuilder.appendSelectClauseFragment(
queryableCollection.selectFragment(
rhsTableAlias,
aliases.getElementTableAlias(),
aliases.getCollectionColumnAliases().getSuffix()
)
);
@ -785,17 +611,10 @@ public class LoadQueryJoinAndFetchProcessor {
aliases.getEntityElementColumnAliases().getSuffix()
)
);
final EntityReferenceAliases entityReferenceAliases = new EntityReferenceAliases() {
@Override
public String getTableAlias() {
return aliases.getElementTableAlias();
}
@Override
public EntityAliases getColumnAliases() {
return aliases.getEntityElementColumnAliases();
}
};
final EntityReferenceAliases entityReferenceAliases = new EntityReferenceAliasesImpl(
aliases.getElementTableAlias(),
aliases.getEntityElementColumnAliases()
);
aliasResolutionContext.registerQuerySpaceAliases( fetch.getQuerySpaceUid(), entityReferenceAliases );
readerCollector.add(
new EntityReferenceInitializerImpl(
@ -805,7 +624,7 @@ public class LoadQueryJoinAndFetchProcessor {
);
}
final String ordering = queryableCollection.getSQLOrderByString( rhsTableAlias );
final String ordering = queryableCollection.getSQLOrderByString( aliases.getElementTableAlias() );
if ( StringHelper.isNotEmpty( ordering ) ) {
selectStatementBuilder.appendOrderByFragment( ordering );
}

View File

@ -0,0 +1,47 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.loader.plan2.exec.process.internal;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.loader.plan2.exec.process.spi.ResultSetProcessingContext;
import org.hibernate.loader.plan2.exec.process.spi.ReturnReader;
import org.hibernate.loader.plan2.spi.CollectionReturn;
/**
* @author Steve Ebersole
*/
public class CollectionReturnReader implements ReturnReader {
private final CollectionReturn collectionReturn;
public CollectionReturnReader(CollectionReturn collectionReturn) {
this.collectionReturn = collectionReturn;
}
@Override
public Object read(ResultSet resultSet, ResultSetProcessingContext context) throws SQLException {
return null; // todo : not yet implemented
}
}

View File

@ -210,6 +210,7 @@ public class EntityReferenceInitializerImpl implements EntityReferenceInitialize
// use the existing association as the hydrated state
processingState.registerEntityInstance( existing );
//context.registerHydratedEntity( entityReference, entityKey, existing );
return;
}

View File

@ -30,7 +30,6 @@ import org.hibernate.AssertionFailure;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.loader.plan2.exec.process.spi.ResultSetProcessingContext;
import org.hibernate.loader.plan2.exec.process.spi.ReturnReader;
import org.hibernate.loader.plan2.exec.spi.EntityReferenceAliases;
import org.hibernate.loader.plan2.spi.EntityReturn;
import org.hibernate.proxy.HibernateProxy;
@ -41,11 +40,9 @@ import static org.hibernate.loader.plan2.exec.process.spi.ResultSetProcessingCon
*/
public class EntityReturnReader implements ReturnReader {
private final EntityReturn entityReturn;
private final EntityReferenceAliases aliases;
public EntityReturnReader(EntityReturn entityReturn, EntityReferenceAliases aliases) {
public EntityReturnReader(EntityReturn entityReturn) {
this.entityReturn = entityReturn;
this.aliases = aliases;
}
public EntityReferenceProcessingState getIdentifierResolutionContext(ResultSetProcessingContext context) {

View File

@ -287,6 +287,9 @@ public class ResultSetProcessingContextImpl implements ResultSetProcessingContex
*/
void finishUpRow() {
if ( currentRowHydratedEntityRegistrationList == null ) {
if ( identifierResolutionContextMap != null ) {
identifierResolutionContextMap.clear();
}
return;
}

View File

@ -169,13 +169,13 @@ public class ResultSetProcessorImpl implements ResultSetProcessor {
"Preparing collection intializer : %s",
MessageHelper.collectionInfoString( persister, key, session.getFactory() )
);
}
session.getPersistenceContext()
.getLoadContexts()
.getCollectionLoadContext( resultSet )
.getLoadingCollection( persister, key );
}
}
}
// private class LocalVisitationStrategy extends LoadPlanVisitationStrategyAdapter {

View File

@ -25,6 +25,8 @@ package org.hibernate.loader.plan2.exec.process.spi;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -55,18 +57,27 @@ import org.hibernate.persister.entity.Loadable;
public abstract class AbstractRowReader implements RowReader {
private static final Logger log = CoreLogging.logger( AbstractRowReader.class );
protected abstract List<EntityReferenceInitializer> getEntityReferenceInitializers();
protected abstract List<CollectionReferenceInitializer> getArrayReferenceInitializers();
protected abstract List<CollectionReferenceInitializer> getCollectionReferenceInitializers();
private final List<EntityReferenceInitializer> entityReferenceInitializers;
private final List<CollectionReferenceInitializer> arrayReferenceInitializers;
private final List<CollectionReferenceInitializer> collectionReferenceInitializers;
public AbstractRowReader(ReaderCollector readerCollector) {
this.entityReferenceInitializers = readerCollector.getEntityReferenceInitializers() != null
? new ArrayList<EntityReferenceInitializer>( readerCollector.getEntityReferenceInitializers() )
: Collections.<EntityReferenceInitializer>emptyList();
this.arrayReferenceInitializers = readerCollector.getArrayReferenceInitializers() != null
? new ArrayList<CollectionReferenceInitializer>( readerCollector.getArrayReferenceInitializers() )
: Collections.<CollectionReferenceInitializer>emptyList();
this.collectionReferenceInitializers = readerCollector.getNonArrayCollectionReferenceInitializers() != null
? new ArrayList<CollectionReferenceInitializer>( readerCollector.getNonArrayCollectionReferenceInitializers() )
: Collections.<CollectionReferenceInitializer>emptyList();
}
protected abstract Object readLogicalRow(ResultSet resultSet, ResultSetProcessingContextImpl context)
throws SQLException;
@Override
public Object readRow(ResultSet resultSet, ResultSetProcessingContextImpl context) throws SQLException {
final List<EntityReferenceInitializer> entityReferenceInitializers = getEntityReferenceInitializers();
final List<CollectionReferenceInitializer> arrayReferenceInitializers = getArrayReferenceInitializers();
final List<CollectionReferenceInitializer> collectionReferenceInitializers = getCollectionReferenceInitializers();
final boolean hasEntityReferenceInitializers = CollectionHelper.isNotEmpty( entityReferenceInitializers );
@ -209,13 +220,10 @@ public abstract class AbstractRowReader implements RowReader {
}
private void finishLoadingArrays(ResultSetProcessingContextImpl context) {
final List<CollectionReferenceInitializer> arrayReferenceInitializers = getArrayReferenceInitializers();
if ( arrayReferenceInitializers != null ) {
for ( CollectionReferenceInitializer arrayReferenceInitializer : arrayReferenceInitializers ) {
arrayReferenceInitializer.endLoading( context );
}
}
}
private void performTwoPhaseLoad(
PreLoadEvent preLoadEvent,
@ -241,11 +249,8 @@ public abstract class AbstractRowReader implements RowReader {
}
private void finishLoadingCollections(ResultSetProcessingContextImpl context) {
final List<CollectionReferenceInitializer> collectionReferenceInitializers = getCollectionReferenceInitializers();
if ( collectionReferenceInitializers != null ) {
for ( CollectionReferenceInitializer arrayReferenceInitializer : collectionReferenceInitializers ) {
arrayReferenceInitializer.endLoading( context );
}
for ( CollectionReferenceInitializer collectionReferenceInitializer : collectionReferenceInitializers ) {
collectionReferenceInitializer.endLoading( context );
}
}

View File

@ -23,13 +23,23 @@
*/
package org.hibernate.loader.plan2.exec.process.spi;
import java.util.List;
/**
* Used as a callback mechanism while building the SQL statement to collect the needed ResultSet readers
* Used as a callback mechanism while building the SQL statement to collect the needed ResultSet initializers.
*
* @author Steve Ebersole
* @author Gail Badner
*/
public interface ReaderCollector {
public ReturnReader getReturnReader();
public void add(CollectionReferenceInitializer collectionReferenceInitializer);
public List<CollectionReferenceInitializer> getArrayReferenceInitializers();
public List<CollectionReferenceInitializer> getNonArrayCollectionReferenceInitializers();
public void add(EntityReferenceInitializer entityReferenceInitializer);
public List<EntityReferenceInitializer> getEntityReferenceInitializers();
public RowReader buildRowReader();
}

View File

@ -0,0 +1,292 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.loader.plan2.exec.spi;
import java.util.ArrayList;
import java.util.List;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.loader.plan2.build.spi.LoadPlanTreePrinter;
import org.hibernate.loader.plan2.exec.internal.AliasResolutionContextImpl;
import org.hibernate.loader.plan2.exec.internal.FetchStats;
import org.hibernate.loader.plan2.exec.internal.LoadQueryJoinAndFetchProcessor;
import org.hibernate.loader.plan2.exec.process.internal.ResultSetProcessorImpl;
import org.hibernate.loader.plan2.exec.process.spi.CollectionReferenceInitializer;
import org.hibernate.loader.plan2.exec.process.spi.EntityReferenceInitializer;
import org.hibernate.loader.plan2.exec.process.spi.ReaderCollector;
import org.hibernate.loader.plan2.exec.process.spi.ResultSetProcessor;
import org.hibernate.loader.plan2.exec.query.internal.SelectStatementBuilder;
import org.hibernate.loader.plan2.exec.query.spi.QueryBuildingParameters;
import org.hibernate.loader.plan2.spi.CollectionReturn;
import org.hibernate.loader.plan2.spi.FetchSource;
import org.hibernate.loader.plan2.spi.LoadPlan;
import org.hibernate.loader.plan2.spi.QuerySpace;
import org.hibernate.loader.plan2.spi.Return;
import org.hibernate.sql.ConditionFragment;
import org.hibernate.sql.DisjunctionFragment;
import org.hibernate.sql.InFragment;
/**
* @author Gail Badner
*/
public abstract class AbstractLoadQueryDetails implements LoadQueryDetails {
private final LoadPlan loadPlan;
private final String[] keyColumnNames;
private final Return rootReturn;
private final LoadQueryJoinAndFetchProcessor queryProcessor;
private String sqlStatement;
private ResultSetProcessor resultSetProcessor;
/**
* @param loadPlan The load plan being translated
* @param aliasResolutionContext The context used to generate and resolve table/column aliases
* @param buildingParameters The query building context
* @param keyColumnNames The columns to restrict on when loading by key
* @param rootReturn The root return reference we are processing
* @param factory The SessionFactory
*/
protected AbstractLoadQueryDetails(
LoadPlan loadPlan,
AliasResolutionContextImpl aliasResolutionContext,
QueryBuildingParameters buildingParameters,
String[] keyColumnNames,
Return rootReturn,
SessionFactoryImplementor factory) {
this.keyColumnNames = keyColumnNames;
this.rootReturn = rootReturn;
this.loadPlan = loadPlan;
this.queryProcessor = new LoadQueryJoinAndFetchProcessor( aliasResolutionContext, buildingParameters, factory );
}
protected QuerySpace getQuerySpace(String querySpaceUid) {
return loadPlan.getQuerySpaces().getQuerySpaceByUid( querySpaceUid );
}
@Override
public String getSqlStatement() {
return sqlStatement;
}
@Override
public ResultSetProcessor getResultSetProcessor() {
return resultSetProcessor;
}
protected final Return getRootReturn() {
return rootReturn;
}
protected final AliasResolutionContext getAliasResolutionContext() {
return queryProcessor.getAliasResolutionContext();
}
protected final QueryBuildingParameters getQueryBuildingParameters() {
return queryProcessor.getQueryBuildingParameters();
}
protected final SessionFactoryImplementor getSessionFactory() {
return queryProcessor.getSessionFactory();
}
/**
* Main entry point for properly handling the FROM clause, joins, and restrictions
*
*/
protected void generate() {
// There are 2 high-level requirements to perform here:
// 1) Determine the SQL required to carry out the given LoadPlan (and fulfill
// {@code LoadQueryDetails#getSqlStatement()}). SelectStatementBuilder collects the ongoing efforts to
// build the needed SQL.
// 2) Determine how to read information out of the ResultSet resulting from executing the indicated SQL
// (the SQL aliases). ReaderCollector and friends are where this work happens, ultimately
// producing a ResultSetProcessor
final SelectStatementBuilder select = new SelectStatementBuilder( queryProcessor.getSessionFactory().getDialect() );
// LoadPlan is broken down into 2 high-level pieces that we need to process here.
//
// First is the QuerySpaces, which roughly equates to the SQL FROM-clause. We'll cycle through
// those first, generating aliases into the AliasContext in addition to writing SQL FROM-clause information
// into SelectStatementBuilder. The AliasContext is populated here and then reused while processing the SQL
// SELECT-clause into the SelectStatementBuilder, and again later to build the ResultSetProcessor
applyRootReturnTableFragments( select );
if ( shouldApplyRootReturnFilterBeforeKeyRestriction() ) {
applyRootReturnFilterRestrictions( select );
// add restrictions...
// first, the load key restrictions (which entity(s)/collection(s) do we want to load?)
applyKeyRestriction(
select,
getRootTableAlias(),
keyColumnNames,
getQueryBuildingParameters().getBatchSize()
);
}
else {
// add restrictions...
// first, the load key restrictions (which entity(s)/collection(s) do we want to load?)
applyKeyRestriction(
select,
getRootTableAlias(),
keyColumnNames,
getQueryBuildingParameters().getBatchSize()
);
applyRootReturnFilterRestrictions( select );
}
applyRootReturnWhereJoinRestrictions( select );
applyRootReturnOrderByFragments( select );
// then move on to joins...
applyRootReturnSelectFragments( select );
queryProcessor.processQuerySpaceJoins( getRootQuerySpace(), select );
// Next, we process the Returns and Fetches building the SELECT clause and at the same time building
// Readers for reading the described results out of a SQL ResultSet
FetchStats fetchStats = null;
if ( FetchSource.class.isInstance( rootReturn ) ) {
fetchStats = queryProcessor.processFetches(
(FetchSource) rootReturn,
select,
getReaderCollector()
);
}
else if ( CollectionReturn.class.isInstance( rootReturn ) ) {
final CollectionReturn collectionReturn = (CollectionReturn) rootReturn;
if ( collectionReturn.getElementGraph() != null ) {
fetchStats = queryProcessor.processFetches(
collectionReturn.getElementGraph(),
select,
getReaderCollector()
);
}
// TODO: what about index???
}
LoadPlanTreePrinter.INSTANCE.logTree( loadPlan, queryProcessor.getAliasResolutionContext() );
this.sqlStatement = select.toStatementString();
this.resultSetProcessor = new ResultSetProcessorImpl(
loadPlan,
getReaderCollector().buildRowReader(),
fetchStats != null && fetchStats.hasSubselectFetches()
);
}
protected abstract ReaderCollector getReaderCollector();
protected abstract QuerySpace getRootQuerySpace();
protected abstract String getRootTableAlias();
protected abstract boolean shouldApplyRootReturnFilterBeforeKeyRestriction();
protected abstract void applyRootReturnSelectFragments(SelectStatementBuilder selectStatementBuilder );
protected abstract void applyRootReturnTableFragments(SelectStatementBuilder selectStatementBuilder);
protected abstract void applyRootReturnFilterRestrictions(SelectStatementBuilder selectStatementBuilder);
protected abstract void applyRootReturnWhereJoinRestrictions(SelectStatementBuilder selectStatementBuilder);
protected abstract void applyRootReturnOrderByFragments(SelectStatementBuilder selectStatementBuilder);
private static void applyKeyRestriction(SelectStatementBuilder select, String alias, String[] keyColumnNames, int batchSize) {
if ( keyColumnNames.length==1 ) {
// NOT A COMPOSITE KEY
// for batching, use "foo in (?, ?, ?)"
// for no batching, use "foo = ?"
// (that distinction is handled inside InFragment)
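// e.g. an illustrative alias "coll" with key column "id" and batchSize 3 renders "coll.id in (?, ?, ?)"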
final InFragment in = new InFragment().setColumn( alias, keyColumnNames[0] );
for ( int i = 0; i < batchSize; i++ ) {
in.addValue( "?" );
}
select.appendRestrictions( in.toFragmentString() );
}
else {
// A COMPOSITE KEY...
final ConditionFragment keyRestrictionBuilder = new ConditionFragment()
.setTableAlias( alias )
.setCondition( keyColumnNames, "?" );
final String keyRestrictionFragment = keyRestrictionBuilder.toFragmentString();
StringBuilder restrictions = new StringBuilder();
if ( batchSize==1 ) {
// for no batching, use "foo = ? and bar = ?"
restrictions.append( keyRestrictionFragment );
}
else {
// for batching, use "( (foo = ? and bar = ?) or (foo = ? and bar = ?) )"
restrictions.append( '(' );
DisjunctionFragment df = new DisjunctionFragment();
for ( int i=0; i<batchSize; i++ ) {
df.addCondition( keyRestrictionFragment );
}
restrictions.append( df.toFragmentString() );
restrictions.append( ')' );
}
select.appendRestrictions( restrictions.toString() );
}
}
protected abstract static class ReaderCollectorImpl implements ReaderCollector {
private final List<EntityReferenceInitializer> entityReferenceInitializers = new ArrayList<EntityReferenceInitializer>();
private List<CollectionReferenceInitializer> arrayReferenceInitializers;
private List<CollectionReferenceInitializer> collectionReferenceInitializers;
@Override
public void add(CollectionReferenceInitializer collectionReferenceInitializer) {
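// array references are kept separate from other collection references so the row reader can treat them distinctly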
if ( collectionReferenceInitializer.getCollectionReference().getCollectionPersister().isArray() ) {
if ( arrayReferenceInitializers == null ) {
arrayReferenceInitializers = new ArrayList<CollectionReferenceInitializer>();
}
arrayReferenceInitializers.add( collectionReferenceInitializer );
}
else {
if ( collectionReferenceInitializers == null ) {
collectionReferenceInitializers = new ArrayList<CollectionReferenceInitializer>();
}
collectionReferenceInitializers.add( collectionReferenceInitializer );
}
}
@Override
public void add(EntityReferenceInitializer entityReferenceInitializer) {
entityReferenceInitializers.add( entityReferenceInitializer );
}
public final List<EntityReferenceInitializer> getEntityReferenceInitializers() {
return entityReferenceInitializers;
}
public List<CollectionReferenceInitializer> getArrayReferenceInitializers() {
return arrayReferenceInitializers;
}
public List<CollectionReferenceInitializer> getNonArrayCollectionReferenceInitializers() {
return collectionReferenceInitializers;
}
}
}

View File

@ -0,0 +1,130 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.loader.plan2.exec.spi;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.loader.plan2.exec.internal.AliasResolutionContextImpl;
import org.hibernate.loader.plan2.exec.internal.Helper;
import org.hibernate.loader.plan2.exec.query.internal.SelectStatementBuilder;
import org.hibernate.loader.plan2.exec.query.spi.QueryBuildingParameters;
import org.hibernate.loader.plan2.spi.CollectionQuerySpace;
import org.hibernate.loader.plan2.spi.CollectionReturn;
import org.hibernate.loader.plan2.spi.EntityReference;
import org.hibernate.loader.plan2.spi.JoinDefinedByMetadata;
import org.hibernate.loader.plan2.spi.LoadPlan;
import org.hibernate.persister.collection.CollectionPropertyNames;
import org.hibernate.persister.entity.OuterJoinLoadable;
/**
* @author Gail Badner
*/
public class BasicCollectionLoadQueryDetails extends CollectionLoadQueryDetails {
/**
 * Constructs a CollectionLoadQueryDetails object from the given inputs.
 *
 * @param loadPlan The load plan
 * @param buildingParameters Any influencers that would affect the generated SQL (mostly we are concerned with those
 * that add additional joins here)
 * @param factory The SessionFactory
 *
 * @return The CollectionLoadQueryDetails
 */
public static CollectionLoadQueryDetails makeForBatching(
LoadPlan loadPlan,
QueryBuildingParameters buildingParameters,
SessionFactoryImplementor factory) {
final CollectionReturn rootReturn = Helper.INSTANCE.extractRootReturn( loadPlan, CollectionReturn.class );
final AliasResolutionContextImpl aliasResolutionContext = new AliasResolutionContextImpl( factory );
return new BasicCollectionLoadQueryDetails(
loadPlan,
aliasResolutionContext,
rootReturn,
buildingParameters,
factory
);
}
protected BasicCollectionLoadQueryDetails(
LoadPlan loadPlan,
AliasResolutionContextImpl aliasResolutionContext,
CollectionReturn rootReturn,
QueryBuildingParameters buildingParameters,
SessionFactoryImplementor factory) {
super(
loadPlan,
aliasResolutionContext,
rootReturn,
buildingParameters,
factory
);
generate();
}
@Override
protected String getRootTableAlias() {
return getCollectionReferenceAliases().getCollectionTableAlias();
}
@Override
protected void applyRootReturnSelectFragments(SelectStatementBuilder selectStatementBuilder) {
selectStatementBuilder.appendSelectClauseFragment(
getQueryableCollection().selectFragment(
getCollectionReferenceAliases().getCollectionTableAlias(),
getCollectionReferenceAliases().getCollectionColumnAliases().getSuffix()
)
);
if ( getQueryableCollection().isManyToMany() ) {
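// for a many-to-many, also select the element entity's columns from the joined element table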
final OuterJoinLoadable elementPersister = (OuterJoinLoadable) getQueryableCollection().getElementPersister();
selectStatementBuilder.appendSelectClauseFragment(
elementPersister.selectFragment(
getCollectionReferenceAliases().getElementTableAlias(),
getCollectionReferenceAliases().getEntityElementColumnAliases().getSuffix()
)
);
}
super.applyRootReturnSelectFragments( selectStatementBuilder );
}
@Override
protected void applyRootReturnTableFragments(SelectStatementBuilder selectStatementBuilder) {
selectStatementBuilder.appendFromClauseFragment(
getQueryableCollection().getTableName(),
getCollectionReferenceAliases().getCollectionTableAlias()
);
}
@Override
protected void applyRootReturnOrderByFragments(SelectStatementBuilder selectStatementBuilder) {
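// apply any many-to-many ordering on the element table first; the superclass then applies the collection's own order-by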
final String manyToManyOrdering = getQueryableCollection().getManyToManyOrderByString(
getCollectionReferenceAliases().getElementTableAlias()
);
if ( StringHelper.isNotEmpty( manyToManyOrdering ) ) {
selectStatementBuilder.appendOrderByFragment( manyToManyOrdering );
}
super.applyRootReturnOrderByFragments( selectStatementBuilder );
}
}

View File

@ -0,0 +1,215 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.loader.plan2.exec.spi;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.loader.plan2.exec.internal.AliasResolutionContextImpl;
import org.hibernate.loader.plan2.exec.internal.EntityReferenceAliasesImpl;
import org.hibernate.loader.plan2.exec.process.internal.CollectionReferenceInitializerImpl;
import org.hibernate.loader.plan2.exec.process.internal.CollectionReturnReader;
import org.hibernate.loader.plan2.exec.process.internal.EntityReferenceInitializerImpl;
import org.hibernate.loader.plan2.exec.process.internal.ResultSetProcessingContextImpl;
import org.hibernate.loader.plan2.exec.process.spi.AbstractRowReader;
import org.hibernate.loader.plan2.exec.process.spi.CollectionReferenceInitializer;
import org.hibernate.loader.plan2.exec.process.spi.ReaderCollector;
import org.hibernate.loader.plan2.exec.process.spi.RowReader;
import org.hibernate.loader.plan2.exec.query.internal.SelectStatementBuilder;
import org.hibernate.loader.plan2.exec.query.spi.QueryBuildingParameters;
import org.hibernate.loader.plan2.spi.CollectionQuerySpace;
import org.hibernate.loader.plan2.spi.CollectionReturn;
import org.hibernate.loader.plan2.spi.EntityReference;
import org.hibernate.loader.plan2.spi.LoadPlan;
import org.hibernate.persister.collection.QueryableCollection;
import org.hibernate.persister.entity.OuterJoinLoadable;
/**
* Handles interpreting a LoadPlan (for loading of a collection) by:<ul>
* <li>generating the SQL query to perform</li>
* <li>creating the readers needed to read the results from the SQL's ResultSet</li>
* </ul>
*
* @author Gail Badner
*/
public abstract class CollectionLoadQueryDetails extends AbstractLoadQueryDetails {
private final CollectionReferenceAliases collectionReferenceAliases;
private final ReaderCollector readerCollector;
protected CollectionLoadQueryDetails(
LoadPlan loadPlan,
AliasResolutionContextImpl aliasResolutionContext,
CollectionReturn rootReturn,
QueryBuildingParameters buildingParameters,
SessionFactoryImplementor factory) {
super(
loadPlan,
aliasResolutionContext,
buildingParameters,
( (QueryableCollection) rootReturn.getCollectionPersister() ).getKeyColumnNames(),
rootReturn,
// collectionReferenceAliases.getCollectionTableAlias(),
// collectionReferenceAliases.getCollectionColumnAliases().getSuffix(),
// loadPlan.getQuerySpaces().getQuerySpaceByUid( rootReturn.getQuerySpaceUid() ),
// (OuterJoinLoadable) rootReturn.getCollectionPersister(),
factory
);
this.collectionReferenceAliases = aliasResolutionContext.generateCollectionReferenceAliases(
rootReturn.getQuerySpaceUid(),
rootReturn.getCollectionPersister()
);
this.readerCollector = new CollectionLoaderReaderCollectorImpl(
new CollectionReturnReader( rootReturn ),
new CollectionReferenceInitializerImpl( rootReturn, collectionReferenceAliases )
);
if ( rootReturn.getCollectionPersister().getElementType().isEntityType() ) {
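// an entity-typed element gets its own aliases and initializer so the element entities can be read from the same ResultSet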
final EntityReference elementEntityReference = rootReturn.getElementGraph().resolveEntityReference();
final EntityReferenceAliases elementEntityReferenceAliases = new EntityReferenceAliasesImpl(
collectionReferenceAliases.getElementTableAlias(),
collectionReferenceAliases.getEntityElementColumnAliases()
);
aliasResolutionContext.registerQuerySpaceAliases(
elementEntityReference.getQuerySpaceUid(),
elementEntityReferenceAliases
);
readerCollector.add(
new EntityReferenceInitializerImpl( elementEntityReference, elementEntityReferenceAliases )
);
}
if ( rootReturn.getCollectionPersister().hasIndex() &&
rootReturn.getCollectionPersister().getIndexType().isEntityType() ) {
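// likewise, an entity-typed index (e.g. a Map keyed by an entity) needs its own aliases and initializer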
final EntityReference indexEntityReference = rootReturn.getIndexGraph().resolveEntityReference();
final EntityReferenceAliases indexEntityReferenceAliases = aliasResolutionContext.generateEntityReferenceAliases(
indexEntityReference.getQuerySpaceUid(),
indexEntityReference.getEntityPersister()
);
readerCollector.add(
new EntityReferenceInitializerImpl( indexEntityReference, indexEntityReferenceAliases )
);
}
}
protected CollectionReturn getRootCollectionReturn() {
return (CollectionReturn) getRootReturn();
}
@Override
protected ReaderCollector getReaderCollector() {
return readerCollector;
}
@Override
protected CollectionQuerySpace getRootQuerySpace() {
return (CollectionQuerySpace) getQuerySpace( getRootCollectionReturn().getQuerySpaceUid() );
}
protected CollectionReferenceAliases getCollectionReferenceAliases() {
return collectionReferenceAliases;
}
protected QueryableCollection getQueryableCollection() {
return (QueryableCollection) getRootCollectionReturn().getCollectionPersister();
}
@Override
protected boolean shouldApplyRootReturnFilterBeforeKeyRestriction() {
return true;
}
@Override
protected void applyRootReturnSelectFragments(SelectStatementBuilder selectStatementBuilder) {
if ( getQueryableCollection().hasIndex() &&
getQueryableCollection().getIndexType().isEntityType() ) {
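// select the entity-typed index columns using the aliases registered in the constructor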
final EntityReference indexEntityReference = getRootCollectionReturn().getIndexGraph().resolveEntityReference();
final EntityReferenceAliases indexEntityReferenceAliases = getAliasResolutionContext().resolveEntityReferenceAliases(
indexEntityReference.getQuerySpaceUid()
);
selectStatementBuilder.appendSelectClauseFragment(
( (OuterJoinLoadable) indexEntityReference.getEntityPersister() ).selectFragment(
indexEntityReferenceAliases.getTableAlias(),
indexEntityReferenceAliases.getColumnAliases().getSuffix()
)
);
}
}
@Override
protected void applyRootReturnFilterRestrictions(SelectStatementBuilder selectStatementBuilder) {
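// restrict by any filters enabled on the collection, applied against the root (collection) table alias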
selectStatementBuilder.appendRestrictions(
getQueryableCollection().filterFragment(
getRootTableAlias(),
getQueryBuildingParameters().getQueryInfluencers().getEnabledFilters()
)
);
}
@Override
protected void applyRootReturnWhereJoinRestrictions(SelectStatementBuilder selectStatementBuilder) {
}
@Override
protected void applyRootReturnOrderByFragments(SelectStatementBuilder selectStatementBuilder) {
final String ordering = getQueryableCollection().getSQLOrderByString( getRootTableAlias() );
if ( StringHelper.isNotEmpty( ordering ) ) {
selectStatementBuilder.appendOrderByFragment( ordering );
}
}
private static class CollectionLoaderReaderCollectorImpl extends ReaderCollectorImpl {
private final CollectionReturnReader collectionReturnReader;
public CollectionLoaderReaderCollectorImpl(
CollectionReturnReader collectionReturnReader,
CollectionReferenceInitializer collectionReferenceInitializer) {
this.collectionReturnReader = collectionReturnReader;
add( collectionReferenceInitializer );
}
@Override
public RowReader buildRowReader() {
return new CollectionLoaderRowReader( this );
}
@Override
public CollectionReturnReader getReturnReader() {
return collectionReturnReader;
}
}
public static class CollectionLoaderRowReader extends AbstractRowReader {
private final CollectionReturnReader rootReturnReader;
public CollectionLoaderRowReader(CollectionLoaderReaderCollectorImpl collectionLoaderReaderCollector) {
super( collectionLoaderReaderCollector );
this.rootReturnReader = collectionLoaderReaderCollector.getReturnReader();
}
@Override
protected Object readLogicalRow(ResultSet resultSet, ResultSetProcessingContextImpl context) throws SQLException {
return rootReturnReader.read( resultSet, context );
}
}
}

View File

@ -25,9 +25,7 @@ package org.hibernate.loader.plan2.exec.spi;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.jboss.logging.Logger;
@ -36,35 +34,26 @@ import org.hibernate.Session;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.CoreLogging;
import org.hibernate.loader.plan2.build.spi.LoadPlanTreePrinter;
import org.hibernate.loader.plan2.exec.internal.AliasResolutionContextImpl;
import org.hibernate.loader.plan2.exec.internal.FetchStats;
import org.hibernate.loader.plan2.exec.internal.Helper;
import org.hibernate.loader.plan2.exec.internal.LoadQueryJoinAndFetchProcessor;
import org.hibernate.loader.plan2.exec.process.internal.EntityReferenceInitializerImpl;
import org.hibernate.loader.plan2.exec.process.internal.EntityReturnReader;
import org.hibernate.loader.plan2.exec.process.internal.ResultSetProcessingContextImpl;
import org.hibernate.loader.plan2.exec.process.internal.ResultSetProcessorHelper;
import org.hibernate.loader.plan2.exec.process.internal.ResultSetProcessorImpl;
import org.hibernate.loader.plan2.exec.process.spi.AbstractRowReader;
import org.hibernate.loader.plan2.exec.process.spi.CollectionReferenceInitializer;
import org.hibernate.loader.plan2.exec.process.spi.EntityReferenceInitializer;
import org.hibernate.loader.plan2.exec.process.spi.ReaderCollector;
import org.hibernate.loader.plan2.exec.process.spi.ResultSetProcessingContext;
import org.hibernate.loader.plan2.exec.process.spi.ResultSetProcessor;
import org.hibernate.loader.plan2.exec.process.spi.RowReader;
import org.hibernate.loader.plan2.exec.query.internal.SelectStatementBuilder;
import org.hibernate.loader.plan2.exec.query.spi.QueryBuildingParameters;
import org.hibernate.loader.plan2.spi.EntityQuerySpace;
import org.hibernate.loader.plan2.spi.EntityReturn;
import org.hibernate.loader.plan2.spi.LoadPlan;
import org.hibernate.loader.plan2.spi.QuerySpaces;
import org.hibernate.loader.plan2.spi.QuerySpace;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.Joinable;
import org.hibernate.persister.entity.OuterJoinLoadable;
import org.hibernate.persister.entity.Queryable;
import org.hibernate.sql.ConditionFragment;
import org.hibernate.sql.DisjunctionFragment;
import org.hibernate.sql.InFragment;
import org.hibernate.type.ComponentType;
import org.hibernate.type.Type;
@ -76,24 +65,9 @@ import org.hibernate.type.Type;
*
* @author Steve Ebersole
*/
public class EntityLoadQueryDetails implements LoadQueryDetails {
public class EntityLoadQueryDetails extends AbstractLoadQueryDetails {
private static final Logger log = CoreLogging.logger( EntityLoadQueryDetails.class );
private final LoadPlan loadPlan;
private final String sqlStatement;
private final ResultSetProcessor resultSetProcessor;
@Override
public String getSqlStatement() {
return sqlStatement;
}
@Override
public ResultSetProcessor getResultSetProcessor() {
return resultSetProcessor;
}
/**
* Constructs an EntityLoadQueryDetails object from the given inputs.
*
@ -113,309 +87,171 @@ public class EntityLoadQueryDetails implements LoadQueryDetails {
final int batchSize = buildingParameters.getBatchSize();
final boolean shouldUseOptionalEntityInformation = batchSize == 1;
final EntityReturn rootReturn = Helper.INSTANCE.extractRootReturn( loadPlan, EntityReturn.class );
final String[] keyColumnNamesToUse = keyColumnNames != null
? keyColumnNames
: ( (Queryable) rootReturn.getEntityPersister() ).getIdentifierColumnNames();
// Should be just one querySpace (of type EntityQuerySpace) in querySpaces. Should we validate that?
// Should we make it a util method on Helper like we do for extractRootReturn ?
final AliasResolutionContextImpl aliasResolutionContext = new AliasResolutionContextImpl( factory );
return new EntityLoadQueryDetails(
loadPlan,
keyColumnNames,
shouldUseOptionalEntityInformation,
keyColumnNamesToUse,
aliasResolutionContext,
rootReturn,
buildingParameters,
factory
);
}
private final EntityReferenceAliases entityReferenceAliases;
private final ReaderCollector readerCollector;
protected EntityLoadQueryDetails(
LoadPlan loadPlan,
String[] keyColumnNames,
boolean shouldUseOptionalEntityInformation,
AliasResolutionContextImpl aliasResolutionContext,
EntityReturn rootReturn,
QueryBuildingParameters buildingParameters,
SessionFactoryImplementor factory) {
this.loadPlan = loadPlan;
final AliasResolutionContextImpl aliasResolutionContext = new AliasResolutionContextImpl( factory );
// LoadPlanTreePrinter.INSTANCE.logTree( loadPlan, aliasResolutionContext );
// if ( log.isDebugEnabled() ) {
// log.debug( LoadPlanTreePrinter.INSTANCE.toString( loadPlan ) );
// }
// There are 2 high-level requirements to perform here:
// 1) Determine the SQL required to carry out the given LoadPlan (and fulfill
// {@code LoadQueryDetails#getSqlStatement()}). SelectStatementBuilder collects the ongoing efforts to
// build the needed SQL.
// 2) Determine how to read information out of the ResultSet resulting from executing the indicated SQL
// (the SQL aliases). ReaderCollector and friends are where this work happens, ultimately
// producing a ResultSetProcessor
final SelectStatementBuilder select = new SelectStatementBuilder( factory.getDialect() );
final EntityReturn rootReturn = Helper.INSTANCE.extractRootReturn( loadPlan, EntityReturn.class );
final ReaderCollectorImpl readerCollector = new ReaderCollectorImpl();
final LoadQueryJoinAndFetchProcessor helper = new LoadQueryJoinAndFetchProcessor( aliasResolutionContext , buildingParameters, factory );
final String[] keyColumnNamesToUse = keyColumnNames != null
? keyColumnNames
: ( (Queryable) rootReturn.getEntityPersister() ).getIdentifierColumnNames();
// LoadPlan is broken down into 2 high-level pieces that we need to process here.
//
// First is the QuerySpaces, which roughly equates to the SQL FROM-clause. We'll cycle through
// those first, generating aliases into the AliasContext in addition to writing SQL FROM-clause information
// into SelectStatementBuilder. The AliasContext is populated here and then reused while processing the SQL
// SELECT-clause into the SelectStatementBuilder, and again to build the ResultSetProcessor
processQuerySpaces(
loadPlan.getQuerySpaces(),
select,
keyColumnNamesToUse,
helper,
super(
loadPlan,
aliasResolutionContext,
buildingParameters,
keyColumnNames,
rootReturn,
factory
);
// Next, we process the Returns and Fetches building the SELECT clause and at the same time building
// Readers for reading the described results out of a SQL ResultSet
final FetchStats fetchStats = processReturnAndFetches(
rootReturn,
select,
helper,
readerCollector,
aliasResolutionContext
this.entityReferenceAliases = aliasResolutionContext.generateEntityReferenceAliases(
rootReturn.getQuerySpaceUid(),
rootReturn.getEntityPersister()
);
LoadPlanTreePrinter.INSTANCE.logTree( loadPlan, aliasResolutionContext );
this.sqlStatement = select.toStatementString();
this.resultSetProcessor = new ResultSetProcessorImpl(
loadPlan,
readerCollector.buildRowReader(),
fetchStats.hasSubselectFetches()
this.readerCollector = new EntityLoaderReaderCollectorImpl(
new EntityReturnReader( rootReturn ),
new EntityReferenceInitializerImpl( rootReturn, entityReferenceAliases, true )
);
generate();
}
/**
* Main entry point for building the SQL SELECT clause and the corresponding Readers.
*
* @param rootReturn The root return reference we are processing
* @param select The SelectStatementBuilder
* @param helper The Join/Fetch helper
* @param readerCollector Collector for EntityReferenceInitializer and CollectionReferenceInitializer references
* @param aliasResolutionContext The alias resolution context
*
* @return Stats about the processed fetches
*/
private FetchStats processReturnAndFetches(
EntityReturn rootReturn,
SelectStatementBuilder select,
LoadQueryJoinAndFetchProcessor helper,
ReaderCollectorImpl readerCollector,
AliasResolutionContextImpl aliasResolutionContext) {
final EntityReferenceAliases entityReferenceAliases = aliasResolutionContext.resolveEntityReferenceAliases(
rootReturn.getQuerySpaceUid()
);
final OuterJoinLoadable rootLoadable = (OuterJoinLoadable) rootReturn.getEntityPersister();
// add the root persister SELECT fragments...
select.appendSelectClauseFragment(
rootLoadable.selectFragment(
entityReferenceAliases.getTableAlias(),
entityReferenceAliases.getColumnAliases().getSuffix()
)
);
final FetchStats fetchStats = helper.processFetches(
rootReturn,
select,
readerCollector
);
readerCollector.setRootReturnReader( new EntityReturnReader( rootReturn, entityReferenceAliases ) );
readerCollector.add( new EntityReferenceInitializerImpl( rootReturn, entityReferenceAliases, true ) );
return fetchStats;
private EntityReturn getRootEntityReturn() {
return (EntityReturn) getRootReturn();
}
/**
* Main entry point for properly handling the FROM clause, joins, and restrictions
*
* @param querySpaces The QuerySpaces
* @param select The SelectStatementBuilder
* @param keyColumnNamesToUse The column names to use from the entity table (space) in crafting the entity restriction
* (which entity/entities are we interested in?)
* @param helper The Join/Fetch helper
* @param aliasResolutionContext The alias resolution context
* @param buildingParameters The query building parameters
* @param factory The SessionFactory
*/
private void processQuerySpaces(
QuerySpaces querySpaces,
SelectStatementBuilder select,
String[] keyColumnNamesToUse,
LoadQueryJoinAndFetchProcessor helper,
AliasResolutionContextImpl aliasResolutionContext,
QueryBuildingParameters buildingParameters,
SessionFactoryImplementor factory) {
// Should be just one querySpace (of type EntityQuerySpace) in querySpaces. Should we validate that?
// Should we make it a util method on Helper like we do for extractRootReturn ?
final EntityQuerySpace rootQuerySpace = Helper.INSTANCE.extractRootQuerySpace(
querySpaces,
EntityQuerySpace.class
);
final EntityReferenceAliases entityReferenceAliases = aliasResolutionContext.generateEntityReferenceAliases(
rootQuerySpace.getUid(),
rootQuerySpace.getEntityPersister()
);
final String rootTableAlias = entityReferenceAliases.getTableAlias();
applyTableFragments(
select,
factory,
buildingParameters,
rootTableAlias,
(OuterJoinLoadable) rootQuerySpace.getEntityPersister()
);
// add restrictions...
// first, the load key restrictions (which entity(s) do we want to load?)
applyKeyRestriction(
select,
entityReferenceAliases.getTableAlias(),
keyColumnNamesToUse,
buildingParameters.getBatchSize()
);
// don't quite remember why these 2 are needed anymore; todo: research that and document this code, or remove it
final OuterJoinLoadable rootLoadable = (OuterJoinLoadable) rootQuerySpace.getEntityPersister();
final Queryable rootQueryable = (Queryable) rootQuerySpace.getEntityPersister();
select.appendRestrictions(
rootQueryable.filterFragment(
entityReferenceAliases.getTableAlias(),
Collections.emptyMap()
)
);
select.appendRestrictions(
rootLoadable.whereJoinFragment(
entityReferenceAliases.getTableAlias(),
true,
true
)
);
// then move on to joins...
helper.processQuerySpaceJoins( rootQuerySpace, select );
}
/**
* Applies "table fragments" to the FROM-CLAUSE of the given SelectStatementBuilder for the given Loadable
*
* @param select The SELECT statement builder
* @param factory The SessionFactory
* @param buildingParameters The query building context
* @param rootAlias The table alias to use
* @param rootLoadable The persister
*
* @see org.hibernate.persister.entity.OuterJoinLoadable#fromTableFragment(java.lang.String)
* @see org.hibernate.persister.entity.Joinable#fromJoinFragment(java.lang.String, boolean, boolean)
*/
private void applyTableFragments(
SelectStatementBuilder select,
SessionFactoryImplementor factory,
QueryBuildingParameters buildingParameters,
String rootAlias,
OuterJoinLoadable rootLoadable) {
protected void applyRootReturnTableFragments(SelectStatementBuilder select) {
final String fromTableFragment;
if ( buildingParameters.getLockOptions() != null ) {
fromTableFragment = factory.getDialect().appendLockHint(
buildingParameters.getLockOptions(),
rootLoadable.fromTableFragment( rootAlias )
final String rootAlias = entityReferenceAliases.getTableAlias();
final OuterJoinLoadable outerJoinLoadable = (OuterJoinLoadable) getRootEntityReturn().getEntityPersister();
if ( getQueryBuildingParameters().getLockOptions() != null ) {
fromTableFragment = getSessionFactory().getDialect().appendLockHint(
getQueryBuildingParameters().getLockOptions(),
outerJoinLoadable.fromTableFragment( rootAlias )
);
select.setLockOptions( buildingParameters.getLockOptions() );
select.setLockOptions( getQueryBuildingParameters().getLockOptions() );
}
else if ( buildingParameters.getLockMode() != null ) {
fromTableFragment = factory.getDialect().appendLockHint(
buildingParameters.getLockMode(),
rootLoadable.fromTableFragment( rootAlias )
else if ( getQueryBuildingParameters().getLockMode() != null ) {
fromTableFragment = getSessionFactory().getDialect().appendLockHint(
getQueryBuildingParameters().getLockMode(),
outerJoinLoadable.fromTableFragment( rootAlias )
);
select.setLockMode( buildingParameters.getLockMode() );
select.setLockMode( getQueryBuildingParameters().getLockMode() );
}
else {
fromTableFragment = rootLoadable.fromTableFragment( rootAlias );
fromTableFragment = outerJoinLoadable.fromTableFragment( rootAlias );
}
select.appendFromClauseFragment( fromTableFragment + rootLoadable.fromJoinFragment( rootAlias, true, true ) );
select.appendFromClauseFragment( fromTableFragment + outerJoinLoadable.fromJoinFragment( rootAlias, true, true ) );
}
private static class ReaderCollectorImpl implements ReaderCollector {
private EntityReturnReader rootReturnReader;
private final List<EntityReferenceInitializer> entityReferenceInitializers = new ArrayList<EntityReferenceInitializer>();
private List<CollectionReferenceInitializer> arrayReferenceInitializers;
private List<CollectionReferenceInitializer> collectionReferenceInitializers;
protected void applyRootReturnFilterRestrictions(SelectStatementBuilder selectStatementBuilder) {
final Queryable rootQueryable = (Queryable) getRootEntityReturn().getEntityPersister();
selectStatementBuilder.appendRestrictions(
rootQueryable.filterFragment(
entityReferenceAliases.getTableAlias(),
Collections.emptyMap()
)
);
}
@Override
public void add(CollectionReferenceInitializer collectionReferenceInitializer) {
if ( collectionReferenceInitializer.getCollectionReference().getCollectionPersister().isArray() ) {
if ( arrayReferenceInitializers == null ) {
arrayReferenceInitializers = new ArrayList<CollectionReferenceInitializer>();
}
arrayReferenceInitializers.add( collectionReferenceInitializer );
}
else {
if ( collectionReferenceInitializers == null ) {
collectionReferenceInitializers = new ArrayList<CollectionReferenceInitializer>();
}
collectionReferenceInitializers.add( collectionReferenceInitializer );
}
protected void applyRootReturnWhereJoinRestrictions(SelectStatementBuilder selectStatementBuilder) {
final Joinable joinable = (OuterJoinLoadable) getRootEntityReturn().getEntityPersister();
selectStatementBuilder.appendRestrictions(
joinable.whereJoinFragment(
entityReferenceAliases.getTableAlias(),
true,
true
)
);
}
@Override
public void add(EntityReferenceInitializer entityReferenceInitializer) {
if ( EntityReturnReader.class.isInstance( entityReferenceInitializer ) ) {
setRootReturnReader( (EntityReturnReader) entityReferenceInitializer );
}
entityReferenceInitializers.add( entityReferenceInitializer );
protected void applyRootReturnOrderByFragments(SelectStatementBuilder selectStatementBuilder) {
}
@Override
protected ReaderCollector getReaderCollector() {
return readerCollector;
}
@Override
protected QuerySpace getRootQuerySpace() {
return getQuerySpace( getRootEntityReturn().getQuerySpaceUid() );
}
@Override
protected String getRootTableAlias() {
return entityReferenceAliases.getTableAlias();
}
@Override
protected boolean shouldApplyRootReturnFilterBeforeKeyRestriction() {
return false;
}
protected void applyRootReturnSelectFragments(SelectStatementBuilder selectStatementBuilder) {
final OuterJoinLoadable outerJoinLoadable = (OuterJoinLoadable) getRootEntityReturn().getEntityPersister();
selectStatementBuilder.appendSelectClauseFragment(
outerJoinLoadable.selectFragment(
entityReferenceAliases.getTableAlias(),
entityReferenceAliases.getColumnAliases().getSuffix()
)
);
}
private static class EntityLoaderReaderCollectorImpl extends ReaderCollectorImpl {
private final EntityReturnReader entityReturnReader;
public EntityLoaderReaderCollectorImpl(
EntityReturnReader entityReturnReader,
EntityReferenceInitializer entityReferenceInitializer) {
this.entityReturnReader = entityReturnReader;
add( entityReferenceInitializer );
}
@Override
public RowReader buildRowReader() {
return new EntityLoaderRowReader(
rootReturnReader,
entityReferenceInitializers,
arrayReferenceInitializers,
collectionReferenceInitializers
);
return new EntityLoaderRowReader( this );
}
public void setRootReturnReader(EntityReturnReader entityReturnReader) {
if ( rootReturnReader != null ) {
throw new IllegalStateException( "Root return reader already set" );
}
rootReturnReader = entityReturnReader;
@Override
public EntityReturnReader getReturnReader() {
return entityReturnReader;
}
}
public static class EntityLoaderRowReader extends AbstractRowReader {
private final EntityReturnReader rootReturnReader;
private final List<EntityReferenceInitializer> entityReferenceInitializers;
private final List<CollectionReferenceInitializer> arrayReferenceInitializers;
private final List<CollectionReferenceInitializer> collectionReferenceInitializers;
public EntityLoaderRowReader(
EntityReturnReader rootReturnReader,
List<EntityReferenceInitializer> entityReferenceInitializers,
List<CollectionReferenceInitializer> arrayReferenceInitializers,
List<CollectionReferenceInitializer> collectionReferenceInitializers) {
this.rootReturnReader = rootReturnReader;
this.entityReferenceInitializers = entityReferenceInitializers != null
? entityReferenceInitializers
: Collections.<EntityReferenceInitializer>emptyList();
this.arrayReferenceInitializers = arrayReferenceInitializers != null
? arrayReferenceInitializers
: Collections.<CollectionReferenceInitializer>emptyList();
this.collectionReferenceInitializers = collectionReferenceInitializers != null
? collectionReferenceInitializers
: Collections.<CollectionReferenceInitializer>emptyList();
public EntityLoaderRowReader(EntityLoaderReaderCollectorImpl entityLoaderReaderCollector) {
super( entityLoaderReaderCollector );
this.rootReturnReader = entityLoaderReaderCollector.getReturnReader();
}
@Override
@ -459,62 +295,9 @@ public class EntityLoadQueryDetails implements LoadQueryDetails {
}
}
@Override
protected List<EntityReferenceInitializer> getEntityReferenceInitializers() {
return entityReferenceInitializers;
}
@Override
protected List<CollectionReferenceInitializer> getCollectionReferenceInitializers() {
return collectionReferenceInitializers;
}
@Override
protected List<CollectionReferenceInitializer> getArrayReferenceInitializers() {
return arrayReferenceInitializers;
}
@Override
protected Object readLogicalRow(ResultSet resultSet, ResultSetProcessingContextImpl context) throws SQLException {
return rootReturnReader.read( resultSet, context );
}
}
private static void applyKeyRestriction(SelectStatementBuilder select, String alias, String[] keyColumnNames, int batchSize) {
if ( keyColumnNames.length==1 ) {
// NOT A COMPOSITE KEY
// for batching, use "foo in (?, ?, ?)"
// for no batching, use "foo = ?"
// (that distinction is handled inside InFragment)
final InFragment in = new InFragment().setColumn( alias, keyColumnNames[0] );
for ( int i = 0; i < batchSize; i++ ) {
in.addValue( "?" );
}
select.appendRestrictions( in.toFragmentString() );
}
else {
// A COMPOSITE KEY...
final ConditionFragment keyRestrictionBuilder = new ConditionFragment()
.setTableAlias( alias )
.setCondition( keyColumnNames, "?" );
final String keyRestrictionFragment = keyRestrictionBuilder.toFragmentString();
StringBuilder restrictions = new StringBuilder();
if ( batchSize==1 ) {
// for no batching, use "foo = ? and bar = ?"
restrictions.append( keyRestrictionFragment );
}
else {
// for batching, use "( (foo = ? and bar = ?) or (foo = ? and bar = ?) )"
restrictions.append( '(' );
DisjunctionFragment df = new DisjunctionFragment();
for ( int i=0; i<batchSize; i++ ) {
df.addCondition( keyRestrictionFragment );
}
restrictions.append( df.toFragmentString() );
restrictions.append( ')' );
}
select.appendRestrictions( restrictions.toString() );
}
}
}

View File

@ -0,0 +1,124 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.loader.plan2.exec.spi;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.loader.plan2.exec.internal.AliasResolutionContextImpl;
import org.hibernate.loader.plan2.exec.internal.Helper;
import org.hibernate.loader.plan2.exec.query.internal.SelectStatementBuilder;
import org.hibernate.loader.plan2.exec.query.spi.QueryBuildingParameters;
import org.hibernate.loader.plan2.spi.CollectionReturn;
import org.hibernate.loader.plan2.spi.EntityReference;
import org.hibernate.loader.plan2.spi.LoadPlan;
import org.hibernate.persister.entity.OuterJoinLoadable;
/**
* @author Gail Badner
*/
public class OneToManyLoadQueryDetails extends CollectionLoadQueryDetails {
/**
 * Constructs a CollectionLoadQueryDetails object from the given inputs.
 *
 * @param loadPlan The load plan
 * @param buildingParameters Any influencers that would affect the generated SQL (mostly we are concerned with those
 * that add additional joins here)
 * @param factory The SessionFactory
 *
 * @return The CollectionLoadQueryDetails
 */
public static CollectionLoadQueryDetails makeForBatching(
LoadPlan loadPlan,
QueryBuildingParameters buildingParameters,
SessionFactoryImplementor factory) {
final CollectionReturn rootReturn = Helper.INSTANCE.extractRootReturn( loadPlan, CollectionReturn.class );
final AliasResolutionContextImpl aliasResolutionContext = new AliasResolutionContextImpl( factory );
return new OneToManyLoadQueryDetails(
loadPlan,
aliasResolutionContext,
rootReturn,
buildingParameters,
factory
);
}
protected OneToManyLoadQueryDetails(
LoadPlan loadPlan,
AliasResolutionContextImpl aliasResolutionContext,
CollectionReturn rootReturn,
QueryBuildingParameters buildingParameters,
SessionFactoryImplementor factory) {
super(
loadPlan,
aliasResolutionContext,
rootReturn,
buildingParameters,
factory
);
generate();
}
@Override
protected String getRootTableAlias() {
return getElementEntityReferenceAliases().getTableAlias();
}
@Override
protected void applyRootReturnSelectFragments(SelectStatementBuilder selectStatementBuilder) {
selectStatementBuilder.appendSelectClauseFragment(
getQueryableCollection().selectFragment(
null,
null,
//getCollectionReferenceAliases().getCollectionTableAlias(),
getElementEntityReferenceAliases().getTableAlias(),
getElementEntityReferenceAliases().getColumnAliases().getSuffix(),
getCollectionReferenceAliases().getCollectionColumnAliases().getSuffix(),
true
)
);
super.applyRootReturnSelectFragments( selectStatementBuilder );
}
@Override
protected void applyRootReturnTableFragments(SelectStatementBuilder selectStatementBuilder) {
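// for a one-to-many, the collection rows live in the element entity's table, so that table drives the FROM clause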
final OuterJoinLoadable elementOuterJoinLoadable =
(OuterJoinLoadable) getElementEntityReference().getEntityPersister();
//final String tableAlias = getCollectionReferenceAliases().getCollectionTableAlias();
final String tableAlias = getElementEntityReferenceAliases().getTableAlias();
final String fragment =
elementOuterJoinLoadable.fromTableFragment( tableAlias ) +
elementOuterJoinLoadable.fromJoinFragment( tableAlias, true, true );
selectStatementBuilder.appendFromClauseFragment( fragment );
}
private EntityReference getElementEntityReference() {
return getRootCollectionReturn().getElementGraph().resolveEntityReference();
}
private EntityReferenceAliases getElementEntityReferenceAliases() {
return getAliasResolutionContext().resolveEntityReferenceAliases( getElementEntityReference().getQuerySpaceUid() );
}
}

View File

@ -33,7 +33,7 @@ import org.hibernate.persister.entity.EntityPersister;
public interface EntityReference extends FetchSource {
/**
* Obtain the UID of the QuerySpace (specifically a {@link CollectionQuerySpace}) that this CollectionReference
* Obtain the UID of the QuerySpace (specifically a {@link EntityQuerySpace}) that this EntityReference
* refers to.
*
* @return The UID

View File

@ -24,6 +24,7 @@
package org.hibernate.loader.plan2.spi;
import org.hibernate.type.AssociationType;
import org.hibernate.type.Type;
/**
* Specialization of a Join that is defined by the metadata.
@ -38,6 +39,6 @@ public interface JoinDefinedByMetadata extends Join {
*
* @return The property name
*/
public String getJoinedAssociationPropertyName();
public AssociationType getJoinedAssociationPropertyType();
public String getJoinedPropertyName();
public Type getJoinedPropertyType();
}

View File

@ -34,6 +34,7 @@ import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -5240,9 +5241,28 @@ public abstract class AbstractEntityPersister
EntityIdentifierDefinitionHelper.buildNonEncapsulatedCompositeIdentifierDefinition( this );
}
private void collectAttributeDefinitions(List<AttributeDefinition> definitions, EntityMetamodel metamodel) {
private void collectAttributeDefinitions(
Map<String,AttributeDefinition> attributeDefinitionsByName,
EntityMetamodel metamodel) {
for ( int i = 0; i < metamodel.getPropertySpan(); i++ ) {
definitions.add( metamodel.getProperties()[i] );
final AttributeDefinition attributeDefinition = metamodel.getProperties()[i];
// Don't replace an attribute definition if it is already in attributeDefinitionsByName
// because the new value will be from a subclass.
final AttributeDefinition oldAttributeDefinition = attributeDefinitionsByName.get(
attributeDefinition.getName()
);
if ( oldAttributeDefinition != null ) {
if ( LOG.isTraceEnabled() ) {
LOG.tracef(
"Ignoring subclass attribute definition [%s.%s] because it is defined in a superclass ",
entityMetamodel.getName(),
attributeDefinition.getName()
);
}
}
else {
attributeDefinitionsByName.put( attributeDefinition.getName(), attributeDefinition );
}
}
// see if there are any subclass persisters...
@ -5259,7 +5279,7 @@ public abstract class AbstractEntityPersister
}
try {
final EntityPersister subClassEntityPersister = factory.getEntityPersister( subClassEntityName );
collectAttributeDefinitions( definitions, subClassEntityPersister.getEntityMetamodel() );
collectAttributeDefinitions( attributeDefinitionsByName, subClassEntityPersister.getEntityMetamodel() );
}
catch (MappingException e) {
throw new IllegalStateException(
@ -5284,8 +5304,8 @@ public abstract class AbstractEntityPersister
// to try and drive SQL generation on these (which we do ultimately). A possible solution there
// would be to delay all SQL generation until postInstantiate
List<AttributeDefinition> attributeDefinitions = new ArrayList<AttributeDefinition>();
collectAttributeDefinitions( attributeDefinitions, getEntityMetamodel() );
Map<String,AttributeDefinition> attributeDefinitionsByName = new LinkedHashMap<String,AttributeDefinition>();
collectAttributeDefinitions( attributeDefinitionsByName, getEntityMetamodel() );
// EntityMetamodel currentEntityMetamodel = this.getEntityMetamodel();
@ -5303,7 +5323,9 @@ public abstract class AbstractEntityPersister
// }
// }
this.attributeDefinitions = Collections.unmodifiableList( attributeDefinitions );
this.attributeDefinitions = Collections.unmodifiableList(
new ArrayList<AttributeDefinition>( attributeDefinitionsByName.values() )
);
// // todo : leverage the attribute definitions housed on EntityMetamodel
// // for that to work, we'd have to be able to walk our super entity persister(s)
// this.attributeDefinitions = new Iterable<AttributeDefinition>() {

View File

@ -37,6 +37,7 @@ import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.OuterJoinLoadable;
import org.hibernate.type.AssociationType;
import org.hibernate.type.EntityType;
/**
* @author Steve Ebersole
@ -104,11 +105,18 @@ public class FetchStrategyHelper {
return FetchStyle.JOIN;
}
if ( mappingFetchMode == FetchMode.SELECT ) {
return FetchStyle.SELECT;
}
if ( type.isEntityType() ) {
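// entity association: use batch fetching if the persister is batch loadable; an entity with no proxy cannot be fetched lazily, so fall back to join fetching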
EntityPersister persister = (EntityPersister) type.getAssociatedJoinable( sessionFactory );
if ( persister.isBatchLoadable() ) {
return FetchStyle.BATCH;
}
else if ( !persister.hasProxy() ) {
return FetchStyle.JOIN;
}
}
else {
CollectionPersister persister = (CollectionPersister) type.getAssociatedJoinable( sessionFactory );

View File

@ -241,7 +241,7 @@ public class MetamodelGraphWalker {
try {
final Type collectionIndexType = collectionIndexDefinition.getType();
if ( collectionIndexType.isComponentType() ) {
visitCompositeDefinition( collectionIndexDefinition.toCompositeDefinition() );
visitAttributes( collectionIndexDefinition.toCompositeDefinition() );
}
else if ( collectionIndexType.isAssociationType() ) {
visitEntityDefinition( collectionIndexDefinition.toEntityDefinition() );

View File

@ -31,6 +31,7 @@ import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import java.util.List;
import org.hibernate.LockMode;
import org.hibernate.engine.spi.CascadingActions;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.loader.plan2.build.internal.CascadeStyleLoadPlanBuildingAssociationVisitationStrategy;
@ -40,6 +41,7 @@ import org.hibernate.loader.plan2.build.spi.MetamodelDrivenLoadPlanBuilder;
import org.hibernate.loader.plan2.exec.internal.AliasResolutionContextImpl;
import org.hibernate.loader.plan2.spi.CollectionReturn;
import org.hibernate.loader.plan2.spi.EntityFetch;
import org.hibernate.loader.plan2.spi.EntityReference;
import org.hibernate.loader.plan2.spi.EntityReturn;
import org.hibernate.loader.plan2.spi.Fetch;
import org.hibernate.loader.plan2.spi.LoadPlan;
@ -70,7 +72,8 @@ public class LoadPlanBuilderTest extends BaseCoreFunctionalTestCase {
EntityPersister ep = (EntityPersister) sessionFactory().getClassMetadata(Message.class);
FetchStyleLoadPlanBuildingAssociationVisitationStrategy strategy = new FetchStyleLoadPlanBuildingAssociationVisitationStrategy(
sessionFactory(),
LoadQueryInfluencers.NONE
LoadQueryInfluencers.NONE,
LockMode.NONE
);
LoadPlan plan = MetamodelDrivenLoadPlanBuilder.buildRootEntityLoadPlan( strategy, ep );
assertFalse( plan.hasAnyScalarReturns() );
@ -93,7 +96,8 @@ public class LoadPlanBuilderTest extends BaseCoreFunctionalTestCase {
CascadeStyleLoadPlanBuildingAssociationVisitationStrategy strategy = new CascadeStyleLoadPlanBuildingAssociationVisitationStrategy(
CascadingActions.MERGE,
sessionFactory(),
LoadQueryInfluencers.NONE
LoadQueryInfluencers.NONE,
LockMode.NONE
);
LoadPlan plan = MetamodelDrivenLoadPlanBuilder.buildRootEntityLoadPlan( strategy, ep );
assertFalse( plan.hasAnyScalarReturns() );
@ -115,7 +119,8 @@ public class LoadPlanBuilderTest extends BaseCoreFunctionalTestCase {
CollectionPersister cp = sessionFactory().getCollectionPersister( Poster.class.getName() + ".messages" );
FetchStyleLoadPlanBuildingAssociationVisitationStrategy strategy = new FetchStyleLoadPlanBuildingAssociationVisitationStrategy(
sessionFactory(),
LoadQueryInfluencers.NONE
LoadQueryInfluencers.NONE,
LockMode.NONE
);
LoadPlan plan = MetamodelDrivenLoadPlanBuilder.buildRootCollectionLoadPlan( strategy, cp );
assertFalse( plan.hasAnyScalarReturns() );
@ -123,12 +128,14 @@ public class LoadPlanBuilderTest extends BaseCoreFunctionalTestCase {
Return rtn = plan.getReturns().get( 0 );
CollectionReturn collectionReturn = ExtraAssertions.assertTyping( CollectionReturn.class, rtn );
assertNotNull( collectionReturn.getElementGraph() );
assertNotNull( collectionReturn.getElementGraph().getFetches() );
assertEquals( 1, collectionReturn.getElementGraph().getFetches().length ); // the collection elements are fetched
Fetch fetch = collectionReturn.getElementGraph().getFetches()[0];
EntityFetch entityFetch = ExtraAssertions.assertTyping( EntityFetch.class, fetch );
assertNotNull( entityFetch.getFetches() );
assertEquals( 0, entityFetch.getFetches().length );
// the collection Message elements are fetched, but Message.poster is not fetched
// (because that collection is owned by that Poster)
assertEquals( 0, collectionReturn.getElementGraph().getFetches().length );
EntityReference entityReference = ExtraAssertions.assertTyping( EntityReference.class, collectionReturn.getElementGraph() );
assertNotNull( entityReference.getFetches() );
assertEquals( 0, entityReference.getFetches().length );
LoadPlanTreePrinter.INSTANCE.logTree( plan, new AliasResolutionContextImpl( sessionFactory() ) );
}

View File

@ -78,7 +78,7 @@ public class LoadPlanStructureAssertionHelper {
);
// final EntityLoader loader = new EntityLoader( persister, lockMode, sf, influencers );
LoadPlan plan = buildLoadPlan( sf, persister, influencers );
LoadPlan plan = buildLoadPlan( sf, persister, influencers, lockMode );
EntityLoadQueryDetails details = EntityLoadQueryDetails.makeForBatching(
plan, persister.getKeyColumnNames(),
new QueryBuildingParameters() {
@ -110,13 +110,18 @@ public class LoadPlanStructureAssertionHelper {
public LoadPlan buildLoadPlan(
SessionFactoryImplementor sf,
OuterJoinLoadable persister,
LoadQueryInfluencers influencers) {
FetchStyleLoadPlanBuildingAssociationVisitationStrategy strategy = new FetchStyleLoadPlanBuildingAssociationVisitationStrategy( sf, influencers );
LoadQueryInfluencers influencers,
LockMode lockMode) {
FetchStyleLoadPlanBuildingAssociationVisitationStrategy strategy = new FetchStyleLoadPlanBuildingAssociationVisitationStrategy(
sf,
influencers,
lockMode
);
return MetamodelDrivenLoadPlanBuilder.buildRootEntityLoadPlan( strategy, persister );
}
public LoadPlan buildLoadPlan(SessionFactoryImplementor sf, OuterJoinLoadable persister) {
return buildLoadPlan( sf, persister, LoadQueryInfluencers.NONE );
return buildLoadPlan( sf, persister, LoadQueryInfluencers.NONE, LockMode.NONE );
}
private void compare(JoinWalker walker, EntityLoadQueryDetails details) {

View File

@ -49,7 +49,8 @@ public class Helper implements QueryBuildingParameters {
public LoadPlan buildLoadPlan(SessionFactoryImplementor sf, EntityPersister entityPersister) {
final FetchStyleLoadPlanBuildingAssociationVisitationStrategy strategy = new FetchStyleLoadPlanBuildingAssociationVisitationStrategy(
sf,
LoadQueryInfluencers.NONE
LoadQueryInfluencers.NONE,
LockMode.NONE
);
return MetamodelDrivenLoadPlanBuilder.buildRootEntityLoadPlan( strategy, entityPersister );
}