Initial working support for building and executing a JdbcSelect operation from simple HQL

Steve Ebersole 2019-09-04 20:40:48 -05:00 committed by Andrea Boriero
parent 70c051af6f
commit dbd108e0b7
45 changed files with 2374 additions and 1454 deletions

View File

@@ -15,14 +15,11 @@ import org.hibernate.HibernateException;
import org.hibernate.cache.spi.QueryKey;
import org.hibernate.cache.spi.QueryResultsCache;
import org.hibernate.cache.spi.QueryResultsRegion;
import org.hibernate.cache.spi.QuerySpacesHelper;
import org.hibernate.cache.spi.SecondLevelCacheLogger;
import org.hibernate.cache.spi.TimestampsCache;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.type.Type;
import org.hibernate.type.TypeHelper;
/**
* The standard implementation of the Hibernate QueryCache interface. Works
@@ -55,36 +52,14 @@ public class QueryResultsCacheImpl implements QueryResultsCache {
public boolean put(
final QueryKey key,
final List results,
final Type[] returnTypes,
final SharedSessionContractImplementor session) throws HibernateException {
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "Caching query results in region: %s; timestamp=%s", cacheRegion.getName(), session.getTransactionStartTimestamp() );
}
final List resultsCopy = CollectionHelper.arrayList( results.size() );
final boolean isSingleResult = returnTypes.length == 1;
for ( Object aResult : results ) {
final Serializable resultRowForCache;
if ( isSingleResult ) {
resultRowForCache = returnTypes[0].disassemble( aResult, session, null );
}
else {
resultRowForCache = TypeHelper.disassemble( (Object[]) aResult, returnTypes, null, session, null );
}
resultsCopy.add( resultRowForCache );
if ( LOG.isTraceEnabled() ) {
logCachedResultRowDetails( returnTypes, aResult );
}
}
if ( LOG.isTraceEnabled() ) {
logCachedResultDetails( key, null, returnTypes, resultsCopy );
if ( SecondLevelCacheLogger.DEBUG_ENABLED ) {
SecondLevelCacheLogger.INSTANCE.debugf( "Caching query results in region: %s; timestamp=%s", cacheRegion.getName(), session.getTransactionStartTimestamp() );
}
final CacheItem cacheItem = new CacheItem(
session.getTransactionStartTimestamp(),
resultsCopy
deepCopy( results )
);
try {
@@ -98,44 +73,40 @@ public class QueryResultsCacheImpl implements QueryResultsCache {
return true;
}
private static void logCachedResultDetails(QueryKey key, Set querySpaces, Type[] returnTypes, List result) {
if ( !LOG.isTraceEnabled() ) {
return;
}
LOG.trace( "key.hashCode=" + key.hashCode() );
LOG.trace( "querySpaces=" + querySpaces );
if ( returnTypes == null || returnTypes.length == 0 ) {
LOG.trace(
"Unexpected returnTypes is "
+ ( returnTypes == null ? "null" : "empty" ) + "! result"
+ ( result == null ? " is null" : ".size()=" + result.size() )
);
}
else {
final StringBuilder returnTypeInfo = new StringBuilder();
for ( Type returnType : returnTypes ) {
returnTypeInfo.append( "typename=" )
.append( returnType.getName() )
.append( " class=" )
.append( returnType.getReturnedClass().getName() )
.append( ' ' );
}
LOG.trace( "unexpected returnTypes is " + returnTypeInfo.toString() + "! result" );
}
private static <T> List<T> deepCopy(List<T> results) {
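// note: this copies only the List container itself - the cached row objects are shared, not cloned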
return new ArrayList<>( results );
}
@Override
@SuppressWarnings({ "unchecked" })
public List get(
QueryKey key,
Set<Serializable> spaces,
final Type[] returnTypes,
SharedSessionContractImplementor session) {
return get(
key,
QuerySpacesHelper.INSTANCE.toStringArray( spaces ),
returnTypes,
session
);
final QueryKey key,
final Set<String> spaces,
final SharedSessionContractImplementor session) throws HibernateException {
if ( SecondLevelCacheLogger.DEBUG_ENABLED ) {
SecondLevelCacheLogger.INSTANCE.debugf( "Checking cached query results in region: %s", cacheRegion.getName() );
}
final CacheItem cacheItem = getCachedData( key, session );
if ( cacheItem == null ) {
if ( SecondLevelCacheLogger.DEBUG_ENABLED ) {
SecondLevelCacheLogger.INSTANCE.debug( "Query results were not found in cache" );
}
return null;
}
if ( !timestampsCache.isUpToDate( spaces, cacheItem.timestamp, session ) ) {
if ( SecondLevelCacheLogger.DEBUG_ENABLED ) {
SecondLevelCacheLogger.INSTANCE.debug( "Cached query results were not up-to-date" );
}
return null;
}
if ( SecondLevelCacheLogger.DEBUG_ENABLED ) {
SecondLevelCacheLogger.INSTANCE.debug( "Returning cached query results" );
}
return deepCopy( cacheItem.results );
}
@Override
@@ -143,42 +114,31 @@ public class QueryResultsCacheImpl implements QueryResultsCache {
public List get(
final QueryKey key,
final String[] spaces,
final Type[] returnTypes,
final SharedSessionContractImplementor session) {
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "Checking cached query results in region: %s", cacheRegion.getName() );
final SharedSessionContractImplementor session) throws HibernateException {
if ( SecondLevelCacheLogger.DEBUG_ENABLED ) {
SecondLevelCacheLogger.INSTANCE.debugf( "Checking cached query results in region: %s", cacheRegion.getName() );
}
final CacheItem cacheItem = getCachedData( key, session );
if ( cacheItem == null ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Query results were not found in cache" );
if ( SecondLevelCacheLogger.DEBUG_ENABLED ) {
SecondLevelCacheLogger.INSTANCE.debug( "Query results were not found in cache" );
}
return null;
}
if ( !timestampsCache.isUpToDate( spaces, cacheItem.timestamp, session ) ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Cached query results were not up-to-date" );
if ( SecondLevelCacheLogger.DEBUG_ENABLED ) {
SecondLevelCacheLogger.INSTANCE.debug( "Cached query results were not up-to-date" );
}
return null;
}
if ( LOG.isDebugEnabled() ) {
LOG.debug( "Returning cached query results" );
if ( SecondLevelCacheLogger.DEBUG_ENABLED ) {
SecondLevelCacheLogger.INSTANCE.debug( "Returning cached query results" );
}
final boolean singleResult = returnTypes.length == 1;
for ( int i = 0; i < cacheItem.results.size(); i++ ) {
if ( singleResult ) {
returnTypes[0].beforeAssemble( (Serializable) cacheItem.results.get( i ), session );
}
else {
TypeHelper.beforeAssemble( (Serializable[]) cacheItem.results.get( i ), returnTypes, session );
}
}
return assembleCachedResult( key, cacheItem.results, singleResult, returnTypes, session );
return deepCopy( cacheItem.results );
}
private CacheItem getCachedData(QueryKey key, SharedSessionContractImplementor session) {
@@ -193,90 +153,6 @@ public class QueryResultsCacheImpl implements QueryResultsCache {
return cachedItem;
}
@SuppressWarnings("unchecked")
private List assembleCachedResult(
final QueryKey key,
final List cached,
boolean singleResult,
final Type[] returnTypes,
final SharedSessionContractImplementor session) throws HibernateException {
final List result = new ArrayList( cached.size() );
if ( singleResult ) {
for ( Object aCached : cached ) {
result.add( returnTypes[0].assemble( (Serializable) aCached, session, null ) );
}
}
else {
for ( int i = 0; i < cached.size(); i++ ) {
result.add(
TypeHelper.assemble( (Serializable[]) cached.get( i ), returnTypes, session, null )
);
if ( LOG.isTraceEnabled() ) {
logCachedResultRowDetails( returnTypes, result.get( i ) );
}
}
}
return result;
}
private static void logCachedResultRowDetails(Type[] returnTypes, Object result) {
logCachedResultRowDetails(
returnTypes,
( result instanceof Object[] ? (Object[]) result : new Object[] { result } )
);
}
private static void logCachedResultRowDetails(Type[] returnTypes, Object[] tuple) {
if ( !LOG.isTraceEnabled() ) {
return;
}
if ( tuple == null ) {
LOG.tracef(
"tuple is null; returnTypes is %s",
returnTypes == null ? "null" : "Type[" + returnTypes.length + "]"
);
if ( returnTypes != null && returnTypes.length > 1 ) {
LOG.trace(
"Unexpected result tuple! tuple is null; should be Object["
+ returnTypes.length + "]!"
);
}
}
else {
if ( returnTypes == null || returnTypes.length == 0 ) {
LOG.trace(
"Unexpected result tuple! tuple is null; returnTypes is "
+ ( returnTypes == null ? "null" : "empty" )
);
}
LOG.tracef(
"tuple is Object[%s]; returnTypes is %s",
tuple.length,
returnTypes == null ? "null" : "Type[" + returnTypes.length + "]"
);
if ( returnTypes != null && tuple.length != returnTypes.length ) {
LOG.trace(
"Unexpected tuple length! transformer= expected="
+ returnTypes.length + " got=" + tuple.length
);
}
else {
for ( int j = 0; j < tuple.length; j++ ) {
if ( tuple[j] != null && returnTypes != null
&& ! returnTypes[j].getReturnedClass().isInstance( tuple[j] ) ) {
LOG.trace(
"Unexpected tuple value type! transformer= expected="
+ returnTypes[j].getReturnedClass().getName()
+ " got="
+ tuple[j].getClass().getName()
);
}
}
}
}
}
@Override
public String toString() {
return "QueryResultsCache(" + cacheRegion.getName() + ')';

View File

@@ -6,6 +6,8 @@
*/
package org.hibernate.cache.internal;
import java.util.Collection;
import org.hibernate.cache.spi.TimestampsCache;
import org.hibernate.cache.spi.TimestampsRegion;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
@@ -44,4 +46,13 @@ public class TimestampsCacheDisabledImpl implements TimestampsCache {
log.trace( "TimestampsRegionAccess#isUpToDate - disabled" );
return false;
}
@Override
public boolean isUpToDate(
Collection<String> spaces,
Long timestamp,
SharedSessionContractImplementor session) {
log.trace( "TimestampsRegionAccess#isUpToDate - disabled" );
return false;
}
}

View File

@@ -7,10 +7,11 @@
package org.hibernate.cache.internal;
import java.io.Serializable;
import java.util.Collection;
import org.hibernate.cache.spi.RegionFactory;
import org.hibernate.cache.spi.TimestampsRegion;
import org.hibernate.cache.spi.TimestampsCache;
import org.hibernate.cache.spi.TimestampsRegion;
import org.hibernate.engine.spi.SessionEventListenerManager;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
@@ -26,6 +27,8 @@ import org.jboss.logging.Logger;
public class TimestampsCacheEnabledImpl implements TimestampsCache {
private static final Logger log = Logger.getLogger( TimestampsCacheEnabledImpl.class );
public static final boolean DEBUG_ENABLED = log.isDebugEnabled();
private final TimestampsRegion timestampsRegion;
public TimestampsCacheEnabledImpl(TimestampsRegion timestampsRegion) {
@@ -109,36 +112,65 @@ public class TimestampsCacheEnabledImpl implements TimestampsCache {
String[] spaces,
Long timestamp,
SharedSessionContractImplementor session) {
final StatisticsImplementor statistics = session.getFactory().getStatistics();
final boolean stats = statistics.isStatisticsEnabled();
final boolean debugEnabled = log.isDebugEnabled();
for ( Serializable space : spaces ) {
final Long lastUpdate = getLastUpdateTimestampForSpace( space, session );
if ( lastUpdate == null ) {
// the last update timestamp for the given space was evicted from the
// cache or there have been no writes to it since startup
if ( stats ) {
statistics.updateTimestampsCacheMiss();
}
}
else {
if ( debugEnabled ) {
log.debugf(
"[%s] last update timestamp: %s",
space,
lastUpdate + ", result set timestamp: " + timestamp
);
}
if ( stats ) {
statistics.updateTimestampsCacheHit();
}
if ( lastUpdate >= timestamp ) {
return false;
}
if ( isSpaceOutOfDate( space, timestamp, session, statistics ) ) {
return false;
}
}
return true;
}
private boolean isSpaceOutOfDate(
Serializable space,
Long timestamp,
SharedSessionContractImplementor session,
StatisticsImplementor statistics) {
final Long lastUpdate = getLastUpdateTimestampForSpace( space, session );
if ( lastUpdate == null ) {
// the last update timestamp for the given space was evicted from the
// cache or there have been no writes to it since startup
if ( statistics.isStatisticsEnabled() ) {
statistics.updateTimestampsCacheMiss();
}
}
else {
if ( DEBUG_ENABLED ) {
log.debugf(
"[%s] last update timestamp: %s",
space,
lastUpdate + ", result set timestamp: " + timestamp
);
}
if ( statistics.isStatisticsEnabled() ) {
statistics.updateTimestampsCacheHit();
}
//noinspection RedundantIfStatement
if ( lastUpdate >= timestamp ) {
return true;
}
}
return false;
}
@Override
public boolean isUpToDate(
Collection<String> spaces,
Long timestamp,
SharedSessionContractImplementor session) {
final StatisticsImplementor statistics = session.getFactory().getStatistics();
for ( Serializable space : spaces ) {
if ( isSpaceOutOfDate( space, timestamp, session, statistics ) ) {
return false;
}
}
return true;
}

View File

@@ -176,45 +176,6 @@ public interface CacheImplementor extends Service, Cache, Serializable {
CollectionDataAccess getCollectionRegionAccess(NavigableRole collectionRole);
/**
* Get {@code UpdateTimestampsCache} instance managed by the {@code SessionFactory}.
*
* @deprecated Use {@link #getTimestampsCache} instead
*/
@Deprecated
default UpdateTimestampsCache getUpdateTimestampsCache() {
return getTimestampsCache();
}
/**
* Get the default {@code QueryCache}.
*
* @deprecated Use {@link #getDefaultQueryResultsCache} instead.
*/
@Deprecated
default QueryCache getQueryCache() {
return getDefaultQueryResultsCache();
}
/**
* Get the default {@code QueryCache}.
*
* @deprecated Use {@link #getDefaultQueryResultsCache} instead.
*/
@Deprecated
default QueryCache getDefaultQueryCache() {
return getDefaultQueryResultsCache();
}
/**
* @deprecated Use {@link #getQueryResultsCache(String)} instead, but using unqualified name
*/
@Deprecated
default QueryCache getQueryCache(String regionName) throws HibernateException {
return getQueryResultsCache( unqualifyRegionName( regionName ) );
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Some new (default) support methods for the above deprecations
// - themselves deprecated

View File

@@ -1,81 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.cache.spi;
import java.io.Serializable;
import java.util.List;
import java.util.Set;
import org.hibernate.cache.CacheException;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.type.Type;
/**
* @author Steve Ebersole
*
* @deprecated Use {@link QueryResultsCache} instead -
* {@link CacheImplementor#getQueryResultsCache} rather than
* {@link CacheImplementor#getQueryCache}
*/
@Deprecated
public interface QueryCache {
/**
* Clear items from the query cache.
*
* @throws CacheException Indicates a problem delegating to the underlying cache.
*/
void clear();
/**
* Put a result into the query cache.
*
* @param key The cache key
* @param returnTypes The result types
* @param result The results to cache
* @param isNaturalKeyLookup Was this a natural id lookup?
* @param session The originating session
*
* @return Whether the put actually happened.
*/
boolean put(
QueryKey key,
Type[] returnTypes,
List result,
boolean isNaturalKeyLookup,
SharedSessionContractImplementor session);
/**
* Get results from the cache.
*
* @param key The cache key
* @param returnTypes The result types
* @param isNaturalKeyLookup Was this a natural id lookup?
* @param spaces The query spaces (used in invalidation plus validation checks)
* @param session The originating session
*
* @return The cached results; may be null.
*/
List get(
QueryKey key,
Type[] returnTypes,
boolean isNaturalKeyLookup,
Set<Serializable> spaces,
SharedSessionContractImplementor session);
/**
* Destroy the cache.
*/
void destroy();
/**
* The underlying cache factory region being used.
*
* @return The cache region.
*/
QueryResultsRegion getRegion();
}

View File

@@ -8,18 +8,12 @@ package org.hibernate.cache.spi;
import java.io.IOException;
import java.io.Serializable;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.RowSelection;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.TypedValue;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.transform.CacheableResultTransformer;
import org.hibernate.type.Type;
import org.hibernate.query.Limit;
import org.hibernate.query.spi.QueryParameterBindings;
/**
* A key that identifies a particular query with bound parameter values. This is the object Hibernate uses
@@ -35,18 +29,36 @@ public class QueryKey implements Serializable {
public interface ParameterBindingsMemento {
}
public static QueryKey from(
String sqlQueryString,
Limit limit,
QueryParameterBindings parameterBindings,
SharedSessionContractImplementor persistenceContext) {
// todo (6.0) : here is where we should centralize cacheable-or-not
// if this method returns null, the query should be considered un-cacheable
//
// todo (6.0) : should limited (first/max) results be cacheable?
// todo (6.0) : should filtered results be cacheable?
final Limit limitToUse = limit == null ? Limit.NONE : limit;
return new QueryKey(
sqlQueryString,
parameterBindings.generateQueryKeyMemento(),
limitToUse.getFirstRow(),
limitToUse.getMaxRows(),
persistenceContext.getTenantIdentifier(),
persistenceContext.getLoadQueryInfluencers().getEnabledFilterNames()
);
}
private final String sqlQueryString;
private final Type[] positionalParameterTypes;
private final Object[] positionalParameterValues;
private final Map namedParameters;
private final ParameterBindingsMemento parameterBindingsMemento;
private final Integer firstRow;
private final Integer maxRows;
private final String tenantIdentifier;
private final Set filterKeys;
// the explicit user-provided result transformer, not the one used with "select new". Here to avoid mangling
// transformed/non-transformed results.
private final CacheableResultTransformer customTransformer;
private final Set<String> enabledFilterNames;
/**
* For performance reasons, the hashCode is cached; however, it is marked transient so that it can be
@@ -54,134 +66,22 @@ public class QueryKey implements Serializable {
*/
private transient int hashCode;
/**
* Generates a QueryKey.
*
* @param queryString The sql query string.
* @param queryParameters The query parameters
* @param filterKeys The keys of any enabled filters.
* @param session The current session.
* @param customTransformer The result transformer; should be null if data is not transformed before being cached.
*
* @return The generate query cache key.
*/
public static QueryKey generateQueryKey(
String queryString,
QueryParameters queryParameters,
Set filterKeys,
SharedSessionContractImplementor session,
CacheableResultTransformer customTransformer) {
// disassemble positional parameters
final int positionalParameterCount = queryParameters.getPositionalParameterTypes().length;
final Type[] types = new Type[positionalParameterCount];
final Object[] values = new Object[positionalParameterCount];
for ( int i = 0; i < positionalParameterCount; i++ ) {
types[i] = queryParameters.getPositionalParameterTypes()[i];
values[i] = types[i].disassemble( queryParameters.getPositionalParameterValues()[i], session, null );
}
// disassemble named parameters
final Map<String,TypedValue> namedParameters;
if ( queryParameters.getNamedParameters() == null ) {
namedParameters = null;
}
else {
namedParameters = CollectionHelper.mapOfSize( queryParameters.getNamedParameters().size() );
for ( Map.Entry<String,TypedValue> namedParameterEntry : queryParameters.getNamedParameters().entrySet() ) {
namedParameters.put(
namedParameterEntry.getKey(),
new TypedValue(
namedParameterEntry.getValue().getType(),
namedParameterEntry.getValue().getType().disassemble(
namedParameterEntry.getValue().getValue(),
session,
null
)
)
);
}
}
// decode row selection...
final RowSelection selection = queryParameters.getRowSelection();
final Integer firstRow;
final Integer maxRows;
if ( selection != null ) {
firstRow = selection.getFirstRow();
maxRows = selection.getMaxRows();
}
else {
firstRow = null;
maxRows = null;
}
return new QueryKey(
queryString,
types,
values,
namedParameters,
firstRow,
maxRows,
filterKeys,
session.getTenantIdentifier(),
customTransformer
);
}
/**
* Package-protected constructor.
*
* @param sqlQueryString The sql query string.
* @param positionalParameterTypes Positional parameter types.
* @param positionalParameterValues Positional parameter values.
* @param namedParameters Named parameters.
* @param firstRow First row selection, if any.
* @param maxRows Max-rows selection, if any.
* @param filterKeys Enabled filter keys, if any.
* @param customTransformer Custom result transformer, if one.
* @param tenantIdentifier The tenant identifier in effect for this query, or {@code null}
*/
QueryKey(
String sqlQueryString,
Type[] positionalParameterTypes,
Object[] positionalParameterValues,
Map namedParameters,
public QueryKey(
String sql,
ParameterBindingsMemento parameterBindingsMemento,
Integer firstRow,
Integer maxRows,
Set filterKeys,
String tenantIdentifier,
CacheableResultTransformer customTransformer) {
this.sqlQueryString = sqlQueryString;
this.positionalParameterTypes = positionalParameterTypes;
this.positionalParameterValues = positionalParameterValues;
this.namedParameters = namedParameters;
Set<String> enabledFilterNames) {
this.sqlQueryString = sql;
this.parameterBindingsMemento = parameterBindingsMemento;
this.firstRow = firstRow;
this.maxRows = maxRows;
this.tenantIdentifier = tenantIdentifier;
this.filterKeys = filterKeys;
this.customTransformer = customTransformer;
this.enabledFilterNames = enabledFilterNames;
this.hashCode = generateHashCode();
}
/**
* Provides access to the explicitly user-provided result transformer.
*
* @return The result transformer.
*/
public CacheableResultTransformer getResultTransformer() {
return customTransformer;
}
/**
* Provide (unmodifiable) access to the named parameters that are part of this query.
*
* @return The (unmodifiable) map of named parameters
*/
@SuppressWarnings("unchecked")
public Map getNamedParameters() {
return Collections.unmodifiableMap( namedParameters );
}
/**
* Deserialization hook used to re-init the cached hashcode which is needed for proper clustering support.
*
@@ -197,20 +97,17 @@ public class QueryKey implements Serializable {
private int generateHashCode() {
int result = 13;
result = 37 * result + sqlQueryString.hashCode();
result = 37 * result + ( firstRow==null ? 0 : firstRow.hashCode() );
result = 37 * result + ( maxRows==null ? 0 : maxRows.hashCode() );
for ( int i=0; i< positionalParameterValues.length; i++ ) {
result = 37 * result + ( positionalParameterValues[i]==null ? 0 : positionalParameterTypes[i].getHashCode( positionalParameterValues[i] ) );
}
result = 37 * result + ( namedParameters==null ? 0 : namedParameters.hashCode() );
result = 37 * result + ( filterKeys ==null ? 0 : filterKeys.hashCode() );
result = 37 * result + ( customTransformer==null ? 0 : customTransformer.hashCode() );
result = 37 * result + parameterBindingsMemento.hashCode();
result = 37 * result + ( enabledFilterNames == null ? 0 : enabledFilterNames.hashCode() );
result = 37 * result + ( tenantIdentifier==null ? 0 : tenantIdentifier.hashCode() );
result = 37 * result + sqlQueryString.hashCode();
return result;
}
@Override
@SuppressWarnings("RedundantIfStatement")
public boolean equals(Object other) {
if ( !( other instanceof QueryKey ) ) {
return false;
@@ -220,69 +117,29 @@ public class QueryKey implements Serializable {
if ( !sqlQueryString.equals( that.sqlQueryString ) ) {
return false;
}
if ( !Objects.equals( firstRow, that.firstRow ) || !Objects.equals( maxRows, that.maxRows ) ) {
if ( !Objects.equals( tenantIdentifier, that.tenantIdentifier ) ) {
return false;
}
if ( !Objects.equals( customTransformer, that.customTransformer ) ) {
return false;
}
if ( positionalParameterTypes == null ) {
if ( that.positionalParameterTypes != null ) {
return false;
}
}
else {
if ( that.positionalParameterTypes == null ) {
return false;
}
if ( positionalParameterTypes.length != that.positionalParameterTypes.length ) {
return false;
}
for ( int i = 0; i < positionalParameterTypes.length; i++ ) {
if ( positionalParameterTypes[i].getReturnedClass() != that.positionalParameterTypes[i].getReturnedClass() ) {
return false;
}
if ( !positionalParameterTypes[i].isEqual( positionalParameterValues[i], that.positionalParameterValues[i] ) ) {
return false;
}
}
}
return Objects.equals( filterKeys, that.filterKeys )
&& Objects.equals( namedParameters, that.namedParameters )
&& Objects.equals( tenantIdentifier, that.tenantIdentifier );
if ( !Objects.equals( firstRow, that.firstRow )
|| !Objects.equals( maxRows, that.maxRows ) ) {
return false;
}
if ( !Objects.equals( parameterBindingsMemento, that.parameterBindingsMemento ) ) {
return false;
}
if ( !Objects.equals( enabledFilterNames, that.enabledFilterNames ) ) {
return false;
}
return true;
}
@Override
public int hashCode() {
return hashCode;
}
@Override
public String toString() {
final StringBuilder buffer = new StringBuilder( "sql: " ).append( sqlQueryString );
if ( positionalParameterValues != null ) {
buffer.append( "; parameters: " );
for ( Object positionalParameterValue : positionalParameterValues ) {
buffer.append( positionalParameterValue ).append( ", " );
}
}
if ( namedParameters != null ) {
buffer.append( "; named parameters: " ).append( namedParameters );
}
if ( filterKeys != null ) {
buffer.append( "; filterKeys: " ).append( filterKeys );
}
if ( firstRow != null ) {
buffer.append( "; first row: " ).append( firstRow );
}
if ( maxRows != null ) {
buffer.append( "; max rows: " ).append( maxRows );
}
if ( customTransformer != null ) {
buffer.append( "; transformer: " ).append( customTransformer );
}
return buffer.toString();
}
}
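
A minimal usage sketch, not part of the commit itself, showing how the new QueryKey.from factory and the reworked QueryResultsCache put/get contract are meant to fit together. The wrapper method, the regionName/querySpaces parameters, and the executeQuery helper are illustrative assumptions; the invoked signatures (QueryKey.from, CacheImplementor#getQueryResultsCache, QueryResultsCache#get and #put) all appear in this change set.

import java.util.List;
import java.util.Set;

import org.hibernate.cache.spi.QueryKey;
import org.hibernate.cache.spi.QueryResultsCache;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.query.Limit;
import org.hibernate.query.spi.QueryParameterBindings;

public class QueryResultsCachingSketch {

	public static List listWithCaching(
			SharedSessionContractImplementor session,
			String sql,
			Limit limit,
			QueryParameterBindings parameterBindings,
			Set<String> querySpaces,
			String regionName) {
		// new in this commit: the key is built from the SQL, the limit and a
		// memento of the parameter bindings (no more Type[] / QueryParameters)
		final QueryKey key = QueryKey.from( sql, limit, parameterBindings, session );

		final QueryResultsCache queryResultsCache =
				session.getFactory().getCache().getQueryResultsCache( regionName );

		// returns null on a miss, or when the TimestampsCache reports that one of
		// the query spaces was written to after the results were cached
		List results = queryResultsCache.get( key, querySpaces, session );
		if ( results == null ) {
			results = executeQuery( session );
			// note the simplified signature: no Type[] returnTypes argument
			queryResultsCache.put( key, results, session );
		}
		return results;
	}

	private static List executeQuery(SharedSessionContractImplementor session) {
		// placeholder for the actual JdbcSelect execution
		throw new UnsupportedOperationException( "illustrative placeholder" );
	}
}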

View File

@@ -13,7 +13,6 @@ import java.util.Set;
import org.hibernate.HibernateException;
import org.hibernate.cache.CacheException;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.type.Type;
/**
* Defines the responsibility for managing query result data caching
@@ -22,23 +21,12 @@ import org.hibernate.type.Type;
* @author Gavin King
* @author Steve Ebersole
*/
public interface QueryResultsCache extends QueryCache {
public interface QueryResultsCache {
/**
* The underlying cache region being used.
*/
@Override
QueryResultsRegion getRegion();
/**
* Clear items from the query cache.
*
* @throws CacheException Indicates a problem delegating to the underlying cache.
*/
@Override
default void clear() throws CacheException {
getRegion().clear();
}
/**
* Put a result into the query cache.
*
@@ -53,7 +41,6 @@ public interface QueryResultsCache extends QueryCache {
boolean put(
QueryKey key,
List result,
Type[] returnTypes,
SharedSessionContractImplementor session) throws HibernateException;
/**
@@ -69,8 +56,7 @@ public interface QueryResultsCache extends QueryCache {
*/
List get(
QueryKey key,
Set<Serializable> spaces,
Type[] returnTypes,
Set<String> spaces,
SharedSessionContractImplementor session) throws HibernateException;
/**
@@ -87,34 +73,17 @@ public interface QueryResultsCache extends QueryCache {
List get(
QueryKey key,
String[] spaces,
Type[] returnTypes,
SharedSessionContractImplementor session) throws HibernateException;
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Deprecations
@Override
default boolean put(
QueryKey key,
Type[] returnTypes,
List result,
boolean isNaturalKeyLookup,
SharedSessionContractImplementor session) {
return put( key, result, returnTypes, session );
/**
* Clear items from the query cache.
*
* @throws CacheException Indicates a problem delegating to the underlying cache.
*/
default void clear() throws CacheException {
getRegion().clear();
}
@Override
default List get(
QueryKey key,
Type[] returnTypes,
boolean isNaturalKeyLookup,
Set<Serializable> spaces,
SharedSessionContractImplementor session) {
return get( key, spaces, returnTypes, session );
}
@Override
default void destroy() {
// nothing to do.. the region itself gets destroyed
}

View File

@@ -30,6 +30,9 @@ public interface SecondLevelCacheLogger extends BasicLogger {
"org.hibernate.orm.cache"
);
boolean DEBUG_ENABLED = INSTANCE.isDebugEnabled();
boolean TRACE_ENABLED = INSTANCE.isTraceEnabled();
int NAMESPACE = 90001000;
@LogMessage(level = WARN)

View File

@@ -6,9 +6,7 @@
*/
package org.hibernate.cache.spi;
import java.io.Serializable;
import java.util.Set;
import java.util.function.Consumer;
import java.util.Collection;
import org.hibernate.cache.CacheException;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
@@ -18,7 +16,7 @@ import org.hibernate.engine.spi.SharedSessionContractImplementor;
*
* @author Steve Ebersole
*/
public interface TimestampsCache extends UpdateTimestampsCache {
public interface TimestampsCache {
/**
* The region used to store all timestamps data
*/
@@ -43,66 +41,27 @@ public interface TimestampsCache extends UpdateTimestampsCache {
/**
* Perform an up-to-date check for the given set of query spaces as
* part of verifying the validity of cached query results.
*
* @param spaces The spaces to check
* @param timestamp The timestamp from the transaction when the query results were cached.
* @param session The session from which this check originated.
*
* @return Whether all those spaces are up-to-date
*/
boolean isUpToDate(
String[] spaces,
Long timestamp,
SharedSessionContractImplementor session);
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Deprecations
@Override
default void preInvalidate(Serializable[] spaces, SharedSessionContractImplementor session) {
final String[] spaceStrings = new String[ spaces.length ];
// todo - does this copy work?
System.arraycopy( spaces, 0, spaceStrings, 0, spaces.length );
preInvalidate( spaceStrings, session );
}
@Override
default void invalidate(Serializable[] spaces, SharedSessionContractImplementor session) {
final String[] spaceStrings = new String[ spaces.length ];
// todo - does this copy work?
System.arraycopy( spaces, 0, spaceStrings, 0, spaces.length );
invalidate( spaceStrings, session );
}
@Override
default boolean isUpToDate(
Set<Serializable> spaces,
/**
* Perform an up-to-date check for the given set of query spaces as
* part of verifying the validity of cached query results.
*/
boolean isUpToDate(
Collection<String> spaces,
Long timestamp,
SharedSessionContractImplementor session) {
final String[] spaceArray = new String[ spaces.size() ];
SharedSessionContractImplementor session);
spaces.forEach(
new Consumer<Serializable>() {
int position = 0;
@Override
public void accept(Serializable serializable) {
spaceArray[position++] = (String) serializable;
}
}
);
return isUpToDate( spaceArray, timestamp, session );
}
@Override
default void clear() throws CacheException {
getRegion().clear();
}
@Override
default void destroy() {
// nothing to do - the region itself is destroyed
}
}
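
A minimal sketch, assuming a helper method and variable names that are not in the commit, of the up-to-date check the query-results cache performs before returning cached rows, here using the new Collection<String> overload of isUpToDate:

import java.util.Set;

import org.hibernate.cache.spi.TimestampsCache;
import org.hibernate.engine.spi.SharedSessionContractImplementor;

public class TimestampsCheckSketch {

	public static boolean resultsStillValid(
			SharedSessionContractImplementor session,
			Set<String> querySpaces,
			Long cachedAtTimestamp) {
		final TimestampsCache timestampsCache =
				session.getFactory().getCache().getTimestampsCache();
		// false as soon as any of the spaces has a later update timestamp than
		// the transaction timestamp recorded when the results were cached
		return timestampsCache.isUpToDate( querySpaces, cachedAtTimestamp, session );
	}
}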

View File

@@ -1,81 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.cache.spi;
import java.io.Serializable;
import java.util.Set;
import org.hibernate.cache.CacheException;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
/**
* Tracks the timestamps of the most recent updates to particular tables. It is
* important that the cache timeout of the underlying cache implementation be set
* to a higher value than the timeouts of any of the query caches. In fact, we
recommend that the underlying cache not be configured for expiry at all.
* Note, in particular, that an LRU cache expiry policy is never appropriate.
*
* @author Gavin King
* @author Mikheil Kapanadze
*
* @deprecated Use {@link TimestampsCache} instead
*/
@SuppressWarnings("unused")
@Deprecated
public interface UpdateTimestampsCache {
/**
* Get the underlying cache region where data is stored.
*
* @return The underlying region.
*/
TimestampsRegion getRegion();
/**
* Perform pre-invalidation.
*
* @param spaces The spaces to pre-invalidate
*
* @throws CacheException Indicates a problem delegating to the underlying region.
*/
void preInvalidate(Serializable[] spaces, SharedSessionContractImplementor session) throws CacheException;
/**
* Perform invalidation.
*
*
* @param spaces The spaces to invalidate.
* @param session
*
* @throws CacheException Indicates a problem delegating to the underlying region.
*/
void invalidate(Serializable[] spaces, SharedSessionContractImplementor session) throws CacheException;
/**
* Perform an up-to-date check for the given set of query spaces.
*
*
* @param spaces The spaces to check
* @param timestamp The timestamp against which to check.
*
* @throws CacheException Indicates a problem delegating to the underlying region.
*/
boolean isUpToDate(Set<Serializable> spaces, Long timestamp, SharedSessionContractImplementor session) throws CacheException;
/**
* Clear the update-timestamps data.
*
* @throws CacheException Indicates problem delegating call to underlying region.
*/
void clear() throws CacheException;
/**
* Destroys the cache.
*
* @throws CacheException Indicates problem delegating call to underlying region.
*/
void destroy();
}

View File

@@ -13,6 +13,8 @@ import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.env.spi.ExtractedDatabaseMetaData;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.service.Service;
import org.hibernate.sql.exec.internal.JdbcSelectExecutorStandardImpl;
import org.hibernate.sql.exec.spi.JdbcSelectExecutor;
/**
* Contract for services around JDBC operations. These represent shared resources, aka not varied by session/use.
@@ -75,4 +77,11 @@ public interface JdbcServices extends Service {
* @return The ResultSet wrapper.
*/
ResultSetWrapper getResultSetWrapper();
/**
* Access the executor for {@link org.hibernate.sql.exec.spi.JdbcSelect} operations
*/
default JdbcSelectExecutor getJdbcSelectExecutor() {
return JdbcSelectExecutorStandardImpl.INSTANCE;
}
}
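
A small sketch of resolving the new executor. The lookup path (SharedSessionContractImplementor#getJdbcServices, then the default getJdbcSelectExecutor) is taken from this change set, while the wrapper method is an illustrative assumption; the actual JdbcSelect execution call is omitted because JdbcSelectExecutor's methods are not shown in this excerpt.

import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.sql.exec.spi.JdbcSelectExecutor;

public class JdbcSelectExecutorLookupSketch {

	public static JdbcSelectExecutor resolveExecutor(SharedSessionContractImplementor session) {
		// JdbcServices is a shared, factory-scoped service
		final JdbcServices jdbcServices = session.getJdbcServices();
		// defaults to JdbcSelectExecutorStandardImpl.INSTANCE unless a custom
		// JdbcServices implementation overrides the new default method
		return jdbcServices.getJdbcSelectExecutor();
	}
}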

View File

@@ -6,15 +6,13 @@
*/
package org.hibernate.internal;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import org.hibernate.HibernateException;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.Loader;
import org.hibernate.query.spi.ScrollableResultsImplementor;
import org.hibernate.sql.results.internal.JdbcValuesSourceProcessingStateStandardImpl;
import org.hibernate.sql.results.internal.RowProcessingStateStandardImpl;
import org.hibernate.sql.results.spi.JdbcValues;
import org.hibernate.sql.results.spi.JdbcValuesSourceProcessingOptions;
import org.hibernate.sql.results.spi.RowReader;
/**
@@ -23,62 +21,30 @@ import org.hibernate.sql.results.spi.RowReader;
* @author Steve Ebersole
*/
public abstract class AbstractScrollableResults<R> implements ScrollableResultsImplementor<R> {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( AbstractScrollableResults.class );
private final ResultSet resultSet;
private final PreparedStatement ps;
private final SharedSessionContractImplementor session;
private final Loader loader;
private final QueryParameters queryParameters;
private final JdbcValues jdbcValues;
private final JdbcValuesSourceProcessingOptions processingOptions;
private final JdbcValuesSourceProcessingStateStandardImpl jdbcValuesSourceProcessingState;
private final RowProcessingStateStandardImpl rowProcessingState;
private final RowReader<R> rowReader;
private final SharedSessionContractImplementor persistenceContext;
private boolean closed;
@SuppressWarnings("WeakerAccess")
protected AbstractScrollableResults(
ResultSet rs,
PreparedStatement ps,
SharedSessionContractImplementor sess,
Loader loader,
QueryParameters queryParameters,
RowReader<R> rowReader) {
this.resultSet = rs;
this.ps = ps;
this.session = sess;
this.loader = loader;
this.queryParameters = queryParameters;
public AbstractScrollableResults(
JdbcValues jdbcValues,
JdbcValuesSourceProcessingOptions processingOptions,
JdbcValuesSourceProcessingStateStandardImpl jdbcValuesSourceProcessingState,
RowProcessingStateStandardImpl rowProcessingState,
RowReader<R> rowReader,
SharedSessionContractImplementor persistenceContext) {
this.jdbcValues = jdbcValues;
this.processingOptions = processingOptions;
this.jdbcValuesSourceProcessingState = jdbcValuesSourceProcessingState;
this.rowProcessingState = rowProcessingState;
this.rowReader = rowReader;
this.persistenceContext = persistenceContext;
}
protected abstract R getCurrentRow();
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Access to state for sub-types
protected ResultSet getResultSet() {
return resultSet;
}
protected PreparedStatement getPs() {
return ps;
}
protected SharedSessionContractImplementor getSession() {
return session;
}
protected Loader getLoader() {
return loader;
}
protected QueryParameters getQueryParameters() {
return queryParameters;
}
protected RowReader<R> getRowReader() {
return rowReader;
}
@Override
public final R get() throws HibernateException {
@@ -88,13 +54,34 @@ public abstract class AbstractScrollableResults<R> implements ScrollableResultsI
return getCurrentRow();
}
protected void afterScrollOperation() {
session.afterScrollOperation();
protected abstract R getCurrentRow();
protected JdbcValues getJdbcValues() {
return jdbcValues;
}
@Override
public boolean isClosed() {
return this.closed;
protected JdbcValuesSourceProcessingOptions getProcessingOptions() {
return processingOptions;
}
protected JdbcValuesSourceProcessingStateStandardImpl getJdbcValuesSourceProcessingState() {
return jdbcValuesSourceProcessingState;
}
protected RowProcessingStateStandardImpl getRowProcessingState() {
return rowProcessingState;
}
protected RowReader<R> getRowReader() {
return rowReader;
}
protected SharedSessionContractImplementor getPersistenceContext() {
return persistenceContext;
}
protected void afterScrollOperation() {
getPersistenceContext().afterScrollOperation();
}
@Override
@@ -104,23 +91,14 @@ public abstract class AbstractScrollableResults<R> implements ScrollableResultsI
return;
}
// getJdbcValues().finishUp();
// getPersistenceContext().getJdbcCoordinator().afterStatementExecution();
// // not absolutely necessary, but does help with aggressive release
// //session.getJDBCContext().getConnectionManager().closeQueryStatement( ps, resultSet );
// session.getJdbcCoordinator().getResourceRegistry().release( ps );
// session.getJdbcCoordinator().afterStatementExecution();
// try {
// session.getPersistenceContext().getLoadContexts().cleanup( resultSet );
// }
// catch (Throwable ignore) {
// // ignore this error for now
// if ( LOG.isTraceEnabled() ) {
// LOG.tracev( "Exception trying to cleanup load context : {0}", ignore.getMessage() );
// }
// }
getJdbcValues().finishUp();
getPersistenceContext().getJdbcCoordinator().afterStatementExecution();
this.closed = true;
}
@Override
public boolean isClosed() {
return this.closed;
}
}

View File

@@ -6,15 +6,13 @@
*/
package org.hibernate.internal;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.HibernateException;
import org.hibernate.NotYetImplementedFor6Exception;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.Loader;
import org.hibernate.sql.results.internal.JdbcValuesSourceProcessingStateStandardImpl;
import org.hibernate.sql.results.internal.RowProcessingStateStandardImpl;
import org.hibernate.sql.results.spi.JdbcValues;
import org.hibernate.sql.results.spi.JdbcValuesSourceProcessingOptions;
import org.hibernate.sql.results.spi.RowReader;
/**
@@ -24,17 +22,26 @@ import org.hibernate.sql.results.spi.RowReader;
*/
public class FetchingScrollableResultsImpl<R> extends AbstractScrollableResults<R> {
private R currentRow;
private int currentPosition;
private Integer maxPosition;
public FetchingScrollableResultsImpl(
ResultSet rs,
PreparedStatement ps,
SharedSessionContractImplementor sess,
Loader loader,
QueryParameters queryParameters,
RowReader<R> rowReader) {
super( rs, ps, sess, loader, queryParameters, rowReader );
JdbcValues jdbcValues,
JdbcValuesSourceProcessingOptions processingOptions,
JdbcValuesSourceProcessingStateStandardImpl jdbcValuesSourceProcessingState,
RowProcessingStateStandardImpl rowProcessingState,
RowReader<R> rowReader,
SharedSessionContractImplementor persistenceContext) {
super(
jdbcValues,
processingOptions,
jdbcValuesSourceProcessingState,
rowProcessingState,
rowReader,
persistenceContext
);
this.maxPosition = jdbcValuesSourceProcessingState.getQueryOptions().getEffectiveLimit().getMaxRows();
}
@Override
@@ -150,38 +157,40 @@ public class FetchingScrollableResultsImpl<R> extends AbstractScrollableResults<
@Override
public boolean last() {
boolean more = false;
if ( maxPosition != null ) {
if ( currentPosition > maxPosition ) {
more = previous();
}
for ( int i = currentPosition; i < maxPosition; i++ ) {
more = next();
}
}
else {
try {
if ( isResultSetEmpty() || getResultSet().isAfterLast() ) {
// should not be able to reach last without maxPosition being set
// unless there are no results
return false;
}
throw new NotYetImplementedFor6Exception( getClass() );
while ( !getResultSet().isAfterLast() ) {
more = next();
}
}
catch (SQLException e) {
throw getSession().getFactory().getSQLExceptionHelper().convert(
e,
"exception calling isAfterLast()"
);
}
}
afterScrollOperation();
return more;
// boolean more = false;
// if ( maxPosition != null ) {
// if ( currentPosition > maxPosition ) {
// more = previous();
// }
// for ( int i = currentPosition; i < maxPosition; i++ ) {
// more = next();
// }
// }
// else {
// try {
// if ( isResultSetEmpty() || getResultSet().isAfterLast() ) {
// // should not be able to reach last without maxPosition being set
// // unless there are no results
// return false;
// }
//
// while ( !getResultSet().isAfterLast() ) {
// more = next();
// }
// }
// catch (SQLException e) {
// throw getSession().getFactory().getSQLExceptionHelper().convert(
// e,
// "exception calling isAfterLast()"
// );
// }
// }
//
// afterScrollOperation();
//
// return more;
}
@Override
@@ -196,17 +205,19 @@ public class FetchingScrollableResultsImpl<R> extends AbstractScrollableResults<
@Override
public void beforeFirst() {
try {
getResultSet().beforeFirst();
}
catch (SQLException e) {
throw getSession().getFactory().getSQLExceptionHelper().convert(
e,
"exception calling beforeFirst()"
);
}
currentRow = null;
currentPosition = 0;
throw new NotYetImplementedFor6Exception( getClass() );
// try {
// getResultSet().beforeFirst();
// }
// catch (SQLException e) {
// throw getSession().getFactory().getSQLExceptionHelper().convert(
// e,
// "exception calling beforeFirst()"
// );
// }
// currentRow = null;
// currentPosition = 0;
}
@Override
@@ -247,16 +258,16 @@ public class FetchingScrollableResultsImpl<R> extends AbstractScrollableResults<
return scroll( rowNumber - currentPosition );
}
private boolean isResultSetEmpty() {
try {
return currentPosition == 0 && !getResultSet().isBeforeFirst() && !getResultSet().isAfterLast();
}
catch (SQLException e) {
throw getSession().getFactory().getSQLExceptionHelper().convert(
e,
"Could not determine if resultset is empty due to exception calling isBeforeFirst or isAfterLast()"
);
}
}
// private boolean isResultSetEmpty() {
// try {
// return currentPosition == 0 && !getResultSet().isBeforeFirst() && !getResultSet().isAfterLast();
// }
// catch (SQLException e) {
// throw getSession().getFactory().getSQLExceptionHelper().convert(
// e,
// "Could not determine if resultset is empty due to exception calling isBeforeFirst or isAfterLast()"
// );
// }
// }
}

View File

@@ -6,17 +6,16 @@
*/
package org.hibernate.internal;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.HibernateException;
import org.hibernate.JDBCException;
import org.hibernate.NotYetImplementedFor6Exception;
import org.hibernate.engine.spi.PersistenceContext;
import org.hibernate.engine.spi.QueryParameters;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.Loader;
import org.hibernate.sql.results.internal.JdbcValuesSourceProcessingStateStandardImpl;
import org.hibernate.sql.results.internal.RowProcessingStateStandardImpl;
import org.hibernate.sql.results.spi.JdbcValues;
import org.hibernate.sql.results.spi.JdbcValuesSourceProcessingOptions;
import org.hibernate.sql.results.spi.RowReader;
/**
@@ -28,13 +27,20 @@ public class ScrollableResultsImpl<R> extends AbstractScrollableResults<R> {
private R currentRow;
public ScrollableResultsImpl(
ResultSet rs,
PreparedStatement ps,
SharedSessionContractImplementor sess,
Loader loader,
QueryParameters queryParameters,
RowReader<R> rowReader) {
super( rs, ps, sess, loader, queryParameters, rowReader );
JdbcValues jdbcValues,
JdbcValuesSourceProcessingOptions processingOptions,
JdbcValuesSourceProcessingStateStandardImpl jdbcValuesSourceProcessingState,
RowProcessingStateStandardImpl rowProcessingState,
RowReader<R> rowReader,
SharedSessionContractImplementor persistenceContext) {
super(
jdbcValues,
processingOptions,
jdbcValuesSourceProcessingState,
rowProcessingState,
rowReader,
persistenceContext
);
}
@Override
@@ -44,48 +50,60 @@ public class ScrollableResultsImpl<R> extends AbstractScrollableResults<R> {
@Override
public boolean scroll(int i) {
try {
final boolean result = getResultSet().relative( i );
prepareCurrentRow( result );
return result;
}
catch (SQLException sqle) {
throw convert( sqle, "could not advance using scroll()" );
}
throw new NotYetImplementedFor6Exception();
// todo (6.0) : need these scrollable ResultSet "re-positioning"-style methods on the JdbcValues stuff
// try {
// final boolean result = getResultSet().relative( i );
// prepareCurrentRow( result );
// return result;
// }
// catch (SQLException sqle) {
// throw convert( sqle, "could not advance using scroll()" );
// }
}
protected JDBCException convert(SQLException sqle, String message) {
return getSession().getFactory().getSQLExceptionHelper().convert( sqle, message );
return getPersistenceContext().getJdbcServices().getSqlExceptionHelper().convert( sqle, message );
}
@Override
public boolean first() {
try {
final boolean result = getResultSet().first();
prepareCurrentRow( result );
return result;
}
catch (SQLException sqle) {
throw convert( sqle, "could not advance using first()" );
}
throw new NotYetImplementedFor6Exception();
// todo (6.0) : need these scrollable ResultSet "re-positioning"-style methods on the JdbcValues stuff
// try {
// final boolean result = getResultSet().first();
// prepareCurrentRow( result );
// return result;
// }
// catch (SQLException sqle) {
// throw convert( sqle, "could not advance using first()" );
// }
}
@Override
public boolean last() {
try {
final boolean result = getResultSet().last();
prepareCurrentRow( result );
return result;
}
catch (SQLException sqle) {
throw convert( sqle, "could not advance using last()" );
}
throw new NotYetImplementedFor6Exception();
// todo (6.0) : need these scrollable ResultSet "re-positioning"-style methods on the JdbcValues stuff
// try {
// final boolean result = getResultSet().last();
// prepareCurrentRow( result );
// return result;
// }
// catch (SQLException sqle) {
// throw convert( sqle, "could not advance using last()" );
// }
}
@Override
public boolean next() {
try {
final boolean result = getResultSet().next();
final boolean result = getJdbcValues().next( getRowProcessingState() );
prepareCurrentRow( result );
return result;
}
@@ -96,114 +114,127 @@ public class ScrollableResultsImpl<R> extends AbstractScrollableResults<R> {
@Override
public boolean previous() {
try {
final boolean result = getResultSet().previous();
prepareCurrentRow( result );
return result;
}
catch (SQLException sqle) {
throw convert( sqle, "could not advance using previous()" );
}
throw new NotYetImplementedFor6Exception();
// todo (6.0) : need these scrollable ResultSet "re-positioning"-style methods on the JdbcValues stuff
// try {
// final boolean result = getResultSet().previous();
// prepareCurrentRow( result );
// return result;
// }
// catch (SQLException sqle) {
// throw convert( sqle, "could not advance using previous()" );
// }
}
@Override
public void afterLast() {
try {
getResultSet().afterLast();
}
catch (SQLException sqle) {
throw convert( sqle, "exception calling afterLast()" );
}
throw new NotYetImplementedFor6Exception();
// todo (6.0) : need these scrollable ResultSet "re-positioning"-style methods on the JdbcValues stuff
// try {
// getResultSet().afterLast();
// }
// catch (SQLException sqle) {
// throw convert( sqle, "exception calling afterLast()" );
// }
}
@Override
public void beforeFirst() {
try {
getResultSet().beforeFirst();
}
catch (SQLException sqle) {
throw convert( sqle, "exception calling beforeFirst()" );
}
throw new NotYetImplementedFor6Exception();
// todo (6.0) : need these scrollable ResultSet "re-positioning"-style methods on the JdbcValues stuff
// try {
// getResultSet().beforeFirst();
// }
// catch (SQLException sqle) {
// throw convert( sqle, "exception calling beforeFirst()" );
// }
}
@Override
public boolean isFirst() {
try {
return getResultSet().isFirst();
}
catch (SQLException sqle) {
throw convert( sqle, "exception calling isFirst()" );
}
throw new NotYetImplementedFor6Exception();
// todo (6.0) : need these scrollable ResultSet "re-positioning"-style methods on the JdbcValues stuff
// try {
// return getResultSet().isFirst();
// }
// catch (SQLException sqle) {
// throw convert( sqle, "exception calling isFirst()" );
// }
}
@Override
public boolean isLast() {
try {
return getResultSet().isLast();
}
catch (SQLException sqle) {
throw convert( sqle, "exception calling isLast()" );
}
throw new NotYetImplementedFor6Exception();
// todo (6.0) : need these scrollable ResultSet "re-positioning"-style methods on the JdbcValues stuff
// try {
// return getResultSet().isLast();
// }
// catch (SQLException sqle) {
// throw convert( sqle, "exception calling isLast()" );
// }
}
@Override
public int getRowNumber() throws HibernateException {
try {
return getResultSet().getRow() - 1;
}
catch (SQLException sqle) {
throw convert( sqle, "exception calling getRow()" );
}
throw new NotYetImplementedFor6Exception();
// todo (6.0) : need these scrollable ResultSet "re-positioning"-style methods on the JdbcValues stuff
// try {
// return getResultSet().getRow() - 1;
// }
// catch (SQLException sqle) {
// throw convert( sqle, "exception calling getRow()" );
// }
}
@Override
public boolean setRowNumber(int rowNumber) throws HibernateException {
if ( rowNumber >= 0 ) {
rowNumber++;
}
throw new NotYetImplementedFor6Exception();
try {
final boolean result = getResultSet().absolute( rowNumber );
prepareCurrentRow( result );
return result;
}
catch (SQLException sqle) {
throw convert( sqle, "could not advance using absolute()" );
}
// todo (6.0) : need these scrollable ResultSet "re-positioning"-style methods on the JdbcValues stuff
// if ( rowNumber >= 0 ) {
// rowNumber++;
// }
//
// try {
// final boolean result = getResultSet().absolute( rowNumber );
// prepareCurrentRow( result );
// return result;
// }
// catch (SQLException sqle) {
// throw convert( sqle, "could not advance using absolute()" );
// }
}
private void prepareCurrentRow(boolean underlyingScrollSuccessful) {
throw new NotYetImplementedFor6Exception( getClass() );
// if ( !underlyingScrollSuccessful ) {
// currentRow = null;
// return;
// }
//
// final PersistenceContext persistenceContext = getSession().getPersistenceContextInternal();
// persistenceContext.beforeLoad();
// try {
// final Object result = getLoader().loadSingleRow(
// getResultSet(),
// getSession(),
// getQueryParameters(),
// true
// );
// if ( result != null && result.getClass().isArray() ) {
// currentRow = (Object[]) result;
// }
// else {
// currentRow = new Object[] {result};
// }
//
// if ( getHolderInstantiator() != null ) {
// currentRow = new Object[] { getHolderInstantiator().instantiate( currentRow ) };
// }
// }
// finally {
// persistenceContext.afterLoad();
// }
//
// afterScrollOperation();
if ( !underlyingScrollSuccessful ) {
currentRow = null;
return;
}
try {
currentRow = getRowReader().readRow(
getRowProcessingState(),
getProcessingOptions()
);
}
catch (SQLException e) {
throw convert( e, "Unable to read row as part of ScrollableResult handling" );
}
afterScrollOperation();
}
}

View File

@@ -2633,137 +2633,138 @@ public abstract class Loader {
final Set<Serializable> querySpaces,
final Type[] resultTypes) {
QueryResultsCache queryCache = factory.getCache().getQueryResultsCache( queryParameters.getCacheRegion() );
QueryKey key = generateQueryKey( session, queryParameters );
if ( querySpaces == null || querySpaces.size() == 0 ) {
LOG.tracev( "Unexpected querySpaces is {0}", ( querySpaces == null ? querySpaces : "empty" ) );
}
else {
LOG.tracev( "querySpaces is {0}", querySpaces );
}
List result = getResultFromQueryCache(
session,
queryParameters,
querySpaces,
resultTypes,
queryCache,
key
);
if ( result == null ) {
result = doList( session, queryParameters, key.getResultTransformer() );
putResultInQueryCache(
session,
queryParameters,
resultTypes,
queryCache,
key,
result
);
}
ResultTransformer resolvedTransformer = resolveResultTransformer( queryParameters.getResultTransformer() );
if ( resolvedTransformer != null ) {
result = (
areResultSetRowsTransformedImmediately() ?
key.getResultTransformer().retransformResults(
result,
getResultRowAliases(),
queryParameters.getResultTransformer(),
includeInResultRow()
) :
key.getResultTransformer().untransformToTuples(
result
)
);
}
return getResultList( result, queryParameters.getResultTransformer() );
// QueryResultsCache queryCache = factory.getCache().getQueryResultsCache( queryParameters.getCacheRegion() );
//
// QueryKey key = generateQueryKey( session, queryParameters );
//
// if ( querySpaces == null || querySpaces.size() == 0 ) {
// LOG.tracev( "Unexpected querySpaces is {0}", ( querySpaces == null ? querySpaces : "empty" ) );
// }
// else {
// LOG.tracev( "querySpaces is {0}", querySpaces );
// }
//
// List result = getResultFromQueryCache(
// session,
// queryParameters,
// querySpaces,
// resultTypes,
// queryCache,
// key
// );
//
// if ( result == null ) {
// result = doList( session, queryParameters, key.getResultTransformer() );
//
// putResultInQueryCache(
// session,
// queryParameters,
// resultTypes,
// queryCache,
// key,
// result
// );
// }
//
// ResultTransformer resolvedTransformer = resolveResultTransformer( queryParameters.getResultTransformer() );
// if ( resolvedTransformer != null ) {
// result = (
// areResultSetRowsTransformedImmediately() ?
// key.getResultTransformer().retransformResults(
// result,
// getResultRowAliases(),
// queryParameters.getResultTransformer(),
// includeInResultRow()
// ) :
// key.getResultTransformer().untransformToTuples(
// result
// )
// );
// }
//
// return getResultList( result, queryParameters.getResultTransformer() );
throw new UnsupportedOperationException( );
}
private QueryKey generateQueryKey(
SharedSessionContractImplementor session,
QueryParameters queryParameters) {
return QueryKey.generateQueryKey(
getSQLString(),
queryParameters,
FilterKey.createFilterKeys( session.getLoadQueryInfluencers().getEnabledFilters() ),
session,
createCacheableResultTransformer( queryParameters )
);
}
private CacheableResultTransformer createCacheableResultTransformer(QueryParameters queryParameters) {
return CacheableResultTransformer.create(
queryParameters.getResultTransformer(),
getResultRowAliases(),
includeInResultRow()
);
}
private List getResultFromQueryCache(
final SharedSessionContractImplementor session,
final QueryParameters queryParameters,
final Set<Serializable> querySpaces,
final Type[] resultTypes,
final QueryResultsCache queryCache,
final QueryKey key) {
List result = null;
if ( session.getCacheMode().isGetEnabled() ) {
boolean isImmutableNaturalKeyLookup =
queryParameters.isNaturalKeyLookup() &&
resultTypes.length == 1 &&
resultTypes[0].isEntityType() &&
getEntityPersister( EntityType.class.cast( resultTypes[0] ) )
.getEntityMetamodel()
.hasImmutableNaturalId();
final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
boolean defaultReadOnlyOrig = persistenceContext.isDefaultReadOnly();
if ( queryParameters.isReadOnlyInitialized() ) {
// The read-only/modifiable mode for the query was explicitly set.
// Temporarily set the default read-only/modifiable setting to the query's setting.
persistenceContext.setDefaultReadOnly( queryParameters.isReadOnly() );
}
else {
// The read-only/modifiable setting for the query was not initialized.
// Use the default read-only/modifiable from the persistence context instead.
queryParameters.setReadOnly( persistenceContext.isDefaultReadOnly() );
}
try {
result = queryCache.get(
key,
querySpaces,
key.getResultTransformer().getCachedResultTypes( resultTypes ),
session
);
}
finally {
persistenceContext.setDefaultReadOnly( defaultReadOnlyOrig );
}
final StatisticsImplementor statistics = factory.getStatistics();
if ( statistics.isStatisticsEnabled() ) {
if ( result == null ) {
statistics.queryCacheMiss( getQueryIdentifier(), queryCache.getRegion().getName() );
}
else {
statistics.queryCacheHit( getQueryIdentifier(), queryCache.getRegion().getName() );
}
}
}
return result;
}
private EntityPersister getEntityPersister(EntityType entityType) {
return factory.getMetamodel().entityPersister( entityType.getAssociatedEntityName() );
}
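For reference, the read-only handling in getResultFromQueryCache above follows a save/override/restore-in-finally shape: remember the persistence context default, temporarily apply the query's setting, and always restore the original value. A minimal standalone sketch of that pattern (MutableDefault and withReadOnly are hypothetical illustrations, not Hibernate API):

import java.util.function.Supplier;

// Hypothetical holder standing in for PersistenceContext#isDefaultReadOnly / #setDefaultReadOnly
class MutableDefault {
    private boolean readOnly;
    boolean isReadOnly() { return readOnly; }
    void setReadOnly(boolean readOnly) { this.readOnly = readOnly; }
}

class ReadOnlyOverrideSketch {
    static <T> T withReadOnly(MutableDefault context, boolean temporary, Supplier<T> work) {
        final boolean original = context.isReadOnly();
        context.setReadOnly( temporary );
        try {
            // run the work with the temporarily overridden default
            return work.get();
        }
        finally {
            // always restore the original default, even if the work fails
            context.setReadOnly( original );
        }
    }

    public static void main(String[] args) {
        MutableDefault context = new MutableDefault();
        String result = withReadOnly( context, true, () -> "cached lookup" );
        System.out.println( result + "; restored read-only = " + context.isReadOnly() );
    }
}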
// private QueryKey generateQueryKey(
// SharedSessionContractImplementor session,
// QueryParameters queryParameters) {
// return QueryKey.generateQueryKey(
// getSQLString(),
// queryParameters,
// FilterKey.createFilterKeys( session.getLoadQueryInfluencers().getEnabledFilters() ),
// session,
// createCacheableResultTransformer( queryParameters )
// );
// }
//
// private CacheableResultTransformer createCacheableResultTransformer(QueryParameters queryParameters) {
// return CacheableResultTransformer.create(
// queryParameters.getResultTransformer(),
// getResultRowAliases(),
// includeInResultRow()
// );
// }
//
// private List getResultFromQueryCache(
// final SharedSessionContractImplementor session,
// final QueryParameters queryParameters,
// final Set<Serializable> querySpaces,
// final Type[] resultTypes,
// final QueryResultsCache queryCache,
// final QueryKey key) {
// List result = null;
//
// if ( session.getCacheMode().isGetEnabled() ) {
// boolean isImmutableNaturalKeyLookup =
// queryParameters.isNaturalKeyLookup() &&
// resultTypes.length == 1 &&
// resultTypes[0].isEntityType() &&
// getEntityPersister( EntityType.class.cast( resultTypes[0] ) )
// .getEntityMetamodel()
// .hasImmutableNaturalId();
//
// final PersistenceContext persistenceContext = session.getPersistenceContextInternal();
// boolean defaultReadOnlyOrig = persistenceContext.isDefaultReadOnly();
// if ( queryParameters.isReadOnlyInitialized() ) {
// // The read-only/modifiable mode for the query was explicitly set.
// // Temporarily set the default read-only/modifiable setting to the query's setting.
// persistenceContext.setDefaultReadOnly( queryParameters.isReadOnly() );
// }
// else {
// // The read-only/modifiable setting for the query was not initialized.
// // Use the default read-only/modifiable from the persistence context instead.
// queryParameters.setReadOnly( persistenceContext.isDefaultReadOnly() );
// }
// try {
// result = queryCache.get(
// key,
// querySpaces,
// key.getResultTransformer().getCachedResultTypes( resultTypes ),
// session
// );
// }
// finally {
// persistenceContext.setDefaultReadOnly( defaultReadOnlyOrig );
// }
//
// final StatisticsImplementor statistics = factory.getStatistics();
// if ( statistics.isStatisticsEnabled() ) {
// if ( result == null ) {
// statistics.queryCacheMiss( getQueryIdentifier(), queryCache.getRegion().getName() );
// }
// else {
// statistics.queryCacheHit( getQueryIdentifier(), queryCache.getRegion().getName() );
// }
// }
// }
//
// return result;
// }
//
// private EntityPersister getEntityPersister(EntityType entityType) {
// return factory.getMetamodel().entityPersister( entityType.getAssociatedEntityName() );
// }
protected void putResultInQueryCache(
final SharedSessionContractImplementor session,
@ -2772,18 +2773,20 @@ public abstract class Loader {
final QueryResultsCache queryCache,
final QueryKey key,
final List result) {
if ( session.getCacheMode().isPutEnabled() ) {
boolean put = queryCache.put(
key,
result,
key.getResultTransformer().getCachedResultTypes( resultTypes ),
session
);
final StatisticsImplementor statistics = factory.getStatistics();
if ( put && statistics.isStatisticsEnabled() ) {
statistics.queryCachePut( getQueryIdentifier(), queryCache.getRegion().getName() );
}
}
// if ( session.getCacheMode().isPutEnabled() ) {
// boolean put = queryCache.put(
// key,
// result,
// key.getResultTransformer().getCachedResultTypes( resultTypes ),
// session
// );
// final StatisticsImplementor statistics = factory.getStatistics();
// if ( put && statistics.isStatisticsEnabled() ) {
// statistics.queryCachePut( getQueryIdentifier(), queryCache.getRegion().getName() );
// }
// }
throw new UnsupportedOperationException( );
}
/**
@ -2874,70 +2877,71 @@ public abstract class Loader {
final Type[] returnTypes,
final RowReader rowReader,
final SharedSessionContractImplementor session) throws HibernateException {
checkScrollability();
final StatisticsImplementor statistics = getFactory().getStatistics();
final boolean stats = getQueryIdentifier() != null &&
statistics.isStatisticsEnabled();
long startTime = 0;
if ( stats ) {
startTime = System.nanoTime();
}
try {
// Don't use Collections#emptyList() here -- follow on locking potentially adds AfterLoadActions,
// so the list cannot be immutable.
final SqlStatementWrapper wrapper = executeQueryStatement(
queryParameters,
true,
new ArrayList<AfterLoadAction>(),
session
);
final ResultSet rs = wrapper.getResultSet();
final PreparedStatement st = (PreparedStatement) wrapper.getStatement();
if ( stats ) {
final long endTime = System.nanoTime();
final long milliseconds = TimeUnit.MILLISECONDS.convert( endTime - startTime, TimeUnit.NANOSECONDS );
statistics.queryExecuted(
getQueryIdentifier(),
0,
milliseconds
);
}
if ( needsFetchingScroll() ) {
//noinspection unchecked
return new FetchingScrollableResultsImpl(
rs,
st,
session,
this,
queryParameters,
rowReader
);
}
else {
//noinspection unchecked
return new ScrollableResultsImpl(
rs,
st,
session,
this,
queryParameters,
rowReader
);
}
}
catch (SQLException sqle) {
throw factory.getJdbcServices().getSqlExceptionHelper().convert(
sqle,
"could not execute query using scroll",
getSQLString()
);
}
throw new UnsupportedOperationException( );
// checkScrollability();
//
// final StatisticsImplementor statistics = getFactory().getStatistics();
// final boolean stats = getQueryIdentifier() != null &&
// statistics.isStatisticsEnabled();
// long startTime = 0;
// if ( stats ) {
// startTime = System.nanoTime();
// }
//
// try {
// // Don't use Collections#emptyList() here -- follow on locking potentially adds AfterLoadActions,
// // so the list cannot be immutable.
// final SqlStatementWrapper wrapper = executeQueryStatement(
// queryParameters,
// true,
// new ArrayList<AfterLoadAction>(),
// session
// );
// final ResultSet rs = wrapper.getResultSet();
// final PreparedStatement st = (PreparedStatement) wrapper.getStatement();
//
// if ( stats ) {
// final long endTime = System.nanoTime();
// final long milliseconds = TimeUnit.MILLISECONDS.convert( endTime - startTime, TimeUnit.NANOSECONDS );
// statistics.queryExecuted(
// getQueryIdentifier(),
// 0,
// milliseconds
// );
// }
//
// if ( needsFetchingScroll() ) {
// //noinspection unchecked
// return new FetchingScrollableResultsImpl(
// rs,
// st,
// session,
// this,
// queryParameters,
// rowReader
// );
// }
// else {
// //noinspection unchecked
// return new ScrollableResultsImpl(
// rs,
// st,
// session,
// this,
// queryParameters,
// rowReader
// );
// }
//
// }
// catch (SQLException sqle) {
// throw factory.getJdbcServices().getSqlExceptionHelper().convert(
// sqle,
// "could not execute query using scroll",
// getSQLString()
// );
// }
}
/**

View File

@ -16,7 +16,6 @@ import org.hibernate.NotYetImplementedFor6Exception;
import org.hibernate.ScrollMode;
import org.hibernate.internal.util.streams.StingArrayCollector;
import org.hibernate.query.IllegalQueryOperationException;
import org.hibernate.query.internal.QueryHelper;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.query.spi.QueryParameterImplementor;
import org.hibernate.query.spi.ScrollableResultsImplementor;
@ -171,36 +170,19 @@ public class ConcreteSqmSelectQueryPlan<R> implements SelectQueryPlan<R> {
executionContext.getSession()
);
// try {
// // todo (6.0) : make these executors resolvable to allow plugging in custom ones.
// // Dialect?
// return JdbcSelectExecutorStandardImpl.INSTANCE.list(
// jdbcSelect,
// jdbcParameterBindings,
// executionContext,
// rowTransformer
// );
// }
// finally {
// domainParameterXref.clearExpansions();
// }
throw new NotYetImplementedFor6Exception( getClass() );
try {
return executionContext.getSession().getFactory().getJdbcServices().getJdbcSelectExecutor().list(
jdbcSelect,
jdbcParameterBindings,
executionContext,
rowTransformer
);
}
finally {
domainParameterXref.clearExpansions();
}
}
// private SqmSelectToSqlAstConverter getSqmSelectToSqlAstConverter(ExecutionContext executionContext) {
// // todo (6.0) : for cases where we have no "load query influencers" we could use a cached SQL AST
// return new SqmSelectToSqlAstConverter(
// executionContext.getQueryOptions(),
// domainParameterXref,
// executionContext.getDomainParameterBindingContext().getQueryParameterBindings(),
// executionContext.getLoadQueryInfluencers(),
// afterLoadAction -> {},
// executionContext.getSession().getFactory()
// );
// }
@Override
@SuppressWarnings("unchecked")
public ScrollableResultsImplementor performScroll(ScrollMode scrollMode, ExecutionContext executionContext) {

View File

@ -0,0 +1,303 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.exec.internal;
import java.io.Serializable;
import java.sql.PreparedStatement;
import java.util.ArrayList;
import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.hibernate.CacheMode;
import org.hibernate.ScrollMode;
import org.hibernate.cache.spi.QueryKey;
import org.hibernate.cache.spi.QueryResultsCache;
import org.hibernate.loader.spi.AfterLoadAction;
import org.hibernate.query.internal.ScrollableResultsIterator;
import org.hibernate.query.spi.ScrollableResultsImplementor;
import org.hibernate.sql.exec.spi.ExecutionContext;
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
import org.hibernate.sql.exec.spi.JdbcSelect;
import org.hibernate.sql.exec.spi.JdbcSelectExecutor;
import org.hibernate.sql.results.internal.DeferredResultSetAccess;
import org.hibernate.sql.results.internal.Helper;
import org.hibernate.sql.results.internal.JdbcValuesCacheHit;
import org.hibernate.sql.results.internal.JdbcValuesResultSetImpl;
import org.hibernate.sql.results.internal.JdbcValuesSourceProcessingStateStandardImpl;
import org.hibernate.sql.results.internal.ResultSetAccess;
import org.hibernate.sql.results.internal.RowProcessingStateStandardImpl;
import org.hibernate.sql.results.spi.JdbcValues;
import org.hibernate.sql.results.spi.JdbcValuesMapping;
import org.hibernate.sql.results.spi.JdbcValuesSourceProcessingOptions;
import org.hibernate.sql.results.spi.ListResultsConsumer;
import org.hibernate.sql.results.spi.ResultsConsumer;
import org.hibernate.sql.results.spi.RowReader;
import org.hibernate.sql.results.spi.RowTransformer;
import org.hibernate.sql.results.spi.ScrollableResultsConsumer;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
public class JdbcSelectExecutorStandardImpl implements JdbcSelectExecutor {
// todo (6.0) : Make resolving these executors swappable - JdbcServices?
// Since JdbcServices is just a "composition service", this is actually
// a very good option...
// todo (6.0) : where do affected-table-names get checked for up-to-date?
// who is responsible for that? Here?
/**
* Singleton access
*/
public static final JdbcSelectExecutorStandardImpl INSTANCE = new JdbcSelectExecutorStandardImpl();
private static final Logger log = Logger.getLogger( JdbcSelectExecutorStandardImpl.class );
@Override
public <R> List<R> list(
JdbcSelect jdbcSelect,
JdbcParameterBindings jdbcParameterBindings,
ExecutionContext executionContext,
RowTransformer<R> rowTransformer) {
return executeQuery(
jdbcSelect,
jdbcParameterBindings,
executionContext,
rowTransformer,
(sql) -> executionContext.getSession()
.getJdbcCoordinator()
.getStatementPreparer()
.prepareStatement( sql ),
ListResultsConsumer.instance()
);
}
@Override
public <R> ScrollableResultsImplementor<R> scroll(
JdbcSelect jdbcSelect,
ScrollMode scrollMode,
JdbcParameterBindings jdbcParameterBindings,
ExecutionContext executionContext,
RowTransformer<R> rowTransformer) {
return executeQuery(
jdbcSelect,
jdbcParameterBindings,
executionContext,
rowTransformer,
(sql) -> executionContext.getSession().getJdbcCoordinator().getStatementPreparer().prepareQueryStatement(
sql,
true,
scrollMode
),
ScrollableResultsConsumer.instance()
);
}
@Override
public <R> Stream<R> stream(
JdbcSelect jdbcSelect,
JdbcParameterBindings jdbcParameterBindings,
ExecutionContext executionContext,
RowTransformer<R> rowTransformer) {
final ScrollableResultsImplementor<R> scrollableResults = scroll(
jdbcSelect,
ScrollMode.FORWARD_ONLY,
jdbcParameterBindings,
executionContext,
rowTransformer
);
final ScrollableResultsIterator<R> iterator = new ScrollableResultsIterator<>( scrollableResults );
final Spliterator<R> spliterator = Spliterators.spliteratorUnknownSize( iterator, Spliterator.NONNULL );
final Stream<R> stream = StreamSupport.stream( spliterator, false );
return stream.onClose( scrollableResults::close );
}
private enum ExecuteAction {
EXECUTE_QUERY,
}
private <T, R> T executeQuery(
JdbcSelect jdbcSelect,
JdbcParameterBindings jdbcParameterBindings,
ExecutionContext executionContext,
RowTransformer<R> rowTransformer,
Function<String, PreparedStatement> statementCreator,
ResultsConsumer<T,R> resultsConsumer) {
final JdbcValues jdbcValues = resolveJdbcValuesSource(
jdbcSelect,
executionContext,
new DeferredResultSetAccess(
jdbcSelect,
jdbcParameterBindings,
executionContext,
statementCreator
)
);
/*
* Processing options are effectively only used for entity loading.  We do not need those values here.
*/
final JdbcValuesSourceProcessingOptions processingOptions = new JdbcValuesSourceProcessingOptions() {
@Override
public Object getEffectiveOptionalObject() {
return null;
}
@Override
public String getEffectiveOptionalEntityName() {
return null;
}
@Override
public Serializable getEffectiveOptionalId() {
return null;
}
@Override
public boolean shouldReturnProxies() {
return true;
}
};
final JdbcValuesSourceProcessingStateStandardImpl jdbcValuesSourceProcessingState =
new JdbcValuesSourceProcessingStateStandardImpl( executionContext, processingOptions );
final List<AfterLoadAction> afterLoadActions = new ArrayList<>();
final RowReader<R> rowReader = Helper.createRowReader(
executionContext.getSession().getFactory(),
afterLoadActions::add,
rowTransformer,
jdbcValues
);
final RowProcessingStateStandardImpl rowProcessingState = new RowProcessingStateStandardImpl(
jdbcValuesSourceProcessingState,
executionContext.getQueryOptions(),
rowReader,
jdbcValues
);
final T result = resultsConsumer.consume(
jdbcValues,
executionContext.getSession(),
processingOptions,
jdbcValuesSourceProcessingState,
rowProcessingState,
rowReader
);
for ( AfterLoadAction afterLoadAction : afterLoadActions ) {
// todo (6.0) : see notes on
afterLoadAction.afterLoad( executionContext.getSession(), null, null );
}
return result;
}
@SuppressWarnings("unchecked")
private JdbcValues resolveJdbcValuesSource(
JdbcSelect jdbcSelect,
ExecutionContext executionContext,
ResultSetAccess resultSetAccess) {
final List<Object[]> cachedResults;
final boolean queryCacheEnabled = executionContext.getSession().getFactory().getSessionFactoryOptions().isQueryCacheEnabled();
final CacheMode cacheMode = resolveCacheMode( executionContext );
final JdbcValuesMapping jdbcValuesMapping = jdbcSelect.getJdbcValuesMappingProducer()
.resolve( resultSetAccess, executionContext.getSession().getFactory() );
final QueryKey queryResultsCacheKey;
if ( queryCacheEnabled && cacheMode.isGetEnabled() ) {
log.debugf( "Reading Query result cache data per CacheMode#isGetEnabled [%s]", cacheMode.name() );
final QueryResultsCache queryCache = executionContext.getSession().getFactory()
.getCache()
.getQueryResultsCache( executionContext.getQueryOptions().getResultCacheRegionName() );
// todo (6.0) : not sure that it is at all important that we account for QueryResults
// these cached values are "lower level" than that, representing the
// "raw" JDBC values.
//
// todo (6.0) : relatedly ^^, pretty sure that SqlSelections are also irrelevant
queryResultsCacheKey = QueryKey.from(
jdbcSelect.getSql(),
executionContext.getQueryOptions().getLimit(),
executionContext.getDomainParameterBindingContext().getQueryParameterBindings(),
executionContext.getSession()
);
cachedResults = queryCache.get(
// todo (6.0) : QueryCache#get takes the `queryResultsCacheKey`; see that discussion above
queryResultsCacheKey,
// todo (6.0) : `querySpaces` and `session` make perfect sense as args, but it's odd passing those into this method just to pass along
// atm we do not even collect querySpaces, but we need to
jdbcSelect.getAffectedTableNames(),
executionContext.getSession()
);
// todo (6.0) : `querySpaces` and `session` are used in QueryCache#get to verify "up-to-dateness" via UpdateTimestampsCache
// better imo to move UpdateTimestampsCache handling here and have QueryCache be a simple access to
// the underlying query result cache region.
//
// todo (6.0) : if we go this route (^^), still beneficial to have an abstraction over different UpdateTimestampsCache-based
// invalidation strategies - QueryCacheInvalidationStrategy
}
else {
log.debugf( "Skipping reading Query result cache data: cache-enabled = %s, cache-mode = %s",
queryCacheEnabled,
cacheMode.name()
);
cachedResults = null;
queryResultsCacheKey = null;
}
if ( cachedResults == null || cachedResults.isEmpty() ) {
return new JdbcValuesResultSetImpl(
resultSetAccess,
queryResultsCacheKey,
executionContext.getQueryOptions(),
jdbcValuesMapping,
executionContext
);
}
else {
return new JdbcValuesCacheHit(
cachedResults,
jdbcValuesMapping
);
}
}
private CacheMode resolveCacheMode(ExecutionContext executionContext) {
CacheMode cacheMode = executionContext.getQueryOptions().getCacheMode();
if ( cacheMode != null ) {
return cacheMode;
}
cacheMode = executionContext.getSession().getCacheMode();
if ( cacheMode != null ) {
return cacheMode;
}
return CacheMode.NORMAL;
}
}
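As a rough illustration of the execute-and-consume shape used by executeQuery above (advance a values source row by row, transform each raw row, collect the results), here is a minimal self-contained sketch; ValuesSource and the lambda-based row reader are hypothetical stand-ins, not Hibernate types:

import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

// Hypothetical, simplified analogue of the executeQuery(...) flow above: a
// values source is advanced row by row, each raw row is transformed by a
// row reader, and the results are collected into a list.
public class ExecuteAndConsumeSketch {

    interface ValuesSource {
        boolean next();                 // analogous to JdbcValues#next
        Object[] currentRow();          // analogous to JdbcValues#getCurrentRowValuesArray
        void finishUp();                // analogous to JdbcValues#finishUp
    }

    static <R> List<R> list(ValuesSource source, Function<Object[], R> rowReader) {
        final List<R> results = new ArrayList<>();
        try {
            while ( source.next() ) {
                results.add( rowReader.apply( source.currentRow() ) );
            }
            return results;
        }
        finally {
            // mirror the finishUp handling done by the consumers above
            source.finishUp();
        }
    }

    public static void main(String[] args) {
        final Object[][] rows = { { 1, "first" }, { 2, "second" } };
        ValuesSource source = new ValuesSource() {
            private int position = -1;
            public boolean next() { return ++position < rows.length; }
            public Object[] currentRow() { return rows[position]; }
            public void finishUp() { /* nothing to release in this sketch */ }
        };
        System.out.println( list( source, row -> row[0] + ":" + row[1] ) );
    }
}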

View File

@ -0,0 +1,45 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal;
import java.sql.SQLException;
import org.hibernate.sql.results.internal.caching.QueryCachePutManager;
import org.hibernate.sql.results.spi.JdbcValues;
import org.hibernate.sql.results.spi.RowProcessingState;
/**
* @author Steve Ebersole
*/
public abstract class AbstractJdbcValues implements JdbcValues {
private final QueryCachePutManager queryCachePutManager;
public AbstractJdbcValues(QueryCachePutManager queryCachePutManager) {
if ( queryCachePutManager == null ) {
throw new IllegalArgumentException( "QueryCachePutManager cannot be null" );
}
this.queryCachePutManager = queryCachePutManager;
}
@Override
public final boolean next(RowProcessingState rowProcessingState) throws SQLException {
if ( getCurrentRowValuesArray() != null ) {
queryCachePutManager.registerJdbcRow( getCurrentRowValuesArray() );
}
return processNext( rowProcessingState );
}
protected abstract boolean processNext(RowProcessingState rowProcessingState);
@Override
public final void finishUp() {
queryCachePutManager.finishUp();
release();
}
protected abstract void release();
}

View File

@ -0,0 +1,89 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
/**
* @author Steve Ebersole
*/
public abstract class AbstractResultSetAccess implements ResultSetAccess {
private final SharedSessionContractImplementor persistenceContext;
private ResultSetMetaData resultSetMetaData;
public AbstractResultSetAccess(SharedSessionContractImplementor persistenceContext) {
this.persistenceContext = persistenceContext;
}
protected SharedSessionContractImplementor getPersistenceContext() {
return persistenceContext;
}
protected ResultSetMetaData getMetaData() {
// todo (6.0) : we need to consider a way to abstract this from JDBC so we can re-use all of this code for cached results as well
if ( resultSetMetaData == null ) {
try {
resultSetMetaData = getResultSet().getMetaData();
}
catch (SQLException e) {
throw persistenceContext.getJdbcServices().getSqlExceptionHelper().convert(
e,
"Unable to access ResultSetMetaData"
);
}
}
return resultSetMetaData;
}
@Override
public int getColumnCount() {
try {
return getMetaData().getColumnCount();
}
catch (SQLException e) {
throw getFactory().getJdbcServices().getJdbcEnvironment().getSqlExceptionHelper().convert(
e,
"Unable to access ResultSet column count"
);
}
}
@Override
public int resolveColumnPosition(String columnName) {
try {
return getResultSet().findColumn( columnName );
}
catch (SQLException e) {
throw getFactory().getJdbcServices().getJdbcEnvironment().getSqlExceptionHelper().convert(
e,
"Unable to find column position by name"
);
}
}
@Override
public String resolveColumnName(int position) {
try {
return getFactory().getJdbcServices().
getJdbcEnvironment()
.getDialect()
.getColumnAliasExtractor()
.extractColumnAlias( getMetaData(), position );
}
catch (SQLException e) {
throw getFactory().getJdbcServices().getJdbcEnvironment().getSqlExceptionHelper().convert(
e,
"Unable to find column name by position"
);
}
}
}

View File

@ -0,0 +1,131 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.function.Function;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.CoreLogging;
import org.hibernate.resource.jdbc.spi.LogicalConnectionImplementor;
import org.hibernate.sql.exec.spi.ExecutionContext;
import org.hibernate.sql.exec.spi.JdbcParameterBinder;
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
import org.hibernate.sql.exec.spi.JdbcSelect;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
public class DeferredResultSetAccess extends AbstractResultSetAccess {
private static final Logger log = CoreLogging.logger( DeferredResultSetAccess.class );
private final JdbcSelect jdbcSelect;
private final JdbcParameterBindings jdbcParameterBindings;
private final ExecutionContext executionContext;
private final Function<String, PreparedStatement> statementCreator;
private PreparedStatement preparedStatement;
private ResultSet resultSet;
public DeferredResultSetAccess(
JdbcSelect jdbcSelect,
JdbcParameterBindings jdbcParameterBindings,
ExecutionContext executionContext,
Function<String, PreparedStatement> statementCreator) {
super( executionContext.getSession() );
this.jdbcParameterBindings = jdbcParameterBindings;
this.executionContext = executionContext;
this.jdbcSelect = jdbcSelect;
this.statementCreator = statementCreator;
}
@Override
public ResultSet getResultSet() {
if ( resultSet == null ) {
executeQuery();
}
return resultSet;
}
@Override
public SessionFactoryImplementor getFactory() {
return executionContext.getSession().getFactory();
}
private void executeQuery() {
final LogicalConnectionImplementor logicalConnection = getPersistenceContext().getJdbcCoordinator().getLogicalConnection();
final JdbcServices jdbcServices = getPersistenceContext().getFactory().getServiceRegistry().getService( JdbcServices.class );
final String sql = jdbcSelect.getSql();
try {
log.tracef( "Executing query to retrieve ResultSet : %s", sql );
// prepare the query
preparedStatement = statementCreator.apply( sql );
// set options
if ( executionContext.getQueryOptions().getFetchSize() != null ) {
preparedStatement.setFetchSize( executionContext.getQueryOptions().getFetchSize() );
}
if ( executionContext.getQueryOptions().getTimeout() != null ) {
preparedStatement.setQueryTimeout( executionContext.getQueryOptions().getTimeout() );
}
// todo : limit/offset
// bind parameters
// todo : validate that all query parameters were bound?
int paramBindingPosition = 1;
for ( JdbcParameterBinder parameterBinder : jdbcSelect.getParameterBinders() ) {
parameterBinder.bindParameterValue(
preparedStatement,
paramBindingPosition++,
jdbcParameterBindings,
executionContext
);
}
resultSet = preparedStatement.executeQuery();
logicalConnection.getResourceRegistry().register( resultSet, preparedStatement );
}
catch (SQLException e) {
throw jdbcServices.getSqlExceptionHelper().convert(
e,
"JDBC exception executing SQL [" + sql + "]"
);
}
finally {
logicalConnection.afterStatement();
}
}
@Override
public void release() {
if ( resultSet != null ) {
getPersistenceContext().getJdbcCoordinator()
.getLogicalConnection()
.getResourceRegistry()
.release( resultSet, preparedStatement );
resultSet = null;
}
if ( preparedStatement != null ) {
getPersistenceContext().getJdbcCoordinator()
.getLogicalConnection()
.getResourceRegistry()
.release( preparedStatement );
preparedStatement = null;
}
}
}
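DeferredResultSetAccess only prepares and executes the statement when getResultSet() is first called, then reuses the result. A minimal sketch of that lazy-on-first-access pattern (LazyAccessSketch is an illustration, not Hibernate API):

import java.util.function.Supplier;

// A minimal sketch of the deferral used by DeferredResultSetAccess: the
// expensive resource is created on first access and reused afterwards.
public class LazyAccessSketch<T> {
    private final Supplier<T> creator;
    private T value;

    public LazyAccessSketch(Supplier<T> creator) {
        this.creator = creator;
    }

    public T get() {
        if ( value == null ) {
            value = creator.get();   // analogous to executeQuery() on first getResultSet()
        }
        return value;
    }

    public static void main(String[] args) {
        LazyAccessSketch<String> lazy = new LazyAccessSketch<>( () -> {
            System.out.println( "executing query..." );
            return "result-set";
        } );
        System.out.println( lazy.get() );  // triggers creation
        System.out.println( lazy.get() );  // reuses the cached value
    }
}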

View File

@ -0,0 +1,50 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
/**
* @author Steve Ebersole
*/
public class DirectResultSetAccess extends AbstractResultSetAccess {
private final PreparedStatement resultSetSource;
private final ResultSet resultSet;
public DirectResultSetAccess(
SharedSessionContractImplementor persistenceContext,
PreparedStatement resultSetSource,
ResultSet resultSet) {
super( persistenceContext );
this.resultSetSource = resultSetSource;
this.resultSet = resultSet;
persistenceContext.getJdbcCoordinator().getLogicalConnection().getResourceRegistry().register( resultSet, resultSetSource );
}
@Override
public ResultSet getResultSet() {
return resultSet;
}
@Override
public SessionFactoryImplementor getFactory() {
return getPersistenceContext().getFactory();
}
@Override
public void release() {
getPersistenceContext().getJdbcCoordinator()
.getLogicalConnection()
.getResourceRegistry()
.release( resultSet, resultSetSource );
}
}

View File

@ -0,0 +1,61 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.sql.exec.spi.Callback;
import org.hibernate.sql.results.SqlResultsLogger;
import org.hibernate.sql.results.spi.DomainResultAssembler;
import org.hibernate.sql.results.spi.Initializer;
import org.hibernate.sql.results.spi.JdbcValues;
import org.hibernate.sql.results.spi.RowReader;
import org.hibernate.sql.results.spi.RowTransformer;
/**
* @author Steve Ebersole
*/
public class Helper {
public static <R> RowReader<R> createRowReader(
SessionFactoryImplementor sessionFactory,
Callback callback,
RowTransformer<R> rowTransformer,
JdbcValues jdbcValues) {
final List<Initializer> initializers = new ArrayList<>();
final List<DomainResultAssembler> assemblers = jdbcValues.getValuesMapping().resolveAssemblers(
getInitializerConsumer( initializers ),
() -> sessionFactory
);
return new StandardRowReader<>(
assemblers,
initializers,
rowTransformer,
callback
);
}
private static Consumer<Initializer> getInitializerConsumer(List<Initializer> initializers) {
if ( SqlResultsLogger.INSTANCE.isDebugEnabled() ) {
return initializer -> {
SqlResultsLogger.INSTANCE.debug( "Adding initializer : " + initializer );
initializers.add( initializer );
};
}
else {
return initializer -> {
// noinspection Convert2MethodRef
initializers.add( initializer );
};
}
}
}

View File

@ -0,0 +1,72 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal;
import java.util.List;
import org.hibernate.sql.results.internal.caching.QueryCachePutManagerDisabledImpl;
import org.hibernate.sql.results.spi.JdbcValuesMapping;
import org.hibernate.sql.results.spi.RowProcessingState;
/**
* A JdbcValues implementation for cases where we had a cache hit.
*
* @author Steve Ebersole
*/
public class JdbcValuesCacheHit extends AbstractJdbcValues {
private Object[][] cachedData;
private final int numberOfRows;
private JdbcValuesMapping resolvedMapping;
private int position = -1;
public JdbcValuesCacheHit(Object[][] cachedData, JdbcValuesMapping resolvedMapping) {
// if we have a cache hit we should not be writing back to the cache -
// it would be pointless since the state would always be the same.
super( QueryCachePutManagerDisabledImpl.INSTANCE );
this.cachedData = cachedData;
this.numberOfRows = cachedData.length;
this.resolvedMapping = resolvedMapping;
}
public JdbcValuesCacheHit(List<Object[]> cachedResults, JdbcValuesMapping resolvedMapping) {
this( cachedResults.toArray( new Object[0][] ), resolvedMapping );
}
@Override
protected boolean processNext(RowProcessingState rowProcessingState) {
// NOTE : limit handling is explicitly skipped here because the cached
// state ought to already be the right size - the cache key
// includes the limit
if ( isExhausted() ) {
return false;
}
position++;
return true;
}
private boolean isExhausted() {
return position >= numberOfRows;
}
@Override
public JdbcValuesMapping getValuesMapping() {
return resolvedMapping;
}
@Override
public Object[] getCurrentRowValuesArray() {
if ( isExhausted() ) {
return null;
}
return cachedData[position];
}
@Override
protected void release() {
cachedData = null;
}
}

View File

@ -0,0 +1,163 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal;
import java.sql.SQLException;
import org.hibernate.CacheMode;
import org.hibernate.cache.spi.QueryKey;
import org.hibernate.cache.spi.QueryResultsCache;
import org.hibernate.query.Limit;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.exec.ExecutionException;
import org.hibernate.sql.exec.spi.ExecutionContext;
import org.hibernate.sql.results.internal.caching.QueryCachePutManager;
import org.hibernate.sql.results.internal.caching.QueryCachePutManagerDisabledImpl;
import org.hibernate.sql.results.internal.caching.QueryCachePutManagerEnabledImpl;
import org.hibernate.sql.results.spi.JdbcValuesMapping;
import org.hibernate.sql.results.spi.RowProcessingState;
/**
* JdbcValues implementation backed directly by a JDBC ResultSet
*
* @author Steve Ebersole
*/
public class JdbcValuesResultSetImpl extends AbstractJdbcValues {
private final ResultSetAccess resultSetAccess;
private final JdbcValuesMapping valuesMapping;
private final ExecutionContext executionContext;
// todo (6.0) - manage limit-based skips
private final int numberOfRowsToProcess;
// we start position at -1 prior to any next call so that the first next call
// increments position to 0, which is the first row
private int position = -1;
private Object[] currentRowJdbcValues;
public JdbcValuesResultSetImpl(
ResultSetAccess resultSetAccess,
QueryKey queryCacheKey,
QueryOptions queryOptions,
JdbcValuesMapping valuesMapping,
ExecutionContext executionContext) {
super( resolveQueryCachePutManager( executionContext, queryOptions, queryCacheKey ) );
this.resultSetAccess = resultSetAccess;
this.valuesMapping = valuesMapping;
this.executionContext = executionContext;
// todo (6.0) : decide how to handle paged/limited results
this.numberOfRowsToProcess = interpretNumberOfRowsToProcess( queryOptions );
}
private static int interpretNumberOfRowsToProcess(QueryOptions queryOptions) {
if ( queryOptions.getLimit() == null ) {
return -1;
}
final Limit limit = queryOptions.getLimit();
if ( limit.getMaxRows() == null ) {
return -1;
}
return limit.getMaxRows();
}
private static QueryCachePutManager resolveQueryCachePutManager(
ExecutionContext executionContext,
QueryOptions queryOptions,
QueryKey queryCacheKey) {
final boolean queryCacheEnabled = executionContext.getSession()
.getFactory()
.getSessionFactoryOptions()
.isQueryCacheEnabled();
final CacheMode cacheMode = queryOptions.getCacheMode();
if ( queryCacheEnabled && cacheMode.isPutEnabled() ) {
final QueryResultsCache queryCache = executionContext.getSession().getFactory()
.getCache()
.getQueryResultsCache( queryOptions.getResultCacheRegionName() );
return new QueryCachePutManagerEnabledImpl( queryCache, queryCacheKey );
}
else {
return QueryCachePutManagerDisabledImpl.INSTANCE;
}
}
@Override
protected final boolean processNext(RowProcessingState rowProcessingState) {
currentRowJdbcValues = null;
if ( numberOfRowsToProcess != -1 && position + 1 >= numberOfRowsToProcess ) {
// numberOfRowsToProcess != -1 means we had some limit, and
// position + 1 >= numberOfRowsToProcess means the next row would
// exceed the limited number of rows
return false;
}
position++;
try {
if ( !resultSetAccess.getResultSet().next() ) {
return false;
}
}
catch (SQLException e) {
throw makeExecutionException( "Error advancing JDBC ResultSet", e );
}
try {
currentRowJdbcValues = readCurrentRowValues( rowProcessingState );
return true;
}
catch (SQLException e) {
throw makeExecutionException( "Error reading JDBC row values", e );
}
}
private ExecutionException makeExecutionException(String message, SQLException cause) {
return new ExecutionException(
message,
executionContext.getSession().getJdbcServices().getSqlExceptionHelper().convert(
cause,
message
)
);
}
private Object[] readCurrentRowValues(RowProcessingState rowProcessingState) throws SQLException {
final int numberOfSqlSelections = valuesMapping.getSqlSelections().size();
final Object[] row = new Object[numberOfSqlSelections];
for ( SqlSelection sqlSelection : valuesMapping.getSqlSelections() ) {
row[ sqlSelection.getValuesArrayPosition() ] = sqlSelection.getJdbcValueExtractor().extract(
resultSetAccess.getResultSet(),
sqlSelection.getJdbcResultSetIndex(),
executionContext.getSession()
);
}
return row;
}
@Override
protected void release() {
resultSetAccess.release();
}
@Override
public JdbcValuesMapping getValuesMapping() {
return valuesMapping;
}
@Override
public Object[] getCurrentRowValuesArray() {
return currentRowJdbcValues;
}
}

View File

@ -17,7 +17,6 @@ import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.event.spi.EventSource;
import org.hibernate.event.spi.PostLoadEvent;
import org.hibernate.event.spi.PreLoadEvent;
import org.hibernate.graph.spi.AppliedGraph;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.sql.exec.spi.ExecutionContext;
import org.hibernate.sql.results.internal.domain.ArrayInitializer;

View File

@ -0,0 +1,79 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.sql.results.spi.JdbcValuesMetadata;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
/**
* Access to a JDBC ResultSet and information about it.
*
* @author Steve Ebersole
*/
public interface ResultSetAccess extends JdbcValuesMetadata {
ResultSet getResultSet();
SessionFactoryImplementor getFactory();
void release();
default int getColumnCount() {
try {
return getResultSet().getMetaData().getColumnCount();
}
catch (SQLException e) {
throw getFactory().getJdbcServices().getJdbcEnvironment().getSqlExceptionHelper().convert(
e,
"Unable to access ResultSet column count"
);
}
}
default int resolveColumnPosition(String columnName) {
try {
return getResultSet().findColumn( columnName );
}
catch (SQLException e) {
throw getFactory().getJdbcServices().getJdbcEnvironment().getSqlExceptionHelper().convert(
e,
"Unable to find column position by name"
);
}
}
default String resolveColumnName(int position) {
try {
return getFactory().getJdbcServices().getJdbcEnvironment()
.getDialect()
.getColumnAliasExtractor()
.extractColumnAlias( getResultSet().getMetaData(), position );
}
catch (SQLException e) {
throw getFactory().getJdbcServices().getJdbcEnvironment().getSqlExceptionHelper().convert(
e,
"Unable to find column name by position"
);
}
}
default SqlTypeDescriptor resolveSqlTypeDescriptor(int position) {
try {
return getFactory().getTypeConfiguration()
.getSqlTypeDescriptorRegistry()
.getDescriptor( getResultSet().getMetaData().getColumnType( position ) );
}
catch (SQLException e) {
throw getFactory().getJdbcServices().getSqlExceptionHelper().convert(
e,
"Unable to determine JDBC type code for ResultSet position " + position
);
}
}
}
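The default methods above are thin wrappers over standard JDBC metadata calls. Assuming any open java.sql.ResultSet, the underlying JDBC usage looks roughly like the illustrative helper below (not part of the API being added here):

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

// Illustrative helper showing the plain JDBC calls backing the default
// methods of ResultSetAccess: column count, position-by-name, and type code.
final class ResultSetMetadataSketch {
    static int columnCount(ResultSet rs) throws SQLException {
        return rs.getMetaData().getColumnCount();
    }

    static int positionOf(ResultSet rs, String columnName) throws SQLException {
        // ResultSet#findColumn resolves a 1-based position from a column label
        return rs.findColumn( columnName );
    }

    static int jdbcTypeCode(ResultSet rs, int position) throws SQLException {
        // the returned code is a java.sql.Types constant, which the
        // SqlTypeDescriptorRegistry lookup above keys on
        final ResultSetMetaData metaData = rs.getMetaData();
        return metaData.getColumnType( position );
    }

    private ResultSetMetadataSketch() {
    }
}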

View File

@ -0,0 +1,120 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.hibernate.NotYetImplementedFor6Exception;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.loader.plan.spi.EntityFetch;
import org.hibernate.query.NavigablePath;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.exec.spi.Callback;
import org.hibernate.sql.exec.spi.DomainParameterBindingContext;
import org.hibernate.sql.results.spi.Initializer;
import org.hibernate.sql.results.spi.JdbcValues;
import org.hibernate.sql.results.spi.JdbcValuesSourceProcessingState;
import org.hibernate.sql.results.spi.RowProcessingState;
import org.hibernate.sql.results.spi.RowReader;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
public class RowProcessingStateStandardImpl implements RowProcessingState {
private static final Logger log = Logger.getLogger( RowProcessingStateStandardImpl.class );
private final JdbcValuesSourceProcessingStateStandardImpl resultSetProcessingState;
private final QueryOptions queryOptions;
private final Map<NavigablePath, Initializer> initializerMap;
private final JdbcValues jdbcValues;
private Object[] currentRowJdbcValues;
public RowProcessingStateStandardImpl(
JdbcValuesSourceProcessingStateStandardImpl resultSetProcessingState,
QueryOptions queryOptions,
RowReader<?> rowReader,
JdbcValues jdbcValues) {
this.resultSetProcessingState = resultSetProcessingState;
this.queryOptions = queryOptions;
this.jdbcValues = jdbcValues;
final List<Initializer> initializers = rowReader.getInitializers();
if ( initializers == null || initializers.isEmpty() ) {
initializerMap = null;
}
else {
initializerMap = new HashMap<>();
for ( Initializer initializer : initializers ) {
initializerMap.put( initializer.getNavigablePath(), initializer );
}
}
}
@Override
public JdbcValuesSourceProcessingState getJdbcValuesSourceProcessingState() {
return resultSetProcessingState;
}
public boolean next() throws SQLException {
if ( jdbcValues.next( this ) ) {
currentRowJdbcValues = jdbcValues.getCurrentRowValuesArray();
return true;
}
else {
currentRowJdbcValues = null;
return false;
}
}
@Override
public Object getJdbcValue(int position) {
return currentRowJdbcValues[ position ];
}
@Override
public void registerNonExists(EntityFetch fetch) {
}
@Override
public void finishRowProcessing() {
currentRowJdbcValues = null;
}
@Override
public SharedSessionContractImplementor getSession() {
return getJdbcValuesSourceProcessingState().getExecutionContext().getSession();
}
@Override
public QueryOptions getQueryOptions() {
return queryOptions;
}
@Override
public DomainParameterBindingContext getDomainParameterBindingContext() {
throw new NotYetImplementedFor6Exception();
}
@Override
public Callback getCallback() {
return afterLoadAction -> {};
}
@Override
public Initializer resolveInitializer(NavigablePath path) {
return initializerMap == null ? null : initializerMap.get( path );
}
}

View File

@ -0,0 +1,136 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal;
import java.util.List;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.query.named.RowReaderMemento;
import org.hibernate.sql.exec.spi.Callback;
import org.hibernate.sql.results.spi.DomainResultAssembler;
import org.hibernate.sql.results.spi.Initializer;
import org.hibernate.sql.results.spi.JdbcValuesSourceProcessingOptions;
import org.hibernate.sql.results.spi.JdbcValuesSourceProcessingState;
import org.hibernate.sql.results.spi.RowProcessingState;
import org.hibernate.sql.results.spi.RowReader;
import org.hibernate.sql.results.spi.RowTransformer;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
public class StandardRowReader<T> implements RowReader<T> {
private static final Logger LOG = Logger.getLogger( StandardRowReader.class );
private final List<DomainResultAssembler> resultAssemblers;
private final List<Initializer> initializers;
private final RowTransformer<T> rowTransformer;
private final int assemblerCount;
private final Callback callback;
public StandardRowReader(
List<DomainResultAssembler> resultAssemblers,
List<Initializer> initializers,
RowTransformer<T> rowTransformer,
Callback callback) {
this.resultAssemblers = resultAssemblers;
this.initializers = initializers;
this.rowTransformer = rowTransformer;
this.assemblerCount = resultAssemblers.size();
this.callback = callback;
}
@Override
@SuppressWarnings("unchecked")
public Class<T> getResultJavaType() {
if ( resultAssemblers.size() == 1 ) {
return resultAssemblers.get( 0 ).getAssembledJavaTypeDescriptor().getJavaType();
}
return (Class<T>) Object[].class;
}
@Override
public List<Initializer> getInitializers() {
return initializers;
}
@Override
public int getNumberOfResults() {
return rowTransformer.determineNumberOfResultElements( assemblerCount );
}
@Override
public T readRow(RowProcessingState rowProcessingState, JdbcValuesSourceProcessingOptions options) {
LOG.info( "---Processing Row---" );
coordinateInitializers( rowProcessingState, options );
// finally assemble the results
final Object[] result = new Object[assemblerCount];
for ( int i = 0; i < assemblerCount; i++ ) {
result[i] = resultAssemblers.get( i ).assemble( rowProcessingState, options );
}
afterRow( rowProcessingState, options );
return rowTransformer.transformRow( result );
}
private void afterRow(RowProcessingState rowProcessingState, JdbcValuesSourceProcessingOptions options) {
// todo : add AfterLoadActions handling here via Callback
for ( Initializer initializer : initializers ) {
initializer.finishUpRow( rowProcessingState );
}
}
private void coordinateInitializers(
RowProcessingState rowProcessingState,
JdbcValuesSourceProcessingOptions options) {
for ( Initializer initializer : initializers ) {
initializer.resolveKey( rowProcessingState );
}
for ( Initializer initializer : initializers ) {
initializer.resolveInstance( rowProcessingState );
}
for ( Initializer initializer : initializers ) {
initializer.initializeInstance( rowProcessingState );
}
}
@Override
public void finishUp(JdbcValuesSourceProcessingState processingState) {
for ( Initializer initializer : initializers ) {
initializer.endLoading( processingState.getExecutionContext() );
}
// todo : use Callback to execute AfterLoadActions
// todo : another option is to use Callback to execute the AfterLoadActions after each row
}
@Override
public RowReaderMemento toMemento(SessionFactoryImplementor factory) {
return new RowReaderMemento() {
@Override
public Class[] getResultClasses() {
return new Class[0];
}
@Override
public String[] getResultMappingNames() {
return new String[0];
}
};
}
}

View File

@ -0,0 +1,16 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal.caching;
/**
* @author Steve Ebersole
*/
public interface QueryCachePutManager {
void registerJdbcRow(Object[] values);
void finishUp();
}

View File

@ -0,0 +1,32 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal.caching;
/**
* QueryCachePutManager implementation for cases where we will not be putting
* Query results into the cache.
*
* @author Steve Ebersole
*/
public class QueryCachePutManagerDisabledImpl implements QueryCachePutManager {
/**
* Singleton access
*/
public static final QueryCachePutManagerDisabledImpl INSTANCE = new QueryCachePutManagerDisabledImpl();
private QueryCachePutManagerDisabledImpl() {
}
@Override
public void registerJdbcRow(Object[] values) {
}
@Override
public void finishUp() {
}
}

View File

@ -0,0 +1,49 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.internal.caching;
import java.util.ArrayList;
import java.util.List;
import org.hibernate.cache.spi.QueryKey;
import org.hibernate.cache.spi.QueryResultsCache;
/**
* QueryCachePutManager implementation for cases where we will be putting
* Query results into the cache.
*
* @author Steve Ebersole
*/
public class QueryCachePutManagerEnabledImpl implements QueryCachePutManager {
private final QueryResultsCache queryCache;
private final QueryKey queryKey;
private List<Object[]> dataToCache;
public QueryCachePutManagerEnabledImpl(QueryResultsCache queryCache, QueryKey queryKey) {
this.queryCache = queryCache;
this.queryKey = queryKey;
}
@Override
public void registerJdbcRow(Object[] values) {
if ( dataToCache == null ) {
dataToCache = new ArrayList<>();
}
dataToCache.add( values );
}
@Override
public void finishUp() {
queryCache.put(
queryKey,
dataToCache,
// todo (6.0) : needs access to Session to pass along to cache call
null
);
}
}
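The enabled put-manager buffers each row while results are being read and writes the whole buffer to the cache once at the end. A tiny standalone sketch of that buffer-then-flush shape (BufferingPutSketch and its Consumer-based cache sink are hypothetical, not Hibernate API):

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

// Hypothetical analogue of QueryCachePutManagerEnabledImpl: rows are collected
// during result processing and flushed to a cache sink in one shot.
class BufferingPutSketch {
    private final Consumer<List<Object[]>> cacheSink;
    private List<Object[]> buffered;

    BufferingPutSketch(Consumer<List<Object[]>> cacheSink) {
        this.cacheSink = cacheSink;
    }

    void registerRow(Object[] row) {
        if ( buffered == null ) {
            buffered = new ArrayList<>();
        }
        buffered.add( row );
    }

    void finishUp() {
        if ( buffered != null ) {
            cacheSink.accept( buffered );   // single put at the end of processing
        }
    }

    public static void main(String[] args) {
        BufferingPutSketch putManager = new BufferingPutSketch(
                rows -> System.out.println( "caching " + rows.size() + " row(s)" )
        );
        putManager.registerRow( new Object[] { 1, "a" } );
        putManager.registerRow( new Object[] { 2, "b" } );
        putManager.finishUp();
    }
}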

View File

@ -0,0 +1,11 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
/**
* Support for caching of query results
*/
package org.hibernate.sql.results.internal.caching;

View File

@ -11,6 +11,7 @@ import org.hibernate.collection.internal.PersistentArrayHolder;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.query.NavigablePath;
import org.hibernate.sql.results.spi.CollectionInitializer;
import org.hibernate.sql.results.spi.RowProcessingState;
/**
* @author Chris Cranford
@ -53,6 +54,26 @@ public class ArrayInitializer implements CollectionInitializer {
return navigablePath;
}
@Override
public void resolveKey(RowProcessingState rowProcessingState) {
}
@Override
public void resolveInstance(RowProcessingState rowProcessingState) {
}
@Override
public void initializeInstance(RowProcessingState rowProcessingState) {
}
@Override
public void finishUpRow(RowProcessingState rowProcessingState) {
}
@Override
public PersistentArrayHolder getCollectionInstance() {
throw new NotYetImplementedFor6Exception( getClass() );

View File

@ -7,6 +7,7 @@
package org.hibernate.sql.results.spi;
import org.hibernate.query.NavigablePath;
import org.hibernate.sql.exec.spi.ExecutionContext;
/**
* Defines a multi-step process for initializing entity, collection and
@ -19,4 +20,48 @@ public interface Initializer {
Object getInitializedInstance();
NavigablePath getNavigablePath();
/**
* Step 1 - Resolve the key value for this initializer for the current
* row.
*
* After this point, the initializer knows the entity/collection/component
* key for the current row
*/
void resolveKey(RowProcessingState rowProcessingState);
/**
* Step 2 - Using the key resolved in {@link #resolveKey}, resolve the
* instance (of the thing initialized) to use for the current row.
*
* After this point, the initializer knows the entity/collection/component
* instance for the current row based on the resolved key
*
* todo (6.0) : many of the implementations of this are similar enough to handle in a common base implementation (templating?)
* things like resolving as managed (Session cache), from second-level cache, from LoadContext, etc..
*/
void resolveInstance(RowProcessingState rowProcessingState);
/**
* Step 3 - Initialize the state of the instance resolved in
* {@link #resolveInstance} from the current row values.
*
* All resolved state for the current row is injected into the resolved
* instance
*/
void initializeInstance(RowProcessingState rowProcessingState);
/**
* Lifecycle method called at the end of the current row processing.
* Provides ability to complete processing from the current row and
* prepare for the next row.
*/
void finishUpRow(RowProcessingState rowProcessingState);
/**
* Lifecycle method called at the very end of the result values processing
*/
default void endLoading(ExecutionContext context) {
// by default - nothing to do
}
}
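Taken together, the steps above define a per-row lifecycle that a row reader drives in order: resolve keys for all initializers, then resolve instances, then initialize state, and finally finish the row. A compact sketch of that driving loop over a hypothetical, trimmed-down initializer contract (not the Hibernate interface itself):

import java.util.List;

// Hypothetical mini-contract mirroring the step ordering described above;
// RowState stands in for RowProcessingState.
class InitializerLifecycleSketch {

    interface RowState {
    }

    interface MiniInitializer {
        void resolveKey(RowState rowState);
        void resolveInstance(RowState rowState);
        void initializeInstance(RowState rowState);
        void finishUpRow(RowState rowState);
    }

    // Each phase is completed for every initializer before the next phase
    // begins, echoing the coordination done by StandardRowReader above.
    static void processRow(List<MiniInitializer> initializers, RowState rowState) {
        for ( MiniInitializer initializer : initializers ) {
            initializer.resolveKey( rowState );
        }
        for ( MiniInitializer initializer : initializers ) {
            initializer.resolveInstance( rowState );
        }
        for ( MiniInitializer initializer : initializers ) {
            initializer.initializeInstance( rowState );
        }
        for ( MiniInitializer initializer : initializers ) {
            initializer.finishUpRow( rowState );
        }
    }
}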

View File

@ -0,0 +1,52 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.spi;
import java.sql.SQLException;
/**
* Provides unified access to query results (JDBC values - see
* {@link RowProcessingState#getJdbcValue}), whether they come from the
* query cache or directly from the JDBC ResultSet.  Implementations also manage any cache puts
* if required.
*
* @author Steve Ebersole
*/
public interface JdbcValues {
JdbcValuesMapping getValuesMapping();
// todo : ? - add ResultSet.previous() and ResultSet.absolute(int) style methods (to support ScrollableResults)?
/**
* Think JDBC's {@code ResultSet#next}.  Advances the "cursor position"
* and returns a boolean indicating whether there is a row available at
* the new position.
*
* @return {@code true} indicates the call did not position the cursor beyond
* the available results ({@link #getCurrentRowValuesArray} will not return
* null); false indicates we are now beyond the end of the available results
* ({@link #getCurrentRowValuesArray} will return null)
*/
boolean next(RowProcessingState rowProcessingState) throws SQLException;
/**
* Get the JDBC values for the row this source is currently
* positioned at.
*
* @return The current row's JDBC values, or {@code null} if the position
* is beyond the end of the available results.
*/
Object[] getCurrentRowValuesArray();
/**
* todo (6.0) : is this needed?
* ^^ it's supposed to give impls a chance to write to the query cache
* or release ResultSet it. But that could technically be handled by the
* case of `#next` returning false the first time.
*/
void finishUp();
}

View File

@ -0,0 +1,84 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.spi;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.sql.results.internal.JdbcValuesSourceProcessingStateStandardImpl;
import org.hibernate.sql.results.internal.RowProcessingStateStandardImpl;
/**
* @author Steve Ebersole
*/
public class ListResultsConsumer<R> implements ResultsConsumer<List<R>, R> {
/**
* Singleton access
*/
public static final ListResultsConsumer INSTANCE = new ListResultsConsumer();
@SuppressWarnings("unchecked")
public static <R> ListResultsConsumer<R> instance() {
return INSTANCE;
}
@Override
public List<R> consume(
JdbcValues jdbcValues,
SharedSessionContractImplementor session,
JdbcValuesSourceProcessingOptions processingOptions,
JdbcValuesSourceProcessingStateStandardImpl jdbcValuesSourceProcessingState,
RowProcessingStateStandardImpl rowProcessingState,
RowReader<R> rowReader) {
try {
session.getPersistenceContext().getLoadContexts().register( jdbcValuesSourceProcessingState );
boolean uniqueRows = false;
final Class<R> resultJavaType = rowReader.getResultJavaType();
if ( resultJavaType != null && ! resultJavaType.isArray() ) {
final EntityPersister entityDescriptor = session.getFactory().getMetamodel().findEntityDescriptor( resultJavaType );
if ( entityDescriptor != null ) {
uniqueRows = true;
}
}
final List<R> results = new ArrayList<>();
while ( rowProcessingState.next() ) {
final R row = rowReader.readRow( rowProcessingState, processingOptions );
boolean add = true;
if ( uniqueRows ) {
if ( results.contains( row ) ) {
add = false;
}
}
if ( add ) {
results.add( row );
}
rowProcessingState.finishRowProcessing();
}
return results;
}
catch (SQLException e) {
throw session.getJdbcServices().getSqlExceptionHelper().convert(
e,
"Error processing return rows"
);
}
finally {
rowReader.finishUp( jdbcValuesSourceProcessingState );
jdbcValuesSourceProcessingState.finishUp();
jdbcValues.finishUp();
}
}
}

View File

@ -0,0 +1,24 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.spi;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.sql.results.internal.JdbcValuesSourceProcessingStateStandardImpl;
import org.hibernate.sql.results.internal.RowProcessingStateStandardImpl;
/**
* @author Steve Ebersole
*/
public interface ResultsConsumer<T, R> {
T consume(
JdbcValues jdbcValues,
SharedSessionContractImplementor session,
JdbcValuesSourceProcessingOptions processingOptions,
JdbcValuesSourceProcessingStateStandardImpl jdbcValuesSourceProcessingState,
RowProcessingStateStandardImpl rowProcessingState,
RowReader<R> rowReader);
}

View File

@ -6,6 +6,7 @@
*/
package org.hibernate.sql.results.spi;
import org.hibernate.loader.plan.spi.EntityFetch;
import org.hibernate.query.NavigablePath;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.exec.spi.ExecutionContext;
@ -43,6 +44,8 @@ public interface RowProcessingState extends ExecutionContext {
*/
Object getJdbcValue(int position);
void registerNonExists(EntityFetch fetch);
/**
* Callback at the end of processing the current "row"
*/

View File

@ -9,6 +9,9 @@ package org.hibernate.sql.results.spi;
import java.sql.SQLException;
import java.util.List;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.query.named.RowReaderMemento;
/**
* Coordinates the process of reading a single result values row
*
@ -46,4 +49,6 @@ public interface RowReader<R> {
* Called at the end of processing all rows
*/
void finishUp(JdbcValuesSourceProcessingState context);
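/**
 * Create a memento ({@link RowReaderMemento}) from this reader for later re-use - given the
 * {@code org.hibernate.query.named} package of the memento type, presumably for named queries.
 */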
RowReaderMemento toMemento(SessionFactoryImplementor factory);
}

View File

@ -0,0 +1,63 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.results.spi;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.FetchingScrollableResultsImpl;
import org.hibernate.internal.ScrollableResultsImpl;
import org.hibernate.query.spi.ScrollableResultsImplementor;
import org.hibernate.sql.results.internal.JdbcValuesSourceProcessingStateStandardImpl;
import org.hibernate.sql.results.internal.RowProcessingStateStandardImpl;
/**
* @author Steve Ebersole
*/
public class ScrollableResultsConsumer<R> implements ResultsConsumer<ScrollableResultsImplementor<R>, R> {
/**
* Singleton access to the standard scrollable-results consumer instance
*/
public static final ScrollableResultsConsumer INSTANCE = new ScrollableResultsConsumer();
@SuppressWarnings("unchecked")
public static <R> ScrollableResultsConsumer<R> instance() {
return INSTANCE;
}
@Override
public ScrollableResultsImplementor<R> consume(
JdbcValues jdbcValues,
SharedSessionContractImplementor session,
JdbcValuesSourceProcessingOptions processingOptions,
JdbcValuesSourceProcessingStateStandardImpl jdbcValuesSourceProcessingState,
RowProcessingStateStandardImpl rowProcessingState,
RowReader<R> rowReader) {
if ( containsCollectionFetches( jdbcValues.getValuesMapping() ) ) {
return new FetchingScrollableResultsImpl<>(
jdbcValues,
processingOptions,
jdbcValuesSourceProcessingState,
rowProcessingState,
rowReader,
session
);
}
else {
return new ScrollableResultsImpl<>(
jdbcValues,
processingOptions,
jdbcValuesSourceProcessingState,
rowProcessingState,
rowReader,
session
);
}
}
private boolean containsCollectionFetches(JdbcValuesMapping valuesMapping) {
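// Placeholder: collection-fetch detection is not implemented yet, so this always reports
// false and scroll() currently gets the non-fetching ScrollableResultsImpl.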
return false;
}
}

View File

@ -76,6 +76,14 @@ public enum Action {
this.externalHbm2ddlName = externalHbm2ddlName;
}
public String getExternalJpaName() {
return externalJpaName;
}
public String getExternalHbm2ddlName() {
return externalHbm2ddlName;
}
@Override
public String toString() {
return getClass().getSimpleName() + "(externalJpaName=" + externalJpaName + ", externalHbm2ddlName=" + externalHbm2ddlName + ")";

View File

@ -1,256 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.cache.spi;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.hibernate.internal.util.SerializationHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.transform.AliasToBeanResultTransformer;
import org.hibernate.transform.AliasToEntityMapResultTransformer;
import org.hibernate.transform.AliasedTupleSubsetResultTransformer;
import org.hibernate.transform.CacheableResultTransformer;
import org.hibernate.transform.DistinctResultTransformer;
import org.hibernate.transform.DistinctRootEntityResultTransformer;
import org.hibernate.transform.PassThroughResultTransformer;
import org.hibernate.transform.ResultTransformer;
import org.hibernate.transform.RootEntityResultTransformer;
import org.hibernate.transform.ToListResultTransformer;
import org.hibernate.transform.TupleSubsetResultTransformer;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.junit.Test;
/**
* Tests relating to {@link QueryKey} instances.
*
* @author Steve Ebersole
*/
public class QueryKeyTest extends BaseUnitTestCase {
private static final String QUERY_STRING = "the query string";
public static class AClass implements Serializable {
private String propAccessedByField;
private String propAccessedByMethod;
private int propValue;
public AClass() {
}
public AClass(String propAccessedByField) {
this.propAccessedByField = propAccessedByField;
}
public String getPropAccessedByMethod() {
return propAccessedByMethod;
}
public void setPropAccessedByMethod(String propAccessedByMethod) {
this.propAccessedByMethod = propAccessedByMethod;
}
}
@Test
public void testSerializedEqualityResultTransformer() throws Exception {
// settings are lazily initialized when calling transformTuple(),
// so they have not been initialized for the following test
// (it *should* be initialized before creating a QueryKey)
doResultTransformerTest( new AliasToBeanResultTransformer( AClass.class ), false );
// initialize settings for the next test
AliasToBeanResultTransformer transformer = new AliasToBeanResultTransformer( AClass.class );
transformer.transformTuple(
new Object[] { "abc", "def" },
new String[] { "propAccessedByField", "propAccessedByMethod" }
);
doResultTransformerTest( transformer, false );
doResultTransformerTest( AliasToEntityMapResultTransformer.INSTANCE, true );
doResultTransformerTest( DistinctResultTransformer.INSTANCE, true );
doResultTransformerTest( DistinctRootEntityResultTransformer.INSTANCE, true );
doResultTransformerTest( PassThroughResultTransformer.INSTANCE, true );
doResultTransformerTest( RootEntityResultTransformer.INSTANCE, true );
doResultTransformerTest( ToListResultTransformer.INSTANCE, true );
}
// Reproduces HHH-5628; commented out because FailureExpected is not working here...
/*
public void testAliasToBeanConstructorFailureExpected() throws Exception {
// AliasToBeanConstructorResultTransformer is not Serializable because
// java.lang.reflect.Constructor is not Serializable;
doResultTransformerTest(
new AliasToBeanConstructorResultTransformer( AClass.class.getConstructor( String.class ) ), false
);
}
*/
private void doResultTransformerTest(ResultTransformer transformer, boolean isSingleton) {
Map transformerMap = new HashMap();
transformerMap.put( transformer, "" );
assert transformerMap.size() == 1 : "really messed up";
Object old = transformerMap.put( transformer, "value" );
assert old != null && transformerMap.size() == 1 : "apparent QueryKey equals/hashCode issue";
// finally, lets serialize it and see what happens
ResultTransformer transformer2 = ( ResultTransformer ) SerializationHelper.clone( transformer );
old = transformerMap.put( transformer2, "new value" );
assert old != null && transformerMap.size() == 1 : "deserialization did not set hashCode or equals properly";
if ( isSingleton ) {
assert transformer == transformer2: "deserialization issue for singleton transformer";
}
else {
assert transformer != transformer2: "deserialization issue for non-singleton transformer";
}
assert transformer.equals( transformer2 ): "deep copy issue";
}
@Test
public void testSerializedEquality() throws Exception {
doTest( buildBasicKey( null ) );
doTest( buildBasicKey( CacheableResultTransformer.create( null, null, new boolean[] { true } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( null, new String[] { null }, new boolean[] { true } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( null, new String[] { "a" }, new boolean[] { true } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( null, null, new boolean[] { false, true } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( null, new String[] { "a" }, new boolean[] { true, false } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( null, new String[] { "a", null }, new boolean[] { true, true } ) ) );
}
@Test
public void testSerializedEqualityWithTupleSubsetResultTransformer() throws Exception {
doTestWithTupleSubsetResultTransformer(
new AliasToBeanResultTransformer( AClass.class ),
new String[] { "propAccessedByField", "propAccessedByMethod" }
);
doTestWithTupleSubsetResultTransformer( AliasToEntityMapResultTransformer.INSTANCE, new String[] { "a", "b" } );
doTestWithTupleSubsetResultTransformer( DistinctRootEntityResultTransformer.INSTANCE, new String[] { "a", "b" } );
doTestWithTupleSubsetResultTransformer( PassThroughResultTransformer.INSTANCE, new String[] { "a", "b" } );
doTestWithTupleSubsetResultTransformer( RootEntityResultTransformer.INSTANCE, new String[] { "a", "b" } );
// The following are not TupleSubsetResultTransformers:
// DistinctResultTransformer.INSTANCE
// ToListResultTransformer.INSTANCE
}
public void doTestWithTupleSubsetResultTransformer(TupleSubsetResultTransformer transformer,
String[] aliases) throws Exception {
doTest( buildBasicKey(
CacheableResultTransformer.create(
transformer,
new String[] { aliases[ 0 ], aliases[ 1 ] },
new boolean[] { true, true } )
) );
doTest( buildBasicKey(
CacheableResultTransformer.create(
transformer,
new String[] { aliases[ 0 ], aliases[ 1 ] },
new boolean[] { true, true, false } )
) );
doTest( buildBasicKey(
CacheableResultTransformer.create(
transformer,
new String[] { aliases[ 1 ] },
new boolean[] { true } )
) );
doTest( buildBasicKey(
CacheableResultTransformer.create(
transformer,
new String[] { null, aliases[ 1 ] },
new boolean[] { true, true } )
) );
doTest( buildBasicKey(
CacheableResultTransformer.create(
transformer,
new String[] { aliases[ 0 ], null },
new boolean[] { true, true } )
) );
doTest( buildBasicKey(
CacheableResultTransformer.create(
transformer,
new String[] { aliases[ 0 ] },
new boolean[] { false, true } )
) );
doTest( buildBasicKey(
CacheableResultTransformer.create(
transformer,
new String[] { aliases[ 0 ] },
new boolean[] { true, false } )
) );
doTest( buildBasicKey(
CacheableResultTransformer.create(
transformer,
new String[] { aliases[ 0 ] },
new boolean[] { false, true, false } )
) );
if ( ! ( transformer instanceof AliasedTupleSubsetResultTransformer ) ) {
doTestWithTupleSubsetResultTransformerNullAliases( transformer );
}
}
public void doTestWithTupleSubsetResultTransformerNullAliases(TupleSubsetResultTransformer transformer) throws Exception {
doTest( buildBasicKey( CacheableResultTransformer.create( transformer, null, new boolean[] { true } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( transformer, null, new boolean[] { true, true } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( transformer, null, new boolean[] { true, true, true } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( transformer, null, new boolean[] { false, true } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( transformer, null, new boolean[] { true, false } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( transformer, null, new boolean[] { false, true, true } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( transformer, null, new boolean[] {true, false, true } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( transformer, null, new boolean[] {true, true, false } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( transformer, null, new boolean[] {false, false, true } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( transformer, null, new boolean[] {false, true, false } ) ) );
doTest( buildBasicKey( CacheableResultTransformer.create( transformer, null, new boolean[] {false, false, true } ) ) );
}
private QueryKey buildBasicKey(CacheableResultTransformer resultTransformer) {
return new QueryKey(
QUERY_STRING,
ArrayHelper.EMPTY_TYPE_ARRAY, // positional param types
ArrayHelper.EMPTY_OBJECT_ARRAY, // positional param values
Collections.EMPTY_MAP, // named params
null, // firstRow selection
null, // maxRows selection
Collections.EMPTY_SET, // filter keys
null, // tenantIdentifier
resultTransformer // the result transformer
);
}
private void doTest(QueryKey key) {
Map keyMap = new HashMap();
Map transformerMap = new HashMap();
keyMap.put( key, "" );
assert keyMap.size() == 1 : "really messed up";
Object old = keyMap.put( key, "value" );
assert old != null && keyMap.size() == 1 : "apparent QueryKey equals/hashCode issue";
if ( key.getResultTransformer() != null ) {
transformerMap.put( key.getResultTransformer(), "" );
assert transformerMap.size() == 1 : "really messed up";
old = transformerMap.put( key.getResultTransformer(), "value" );
assert old != null && transformerMap.size() == 1 : "apparent QueryKey equals/hashCode issue";
}
// finally, lets serialize it and see what happens
QueryKey key2 = ( QueryKey ) SerializationHelper.clone( key );
assert key != key2 : "deep copy issue";
old = keyMap.put( key2, "new value" );
assert old != null && keyMap.size() == 1 : "deserialization did not set hashCode or equals properly";
if ( key.getResultTransformer() == null ) {
assert key2.getResultTransformer() == null;
}
else {
old = transformerMap.put( key2.getResultTransformer(), "new value" );
assert old != null && transformerMap.size() == 1 : "deserialization did not set hashCode or equals properly";
assert key.getResultTransformer() != key2.getResultTransformer(): "deserialization issue for non-singleton transformer";
assert key.getResultTransformer().equals( key2.getResultTransformer() ): "deep copy issue";
}
}
}

View File

@ -6,19 +6,12 @@
*/
package org.hibernate.orm.test.sql.ast;
import java.util.List;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.query.hql.spi.HqlQueryImplementor;
import org.hibernate.query.spi.QueryImplementor;
import org.hibernate.query.spi.QueryParameterImplementor;
import org.hibernate.query.sqm.internal.DomainParameterXref;
import org.hibernate.query.sqm.internal.QuerySqmImpl;
import org.hibernate.query.sqm.internal.SqmUtil;
import org.hibernate.query.sqm.sql.internal.SqmSelectInterpretation;
import org.hibernate.query.sqm.sql.internal.SqmSelectToSqlAstConverter;
import org.hibernate.query.sqm.tree.expression.SqmParameter;
import org.hibernate.query.sqm.tree.select.SqmSelectStatement;
import org.hibernate.sql.ast.spi.SqlAstSelectToJdbcSelectConverter;
import org.hibernate.sql.ast.spi.SqlSelection;
@ -26,7 +19,6 @@ import org.hibernate.sql.ast.tree.from.FromClause;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.select.SelectClause;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.exec.spi.JdbcParameter;
import org.hibernate.sql.exec.spi.JdbcSelect;
import org.hibernate.testing.orm.junit.DomainModel;
@ -35,8 +27,6 @@ import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.junit.jupiter.api.Test;
import org.hamcrest.CoreMatchers;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
@ -84,15 +74,15 @@ public class SmokeTests {
);
assertThat( jdbcSelectOperation.getSql(), is( "select s1_0.name from mapping_simple_entity as s1_0" ) );
final DomainParameterXref domainParameterXref = DomainParameterXref.from( sqmStatement );
final Map<QueryParameterImplementor<?>, Map<SqmParameter, List<JdbcParameter>>> paramsXref = SqmUtil.generateJdbcParamsXref(
domainParameterXref,
() -> sqmInterpretation.getJdbcParamsBySqmParam()
);
// try to execute the Query...
// final List<String> names = query.list();
}
);
}
@Test
public void testSimpleHqlExecution(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
final QueryImplementor<String> query = session.createQuery( "select e.name from SimpleEntity e", String.class );
query.list();
}
);
}

View File

@ -15,6 +15,6 @@
<!-- Query caches with legacy names -->
<cache name="hibernate.test.org.hibernate.cache.internal.StandardQueryCache" maxElementsInMemory="10000" />
<cache name="hibernate.test.org.hibernate.cache.spi.UpdateTimestampsCache" maxElementsInMemory="10000" />
<cache name="hibernate.test.org.hibernate.cache.spi.TimestampsCache" maxElementsInMemory="10000" />
</ehcache>

View File

@ -95,6 +95,10 @@ public class ServiceRegistryExtension
private static void configureServices(ServiceRegistry serviceRegistryAnn, StandardServiceRegistryBuilder ssrb) {
try {
for ( ServiceRegistry.Setting setting : serviceRegistryAnn.settings() ) {
ssrb.applySetting( setting.name(), setting.value() );
}
for ( Class<? extends ServiceContributor> contributorClass : serviceRegistryAnn.serviceContributors() ) {
final ServiceContributor serviceContributor = contributorClass.newInstance();
serviceContributor.contribute( ssrb );