1. remove commons-collections dependency.

2. moved org.hibernate.internal.util.collections.ConcurrentReferenceHashMap from the envers module to core, and changed it to disallow null keys / values.
3. use ConcurrentReferenceHashMap to replace the old ReferenceMap (from commons-collections) that was used in StatefulPersistenceContext.
4. copied org.hibernate.internal.util.collections.BoundedConcurrentHashMap from infinispan code base.
5. deprecate properties QUERY_PLAN_CACHE_MAX_STRONG_REFERENCES and QUERY_PLAN_CACHE_MAX_SOFT_REFERENCES
6. add two new properties QUERY_PLAN_CACHE_PARAMETER_METADATA_MAX_SIZE and QUERY_PLAN_CACHE_MAX_SIZE
7. remove LRUMap, SimpleMRUCache and SoftLimitMRUCache
This commit is contained in:
Strong Liu 2012-02-28 23:54:27 +08:00
parent 01fe115adf
commit b4659173a8
19 changed files with 4566 additions and 2270 deletions

View File

@ -63,9 +63,6 @@ libraries = [
jandex: 'org.jboss:jandex:1.0.3.Final',
classmate: 'com.fasterxml:classmate:0.5.4',
// Jakarta commons-collections todo : get rid of commons-collections dependency
commons_collections:
'commons-collections:commons-collections:3.2.1',
// Dom4J
dom4j: 'dom4j:dom4j:1.6.1@jar',

View File

@ -4,7 +4,6 @@ apply plugin: org.hibernate.build.gradle.inject.InjectionPlugin
apply plugin: org.hibernate.build.gradle.testing.matrix.MatrixTestingPlugin
dependencies {
compile( libraries.commons_collections )
compile( libraries.jta )
compile( libraries.dom4j ) {
transitive = false

View File

@ -452,15 +452,37 @@ public interface AvailableSettings {
public static final String PREFER_POOLED_VALUES_LO = "hibernate.id.optimizer.pooled.prefer_lo";
/**
* The maximum number of strong references maintained by {@link org.hibernate.internal.util.collections.SoftLimitMRUCache}. Default is 128.
* The maximum number of strong references maintained by {@link org.hibernate.engine.query.spi.QueryPlanCache}. Default is 128.
* @deprecated in favor of {@link #QUERY_PLAN_CACHE_PARAMETER_METADATA_MAX_SIZE}
*/
@Deprecated
public static final String QUERY_PLAN_CACHE_MAX_STRONG_REFERENCES = "hibernate.query.plan_cache_max_strong_references";
/**
* The maximum number of soft references maintained by {@link org.hibernate.internal.util.collections.SoftLimitMRUCache}. Default is 2048.
* The maximum number of soft references maintained by {@link org.hibernate.engine.query.spi.QueryPlanCache}. Default is 2048.
* @deprecated in favor of {@link #QUERY_PLAN_CACHE_MAX_SIZE}
*/
@Deprecated
public static final String QUERY_PLAN_CACHE_MAX_SOFT_REFERENCES = "hibernate.query.plan_cache_max_soft_references";
/**
* The maximum number of entries including:
* <ul>
* <li>{@link org.hibernate.engine.query.spi.HQLQueryPlan}</li>
* <li>{@link org.hibernate.engine.query.spi.FilterQueryPlan}</li>
* <li>{@link org.hibernate.engine.query.spi.NativeSQLQueryPlan}</li>
* </ul>
*
* maintained by {@link org.hibernate.engine.query.spi.QueryPlanCache}. Default is 2048.
*/
public static final String QUERY_PLAN_CACHE_MAX_SIZE = "hibernate.query.plan_cache_max_size";
/**
* The maximum number of {@link org.hibernate.engine.query.spi.ParameterMetadata} maintained
* by {@link org.hibernate.engine.query.spi.QueryPlanCache}. Default is 128.
*/
public static final String QUERY_PLAN_CACHE_PARAMETER_METADATA_MAX_SIZE = "hibernate.query.plan_parameter_metadata_max_size";
/**
* Should we not use contextual LOB creation (aka based on {@link java.sql.Connection#createBlob()} et al).
*/

View File

@ -36,11 +36,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.collections.map.AbstractReferenceMap;
import org.apache.commons.collections.map.ReferenceMap;
import org.jboss.logging.Logger;
import org.hibernate.AssertionFailure;
import org.hibernate.Hibernate;
import org.hibernate.HibernateException;
@ -64,6 +60,7 @@ import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.spi.Status;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.MarkerObject;
import org.hibernate.internal.util.collections.ConcurrentReferenceHashMap;
import org.hibernate.internal.util.collections.IdentityMap;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
@ -158,7 +155,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
entitiesByKey = new HashMap<EntityKey, Object>( INIT_COLL_SIZE );
entitiesByUniqueKey = new HashMap<EntityUniqueKey, Object>( INIT_COLL_SIZE );
//noinspection unchecked
proxiesByKey = (Map<EntityKey, Object>) new ReferenceMap( AbstractReferenceMap.HARD, AbstractReferenceMap.WEAK );
proxiesByKey = new ConcurrentReferenceHashMap<EntityKey, Object>( INIT_COLL_SIZE, .75f, 1, ConcurrentReferenceHashMap.ReferenceType.STRONG, ConcurrentReferenceHashMap.ReferenceType.WEAK, null );
entitySnapshotsByKey = new HashMap<EntityKey, Object>( INIT_COLL_SIZE );
entityEntries = IdentityMap.instantiateSequenced( INIT_COLL_SIZE );
@ -199,19 +196,14 @@ public class StatefulPersistenceContext implements PersistenceContext {
@Override
public void addUnownedCollection(CollectionKey key, PersistentCollection collection) {
if (unownedCollections==null) {
unownedCollections = new HashMap<CollectionKey,PersistentCollection>(8);
unownedCollections = new HashMap<CollectionKey,PersistentCollection>(INIT_COLL_SIZE);
}
unownedCollections.put( key, collection );
}
@Override
public PersistentCollection useUnownedCollection(CollectionKey key) {
if ( unownedCollections == null ) {
return null;
}
else {
return unownedCollections.remove(key);
}
return ( unownedCollections == null ) ? null : unownedCollections.remove( key );
}
@Override
@ -225,8 +217,11 @@ public class StatefulPersistenceContext implements PersistenceContext {
@Override
public void clear() {
for ( Object o : proxiesByKey.values() ) {
final LazyInitializer li = ((HibernateProxy) o).getHibernateLazyInitializer();
li.unsetSession();
if ( o == null ) {
//entry may be GCd
continue;
}
((HibernateProxy) o).getHibernateLazyInitializer().unsetSession();
}
for ( Map.Entry<PersistentCollection, CollectionEntry> aCollectionEntryArray : IdentityMap.concurrentEntries( collectionEntries ) ) {
aCollectionEntryArray.getKey().unsetSession( getSession() );
@ -733,14 +728,11 @@ public class StatefulPersistenceContext implements PersistenceContext {
@Override
public Object proxyFor(EntityPersister persister, EntityKey key, Object impl)
throws HibernateException {
if ( !persister.hasProxy() ) return impl;
Object proxy = proxiesByKey.get(key);
if ( proxy != null ) {
return narrowProxy(proxy, persister, key, impl);
}
else {
if ( !persister.hasProxy() ) {
return impl;
}
Object proxy = proxiesByKey.get( key );
return ( proxy != null ) ? narrowProxy( proxy, persister, key, impl ) : impl;
}
/**
@ -1574,7 +1566,14 @@ public class StatefulPersistenceContext implements PersistenceContext {
count = ois.readInt();
if ( tracing ) LOG.trace("Starting deserialization of [" + count + "] proxiesByKey entries");
//noinspection unchecked
rtn.proxiesByKey = new ReferenceMap( AbstractReferenceMap.HARD, AbstractReferenceMap.WEAK, count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count, .75f );
rtn.proxiesByKey = new ConcurrentReferenceHashMap<EntityKey, Object>(
count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count,
.75f,
1,
ConcurrentReferenceHashMap.ReferenceType.STRONG,
ConcurrentReferenceHashMap.ReferenceType.WEAK,
null
);
for ( int i = 0; i < count; i++ ) {
EntityKey ek = EntityKey.deserialize( ois, session );
Object proxy = ois.readObject();

View File

@ -25,6 +25,7 @@ package org.hibernate.engine.query.spi;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
@ -46,6 +47,7 @@ import org.hibernate.hql.internal.QuerySplitter;
import org.hibernate.hql.spi.FilterTranslator;
import org.hibernate.hql.spi.ParameterTranslations;
import org.hibernate.hql.spi.QueryTranslator;
import org.hibernate.hql.spi.QueryTranslatorFactory;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.internal.util.collections.EmptyIterator;
@ -75,36 +77,38 @@ public class HQLQueryPlan implements Serializable {
private final Set enabledFilterNames;
private final boolean shallow;
public HQLQueryPlan(String hql, boolean shallow, Map enabledFilters, SessionFactoryImplementor factory) {
this( hql, null, shallow, enabledFilters, factory );
}
protected HQLQueryPlan(String hql, String collectionRole, boolean shallow, Map enabledFilters, SessionFactoryImplementor factory) {
protected HQLQueryPlan(String hql, String collectionRole, boolean shallow, Map enabledFilters, SessionFactoryImplementor factory){
this.sourceQuery = hql;
this.shallow = shallow;
Set copy = new HashSet();
copy.addAll( enabledFilters.keySet() );
this.enabledFilterNames = java.util.Collections.unmodifiableSet( copy );
Set combinedQuerySpaces = new HashSet();
String[] concreteQueryStrings = QuerySplitter.concreteQueries( hql, factory );
final String[] concreteQueryStrings = QuerySplitter.concreteQueries( hql, factory );
final int length = concreteQueryStrings.length;
translators = new QueryTranslator[length];
List sqlStringList = new ArrayList();
this.translators = new QueryTranslator[length];
List<String> sqlStringList = new ArrayList<String>();
Set combinedQuerySpaces = new HashSet();
final boolean hasCollectionRole = (collectionRole == null);
final Map querySubstitutions = factory.getSettings().getQuerySubstitutions();
final QueryTranslatorFactory queryTranslatorFactory = factory.getSettings().getQueryTranslatorFactory();
for ( int i=0; i<length; i++ ) {
if ( collectionRole == null ) {
translators[i] = factory.getSettings()
.getQueryTranslatorFactory()
if ( hasCollectionRole ) {
translators[i] = queryTranslatorFactory
.createQueryTranslator( hql, concreteQueryStrings[i], enabledFilters, factory );
translators[i].compile( factory.getSettings().getQuerySubstitutions(), shallow );
translators[i].compile( querySubstitutions, shallow );
}
else {
translators[i] = factory.getSettings()
.getQueryTranslatorFactory()
translators[i] = queryTranslatorFactory
.createFilterTranslator( hql, concreteQueryStrings[i], enabledFilters, factory );
( ( FilterTranslator ) translators[i] ).compile( collectionRole, factory.getSettings().getQuerySubstitutions(), shallow );
( ( FilterTranslator ) translators[i] ).compile( collectionRole, querySubstitutions, shallow );
}
combinedQuerySpaces.addAll( translators[i].getQuerySpaces() );
sqlStringList.addAll( translators[i].collectSqlStrings() );
@ -123,13 +127,8 @@ public class HQLQueryPlan implements Serializable {
returnMetadata = null;
}
else {
if ( length > 1 ) {
final int returns = translators[0].getReturnTypes().length;
returnMetadata = new ReturnMetadata( translators[0].getReturnAliases(), new Type[returns] );
}
else {
returnMetadata = new ReturnMetadata( translators[0].getReturnAliases(), translators[0].getReturnTypes() );
}
final Type[] types = ( length > 1 ) ? new Type[translators[0].getReturnTypes().length] : translators[0].getReturnTypes();
returnMetadata = new ReturnMetadata( translators[0].getReturnAliases(), types );
}
}
}
@ -192,20 +191,19 @@ public class HQLQueryPlan implements Serializable {
List combinedResults = new ArrayList();
IdentitySet distinction = new IdentitySet();
int includedCount = -1;
translator_loop: for ( int i = 0; i < translators.length; i++ ) {
List tmp = translators[i].list( session, queryParametersToUse );
translator_loop:
for ( QueryTranslator translator : translators ) {
List tmp = translator.list( session, queryParametersToUse );
if ( needsLimit ) {
// NOTE : firstRow is zero-based
int first = queryParameters.getRowSelection().getFirstRow() == null
? 0
: queryParameters.getRowSelection().getFirstRow().intValue();
int max = queryParameters.getRowSelection().getMaxRows() == null
? -1
: queryParameters.getRowSelection().getMaxRows().intValue();
final int size = tmp.size();
for ( int x = 0; x < size; x++ ) {
final Object result = tmp.get( x );
if ( ! distinction.add( result ) ) {
final int first = queryParameters.getRowSelection().getFirstRow() == null
? 0
: queryParameters.getRowSelection().getFirstRow();
final int max = queryParameters.getRowSelection().getMaxRows() == null
? -1
: queryParameters.getRowSelection().getMaxRows();
for ( final Object result : tmp ) {
if ( !distinction.add( result ) ) {
continue;
}
includedCount++;
@ -239,14 +237,16 @@ public class HQLQueryPlan implements Serializable {
Iterator[] results = null;
boolean many = translators.length > 1;
if (many) {
if ( many ) {
results = new Iterator[translators.length];
}
Iterator result = null;
for ( int i = 0; i < translators.length; i++ ) {
result = translators[i].iterate( queryParameters, session );
if (many) results[i] = result;
if ( many ) {
results[i] = result;
}
}
return many ? new JoinedIterator(results) : result;
@ -279,8 +279,8 @@ public class HQLQueryPlan implements Serializable {
LOG.splitQueries( getSourceQuery(), translators.length );
}
int result = 0;
for ( int i = 0; i < translators.length; i++ ) {
result += translators[i].executeUpdate( queryParameters, session );
for ( QueryTranslator translator : translators ) {
result += translator.executeUpdate( queryParameters, session );
}
return result;
}
@ -289,7 +289,9 @@ public class HQLQueryPlan implements Serializable {
long start = System.currentTimeMillis();
ParamLocationRecognizer recognizer = ParamLocationRecognizer.parseLocations( hql );
long end = System.currentTimeMillis();
LOG.tracev( "HQL param location recognition took {0} mills ({1})", ( end - start ), hql );
if ( LOG.isTraceEnabled() ) {
LOG.tracev( "HQL param location recognition took {0} mills ({1})", ( end - start ), hql );
}
int ordinalParamCount = parameterTranslations.getOrdinalParameterCount();
int[] locations = ArrayHelper.toIntArray( recognizer.getOrdinalParameterLocationList() );
@ -309,7 +311,7 @@ public class HQLQueryPlan implements Serializable {
}
Iterator itr = recognizer.getNamedParameterDescriptionMap().entrySet().iterator();
Map namedParamDescriptorMap = new HashMap();
Map<String, NamedParameterDescriptor> namedParamDescriptorMap = new HashMap<String, NamedParameterDescriptor>();
while( itr.hasNext() ) {
final Map.Entry entry = ( Map.Entry ) itr.next();
final String name = ( String ) entry.getKey();
@ -328,7 +330,6 @@ public class HQLQueryPlan implements Serializable {
return new ParameterMetadata( ordinalParamDescriptors, namedParamDescriptorMap );
}
public QueryTranslator[] getTranslators() {
QueryTranslator[] copy = new QueryTranslator[translators.length];
System.arraycopy(translators, 0, copy, 0, copy.length);

View File

@ -31,7 +31,6 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.jboss.logging.Logger;
import org.hibernate.MappingException;
@ -41,9 +40,8 @@ import org.hibernate.engine.query.spi.sql.NativeSQLQuerySpecification;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.FilterImpl;
import org.hibernate.internal.util.collections.BoundedConcurrentHashMap;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.internal.util.collections.SimpleMRUCache;
import org.hibernate.internal.util.collections.SoftLimitMRUCache;
import org.hibernate.internal.util.config.ConfigurationHelper;
/**
@ -57,37 +55,64 @@ import org.hibernate.internal.util.config.ConfigurationHelper;
public class QueryPlanCache implements Serializable {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, QueryPlanCache.class.getName());
private static final boolean isTraceEnabled = LOG.isTraceEnabled();
/**
* The default strong reference count.
*/
public static final int DEFAULT_PARAMETER_METADATA_MAX_COUNT = 128;
/**
* The default soft reference count.
*/
public static final int DEFAULT_QUERY_PLAN_MAX_COUNT = 2048;
private final SessionFactoryImplementor factory;
/**
* the cache of the actual plans...
*/
private final BoundedConcurrentHashMap queryPlanCache;
/**
* simple cache of param metadata based on query string. Ideally, the original "user-supplied query"
* string should be used to obtain this metadata (i.e., not the para-list-expanded query string) to avoid
* unnecessary cache entries.
* <p>
* <p></p>
* Used solely for caching param metadata for native-sql queries, see {@link #getSQLParameterMetadata} for a
* discussion as to why...
*/
private final SimpleMRUCache sqlParamMetadataCache;
/**
* the cache of the actual plans...
*/
private final SoftLimitMRUCache planCache;
private SessionFactoryImplementor factory;
public QueryPlanCache(SessionFactoryImplementor factory) {
int maxStrongReferenceCount = ConfigurationHelper.getInt(
Environment.QUERY_PLAN_CACHE_MAX_STRONG_REFERENCES,
factory.getProperties(),
SoftLimitMRUCache.DEFAULT_STRONG_REF_COUNT
);
int maxSoftReferenceCount = ConfigurationHelper.getInt(
Environment.QUERY_PLAN_CACHE_MAX_SOFT_REFERENCES,
factory.getProperties(),
SoftLimitMRUCache.DEFAULT_SOFT_REF_COUNT
);
private final BoundedConcurrentHashMap<String,ParameterMetadata> parameterMetadataCache;
public QueryPlanCache(final SessionFactoryImplementor factory) {
this.factory = factory;
this.sqlParamMetadataCache = new SimpleMRUCache( maxStrongReferenceCount );
this.planCache = new SoftLimitMRUCache( maxStrongReferenceCount, maxSoftReferenceCount );
Integer maxParameterMetadataCount = ConfigurationHelper.getInteger(
Environment.QUERY_PLAN_CACHE_PARAMETER_METADATA_MAX_SIZE,
factory.getProperties()
);
if ( maxParameterMetadataCount == null ) {
maxParameterMetadataCount = ConfigurationHelper.getInt(
Environment.QUERY_PLAN_CACHE_MAX_STRONG_REFERENCES,
factory.getProperties(),
DEFAULT_PARAMETER_METADATA_MAX_COUNT
);
}
Integer maxQueryPlanCount = ConfigurationHelper.getInteger(
Environment.QUERY_PLAN_CACHE_MAX_SIZE,
factory.getProperties()
);
if ( maxQueryPlanCount == null ) {
maxQueryPlanCount = ConfigurationHelper.getInt(
Environment.QUERY_PLAN_CACHE_MAX_SOFT_REFERENCES,
factory.getProperties(),
DEFAULT_QUERY_PLAN_MAX_COUNT
);
}
queryPlanCache = new BoundedConcurrentHashMap( maxQueryPlanCount, 20, BoundedConcurrentHashMap.Eviction.LIRS );
parameterMetadataCache = new BoundedConcurrentHashMap<String, ParameterMetadata>(
maxParameterMetadataCount,
20,
BoundedConcurrentHashMap.Eviction.LIRS
);
}
/**
@ -100,79 +125,22 @@ public class QueryPlanCache implements Serializable {
* @param query The query
* @return The parameter metadata
*/
public ParameterMetadata getSQLParameterMetadata(String query) {
ParameterMetadata metadata = ( ParameterMetadata ) sqlParamMetadataCache.get( query );
if ( metadata == null ) {
metadata = buildNativeSQLParameterMetadata( query );
sqlParamMetadataCache.put( query, metadata );
public ParameterMetadata getSQLParameterMetadata(final String query) {
ParameterMetadata value = parameterMetadataCache.get( query );
if ( value == null ) {
value = buildParameterMetadata( query );
parameterMetadataCache.putIfAbsent( query, value );
}
return metadata;
return value;
}
public HQLQueryPlan getHQLQueryPlan(String queryString, boolean shallow, Map enabledFilters)
throws QueryException, MappingException {
HQLQueryPlanKey key = new HQLQueryPlanKey( queryString, shallow, enabledFilters );
HQLQueryPlan plan = ( HQLQueryPlan ) planCache.get ( key );
if ( plan == null ) {
LOG.tracev( "Unable to locate HQL query plan in cache; generating ({0})", queryString );
plan = new HQLQueryPlan(queryString, shallow, enabledFilters, factory );
}
else {
LOG.tracev( "Located HQL query plan in cache ({0})", queryString );
}
planCache.put( key, plan );
return plan;
}
public FilterQueryPlan getFilterQueryPlan(String filterString, String collectionRole, boolean shallow, Map enabledFilters)
throws QueryException, MappingException {
FilterQueryPlanKey key = new FilterQueryPlanKey( filterString, collectionRole, shallow, enabledFilters );
FilterQueryPlan plan = ( FilterQueryPlan ) planCache.get ( key );
if ( plan == null ) {
LOG.tracev( "Unable to locate collection-filter query plan in cache; generating ({0} : {1} )",
collectionRole, filterString );
plan = new FilterQueryPlan( filterString, collectionRole, shallow, enabledFilters, factory );
}
else {
LOG.tracev( "Located collection-filter query plan in cache ({0} : {1})", collectionRole, filterString );
}
planCache.put( key, plan );
return plan;
}
public NativeSQLQueryPlan getNativeSQLQueryPlan(NativeSQLQuerySpecification spec) {
NativeSQLQueryPlan plan = ( NativeSQLQueryPlan ) planCache.get( spec );
if ( plan == null ) {
if ( LOG.isTraceEnabled() ) {
LOG.tracev( "Unable to locate native-sql query plan in cache; generating ({0})", spec.getQueryString() );
}
plan = new NativeSQLQueryPlan( spec, factory );
}
else {
if ( LOG.isTraceEnabled() ) {
LOG.tracev( "Located native-sql query plan in cache ({0})", spec.getQueryString() );
}
}
planCache.put( spec, plan );
return plan;
}
@SuppressWarnings({ "UnnecessaryUnboxing" })
private ParameterMetadata buildNativeSQLParameterMetadata(String sqlString) {
ParamLocationRecognizer recognizer = ParamLocationRecognizer.parseLocations( sqlString );
OrdinalParameterDescriptor[] ordinalDescriptors =
new OrdinalParameterDescriptor[ recognizer.getOrdinalParameterLocationList().size() ];
for ( int i = 0; i < recognizer.getOrdinalParameterLocationList().size(); i++ ) {
private ParameterMetadata buildParameterMetadata(String query){
ParamLocationRecognizer recognizer = ParamLocationRecognizer.parseLocations( query );
final int size = recognizer.getOrdinalParameterLocationList().size();
OrdinalParameterDescriptor[] ordinalDescriptors = new OrdinalParameterDescriptor[ size ];
for ( int i = 0; i < size; i++ ) {
final Integer position = ( Integer ) recognizer.getOrdinalParameterLocationList().get( i );
ordinalDescriptors[i] = new OrdinalParameterDescriptor( i, null, position.intValue() );
ordinalDescriptors[i] = new OrdinalParameterDescriptor( i, null, position );
}
Iterator itr = recognizer.getNamedParameterDescriptionMap().entrySet().iterator();
@ -184,13 +152,66 @@ public class QueryPlanCache implements Serializable {
( ParamLocationRecognizer.NamedParameterDescription ) entry.getValue();
namedParamDescriptorMap.put(
name ,
new NamedParameterDescriptor( name, null, description.buildPositionsArray(), description.isJpaStyle() )
new NamedParameterDescriptor( name, null, description.buildPositionsArray(), description.isJpaStyle() )
);
}
return new ParameterMetadata( ordinalDescriptors, namedParamDescriptorMap );
}
/**
 * Get the query plan for the given HQL query, creating and caching it on first use.
 *
 * @param queryString the HQL query string
 * @param shallow whether the plan should be built for shallow (id-only) loading
 * @param enabledFilters the currently enabled filters (part of the cache key)
 * @return the cached or newly built plan; never {@code null}
 * @throws QueryException if the query could not be translated
 * @throws MappingException if the query references unknown mappings
 */
public HQLQueryPlan getHQLQueryPlan( String queryString, boolean shallow, Map enabledFilters)
		throws QueryException, MappingException {
	final HQLQueryPlanKey key = new HQLQueryPlanKey( queryString, shallow, enabledFilters );
	HQLQueryPlan value = (HQLQueryPlan) queryPlanCache.get( key );
	if ( value == null ) {
		if ( isTraceEnabled ) {
			LOG.tracev( "Unable to locate HQL query plan in cache; generating ({0})", queryString );
		}
		value = new HQLQueryPlan( queryString, shallow, enabledFilters, factory );
		// if another thread raced us and cached a plan first, hand out that
		// canonical instance so all callers share a single cached plan
		final HQLQueryPlan previous = (HQLQueryPlan) queryPlanCache.putIfAbsent( key, value );
		if ( previous != null ) {
			value = previous;
		}
	}
	else {
		if ( isTraceEnabled ) {
			LOG.tracev( "Located HQL query plan in cache ({0})", queryString );
		}
	}
	return value;
}
/**
 * Get the query plan for the given collection filter, creating and caching it on first use.
 *
 * @param filterString the filter fragment
 * @param collectionRole the role of the collection being filtered
 * @param shallow whether the plan should be built for shallow (id-only) loading
 * @param enabledFilters the currently enabled filters (part of the cache key)
 * @return the cached or newly built plan; never {@code null}
 * @throws QueryException if the filter could not be translated
 * @throws MappingException if the filter references unknown mappings
 */
public FilterQueryPlan getFilterQueryPlan(String filterString, String collectionRole, boolean shallow, Map enabledFilters)
		throws QueryException, MappingException {
	final FilterQueryPlanKey key = new FilterQueryPlanKey( filterString, collectionRole, shallow, enabledFilters );
	FilterQueryPlan value = (FilterQueryPlan) queryPlanCache.get( key );
	if ( value == null ) {
		if ( isTraceEnabled ) {
			LOG.tracev(
					"Unable to locate collection-filter query plan in cache; generating ({0} : {1} )",
					collectionRole,
					filterString
			);
		}
		value = new FilterQueryPlan( filterString, collectionRole, shallow, enabledFilters, factory );
		// keep the instance that won any concurrent race, so all callers share one plan
		final FilterQueryPlan previous = (FilterQueryPlan) queryPlanCache.putIfAbsent( key, value );
		if ( previous != null ) {
			value = previous;
		}
	}
	else {
		if ( isTraceEnabled ) {
			LOG.tracev( "Located collection-filter query plan in cache ({0} : {1})", collectionRole, filterString );
		}
	}
	return value;
}
/**
 * Get the query plan for the given native-SQL query specification, creating and
 * caching it on first use.  The specification itself serves as the cache key.
 *
 * @param spec the native-SQL query specification
 * @return the cached or newly built plan; never {@code null}
 */
public NativeSQLQueryPlan getNativeSQLQueryPlan(final NativeSQLQuerySpecification spec) {
	NativeSQLQueryPlan value = (NativeSQLQueryPlan) queryPlanCache.get( spec );
	if ( value == null ) {
		if ( isTraceEnabled ) {
			LOG.tracev( "Unable to locate native-sql query plan in cache; generating ({0})", spec.getQueryString() );
		}
		value = new NativeSQLQueryPlan( spec, factory );
		// keep the instance that won any concurrent race, so all callers share one plan
		final NativeSQLQueryPlan previous = (NativeSQLQueryPlan) queryPlanCache.putIfAbsent( spec, value );
		if ( previous != null ) {
			value = previous;
		}
	}
	else {
		if ( isTraceEnabled ) {
			LOG.tracev( "Located native-sql query plan in cache ({0})", spec.getQueryString() );
		}
	}
	return value;
}
/**
 * Clean up this cache when the owning SessionFactory is closed: empties both the
 * query-plan cache and the parameter-metadata cache.
 */
public void cleanup() {
	if ( isTraceEnabled ) {
		LOG.trace( "Cleaning QueryPlan Cache" );
	}
	parameterMetadataCache.clear();
	queryPlanCache.clear();
}
private static class HQLQueryPlanKey implements Serializable {
private final String query;
private final boolean shallow;
@ -200,8 +221,7 @@ public class QueryPlanCache implements Serializable {
public HQLQueryPlanKey(String query, boolean shallow, Map enabledFilters) {
this.query = query;
this.shallow = shallow;
if ( enabledFilters == null || enabledFilters.isEmpty() ) {
if ( CollectionHelper.isEmpty( enabledFilters ) ) {
filterKeys = Collections.emptySet();
}
else {
@ -314,13 +334,14 @@ public class QueryPlanCache implements Serializable {
this.collectionRole = collectionRole;
this.shallow = shallow;
if ( enabledFilters == null || enabledFilters.isEmpty() ) {
filterNames = Collections.emptySet();
if ( CollectionHelper.isEmpty( enabledFilters ) ) {
this.filterNames = Collections.emptySet();
}
else {
Set<String> tmp = new HashSet<String>();
tmp.addAll( enabledFilters.keySet() );
this.filterNames = Collections.unmodifiableSet( tmp );
}
int hash = query.hashCode();

View File

@ -420,8 +420,8 @@ public class QueryTranslatorImpl implements FilterTranslator {
return sql;
}
public List collectSqlStrings() {
ArrayList list = new ArrayList();
public List<String> collectSqlStrings() {
ArrayList<String> list = new ArrayList<String>();
if ( isManipulationStatement() ) {
String[] sqlStatements = statementExecutor.getSqlStatements();
for ( int i = 0; i < sqlStatements.length; i++ ) {

View File

@ -268,7 +268,7 @@ public class QueryTranslatorImpl extends BasicLoader implements FilterTranslator
return sqlString;
}
public List collectSqlStrings() {
public List<String> collectSqlStrings() {
return ArrayHelper.toList( new String[] { sqlString } );
}

View File

@ -127,7 +127,7 @@ public interface QueryTranslator {
*/
String getSQLString();
List collectSqlStrings();
List<String> collectSqlStrings();
/**
* Returns the HQL string processed by the translator.

View File

@ -42,6 +42,7 @@ import org.hibernate.engine.jdbc.LobCreationContext;
import org.hibernate.engine.jdbc.spi.JdbcConnectionAccess;
import org.hibernate.engine.query.spi.HQLQueryPlan;
import org.hibernate.engine.query.spi.NativeSQLQueryPlan;
import org.hibernate.engine.query.spi.ParameterMetadata;
import org.hibernate.engine.query.spi.sql.NativeSQLQuerySpecification;
import org.hibernate.engine.spi.EntityKey;
import org.hibernate.engine.spi.NamedQueryDefinition;
@ -148,10 +149,11 @@ public abstract class AbstractSessionImpl implements Serializable, SharedSession
if ( nsqlqd==null ) {
throw new MappingException( "Named query not known: " + queryName );
}
ParameterMetadata parameterMetadata = factory.getQueryPlanCache().getSQLParameterMetadata( nsqlqd.getQueryString() );
query = new SQLQueryImpl(
nsqlqd,
this,
factory.getQueryPlanCache().getSQLParameterMetadata( nsqlqd.getQueryString() )
parameterMetadata
);
query.setComment( "named native SQL query " + queryName );
nqd = nsqlqd;

View File

@ -1135,6 +1135,8 @@ public final class SessionFactoryImpl
catch ( MappingException e ) {
errors.put( queryName, e );
}
}
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "Checking %s named SQL queries", namedSqlQueries.size() );
@ -1176,6 +1178,7 @@ public final class SessionFactoryImpl
catch ( MappingException e ) {
errors.put( queryName, e );
}
}
return errors;
@ -1265,11 +1268,15 @@ public final class SessionFactoryImpl
}
public Type[] getReturnTypes(String queryString) throws HibernateException {
return queryPlanCache.getHQLQueryPlan( queryString, false, CollectionHelper.EMPTY_MAP ).getReturnMetadata().getReturnTypes();
return queryPlanCache.getHQLQueryPlan( queryString, false, CollectionHelper.EMPTY_MAP )
.getReturnMetadata()
.getReturnTypes();
}
public String[] getReturnAliases(String queryString) throws HibernateException {
return queryPlanCache.getHQLQueryPlan( queryString, false, CollectionHelper.EMPTY_MAP ).getReturnMetadata().getReturnAliases();
return queryPlanCache.getHQLQueryPlan( queryString, false, CollectionHelper.EMPTY_MAP )
.getReturnMetadata()
.getReturnAliases();
}
public ClassMetadata getClassMetadata(Class persistentClass) throws HibernateException {
@ -1431,6 +1438,8 @@ public final class SessionFactoryImpl
settings.getRegionFactory().stop();
queryPlanCache.cleanup();
if ( settings.isAutoDropSchema() ) {
schemaExport.drop( false, true );
}

View File

@ -1,51 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.collections;
import java.io.Serializable;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* A simple LRU cache that implements the <code>Map</code> interface. Instances
* are not thread-safe and should be synchronized externally, for instance by
* using {@link java.util.Collections#synchronizedMap}.
*
* @author Manuel Dominguez Sarmiento
*/
/**
 * A simple bounded LRU (least-recently-used) cache implementing the {@link Map}
 * interface, backed by an access-ordered {@link LinkedHashMap}: once more than
 * {@code maxEntries} entries are present, the least recently accessed entry is
 * evicted on the next insertion.
 * <p>
 * Instances are not thread-safe and should be synchronized externally, for
 * instance by using {@link java.util.Collections#synchronizedMap}.
 *
 * @author Manuel Dominguez Sarmiento
 */
public class LRUMap<K, V> extends LinkedHashMap<K, V> implements Serializable {
	private static final long serialVersionUID = -5522608033020688048L;

	// hard cap on the number of entries retained
	private final int maxEntries;

	/**
	 * @param maxEntries the maximum number of entries this map may hold
	 */
	public LRUMap(int maxEntries) {
		// accessOrder=true so iteration order (and thus eviction order) follows recency of use
		super( maxEntries, .75f, true );
		this.maxEntries = maxEntries;
	}

	@Override
	protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
		// evict the least-recently-used entry once capacity is exceeded
		return ( size() > maxEntries );
	}
}

View File

@ -1,80 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.internal.util.collections;
import java.io.IOException;
import java.io.Serializable;
/**
 * Cache following a "Most Recently Used" (MRU) algorithm for maintaining a
 * bounded in-memory size; the "Least Recently Used" (LRU) entry is the first
 * available for removal from the cache.
 * <p/>
 * A bounded MRU map caps the in-memory footprint, so the cache never grows
 * beyond the configured size. All access is synchronized on this instance.
 * Entries are discarded on serialization; the backing map is rebuilt empty
 * on deserialization.
 *
 * @author Steve Ebersole
 */
public class SimpleMRUCache implements Serializable {
	/**
	 * The default strong reference count.
	 */
	public static final int DEFAULT_STRONG_REF_COUNT = 128;

	// Configured capacity; survives serialization so the map can be rebuilt.
	private final int strongReferenceCount;
	// Backing LRU map; transient — rebuilt empty by readObject().
	private transient LRUMap entries;

	/**
	 * Constructs a cache with the default capacity.
	 *
	 * @see #DEFAULT_STRONG_REF_COUNT
	 */
	public SimpleMRUCache() {
		this( DEFAULT_STRONG_REF_COUNT );
	}

	/**
	 * Constructs a cache with the given capacity.
	 *
	 * @param strongReferenceCount maximum number of entries to retain.
	 */
	public SimpleMRUCache(int strongReferenceCount) {
		this.strongReferenceCount = strongReferenceCount;
		init();
	}

	public synchronized Object get(Object key) {
		return entries.get( key );
	}

	public synchronized Object put(Object key, Object value) {
		return entries.put( key, value );
	}

	public synchronized int size() {
		return entries.size();
	}

	public synchronized void clear() {
		entries.clear();
	}

	// (Re)creates the empty backing map at the configured capacity.
	private void init() {
		entries = new LRUMap( strongReferenceCount );
	}

	// Restores serializable state, then rebuilds the transient map empty.
	private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
		in.defaultReadObject();
		init();
	}
}

View File

@ -1,231 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.collections;
import java.io.IOException;
import java.io.Serializable;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.SoftReference;
/**
 * Cache following a "Most Recently Used" (MRU) algorithm for maintaining a
 * bounded in-memory size; the "Least Recently Used" (LRU) entry is the first
 * available for removal from the cache.
 * <p/>
 * This implementation uses a "soft limit" to the in-memory size of the cache,
 * meaning that all cache entries are kept within a completely
 * {@link java.lang.ref.SoftReference}-based map with the most recently utilized
 * entries additionally kept in a hard-reference manner to prevent those cache
 * entries soft references from becoming enqueued by the garbage collector. Thus
 * the actual size of this cache impl can actually grow beyond the stated max
 * size bound as long as GC is not actively seeking soft references for
 * enqueuement.
 * <p/>
 * The soft-size is bounded and configurable. This allows controlling memory
 * usage which can grow out of control under some circumstances, especially when
 * very large heaps are in use. Although memory usage per se should not be a
 * problem with soft references, which are cleared when necessary, this can
 * trigger extremely slow stop-the-world GC pauses when nearing full heap usage,
 * even with CMS concurrent GC (i.e. concurrent mode failure). This is most
 * evident when ad-hoc HQL queries are produced by the application, leading to
 * poor soft-cache hit ratios. This can also occur with heavy use of SQL IN
 * clauses, which will generate multiples SQL queries (even if parameterized),
 * one for each collection/array size passed to the IN clause. Many slightly
 * different queries will eventually fill the heap and trigger a full GC to
 * reclaim space, leading to unacceptable pauses in some cases.
 * <p/>
 * <strong>Note:</strong> This class is serializable, however all entries are
 * discarded on serialization.
 *
 * @see org.hibernate.cfg.Environment#QUERY_PLAN_CACHE_MAX_STRONG_REFERENCES
 * @see org.hibernate.cfg.Environment#QUERY_PLAN_CACHE_MAX_SOFT_REFERENCES
 *
 * @author Steve Ebersole
 * @author Manuel Dominguez Sarmiento
 */
public class SoftLimitMRUCache implements Serializable {
	/**
	 * The default strong reference count.
	 */
	public static final int DEFAULT_STRONG_REF_COUNT = 128;

	/**
	 * The default soft reference count.
	 */
	public static final int DEFAULT_SOFT_REF_COUNT = 2048;

	private final int strongRefCount;
	private final int softRefCount;

	// Two tiers: strongRefCache pins the hottest entries with hard references;
	// softRefCache holds everything via SoftReferences the GC may clear.
	// All three are transient — rebuilt empty on deserialization.
	private transient LRUMap strongRefCache;
	private transient LRUMap softRefCache;
	// GC enqueues cleared KeyedSoftReferences here; drained lazily by
	// clearObsoleteReferences().
	private transient ReferenceQueue referenceQueue;

	/**
	 * Constructs a cache with the default settings.
	 *
	 * @see #DEFAULT_STRONG_REF_COUNT
	 * @see #DEFAULT_SOFT_REF_COUNT
	 */
	public SoftLimitMRUCache() {
		this( DEFAULT_STRONG_REF_COUNT, DEFAULT_SOFT_REF_COUNT );
	}

	/**
	 * Constructs a cache with the specified settings.
	 *
	 * @param strongRefCount the strong reference count.
	 * @param softRefCount the soft reference count.
	 *
	 * @throws IllegalArgumentException if either of the arguments is less than one, or if the strong
	 * reference count is higher than the soft reference count.
	 */
	public SoftLimitMRUCache(int strongRefCount, int softRefCount) {
		if ( strongRefCount < 1 || softRefCount < 1 ) {
			throw new IllegalArgumentException( "Reference counts must be greater than zero" );
		}
		if ( strongRefCount > softRefCount ) {
			throw new IllegalArgumentException( "Strong reference count cannot exceed soft reference count" );
		}
		this.strongRefCount = strongRefCount;
		this.softRefCount = softRefCount;
		init();
	}

	/**
	 * Gets an object from the cache.
	 *
	 * @param key the cache key.
	 *
	 * @return the stored value, or <code>null</code> if no entry exists.
	 *
	 * @throws NullPointerException if the key is null.
	 */
	public synchronized Object get(Object key) {
		if ( key == null ) {
			throw new NullPointerException( "Key to get cannot be null" );
		}
		clearObsoleteReferences();
		SoftReference ref = (SoftReference) softRefCache.get( key );
		if ( ref != null ) {
			Object refValue = ref.get();
			// refValue is null if the GC already cleared the reference.
			if ( refValue != null ) {
				// This ensures recently used entries are strongly-reachable
				strongRefCache.put( key, refValue );
				return refValue;
			}
		}
		return null;
	}

	/**
	 * Puts a value in the cache.
	 *
	 * @param key the key.
	 * @param value the value.
	 *
	 * @return the previous value stored in the cache, if any.
	 *
	 * @throws NullPointerException if the key or value is null.
	 */
	public synchronized Object put(Object key, Object value) {
		if ( key == null || value == null ) {
			// FIX: a space was missing after the class name, producing messages
			// like "...SoftLimitMRUCachedoes not support null key...".
			throw new NullPointerException(
					getClass().getName() + " does not support null key [" + key + "] or value [" + value + "]"
			);
		}
		clearObsoleteReferences();
		strongRefCache.put( key, value );
		// KeyedSoftReference carries the key so the mapping can be purged from
		// softRefCache after the GC clears and enqueues the reference.
		SoftReference ref = (SoftReference) softRefCache.put(
				key,
				new KeyedSoftReference( key, value, referenceQueue )
		);
		return ( ref != null ) ? ref.get() : null;
	}

	/**
	 * Gets the strong reference cache size.
	 *
	 * @return the strong reference cache size.
	 */
	public synchronized int size() {
		clearObsoleteReferences();
		return strongRefCache.size();
	}

	/**
	 * Gets the soft reference cache size.
	 *
	 * @return the soft reference cache size.
	 */
	public synchronized int softSize() {
		clearObsoleteReferences();
		return softRefCache.size();
	}

	/**
	 * Clears the cache.
	 */
	public synchronized void clear() {
		strongRefCache.clear();
		softRefCache.clear();
	}

	// (Re)creates both tiers and the reference queue; also used after
	// deserialization since all three fields are transient.
	private void init() {
		this.strongRefCache = new LRUMap( strongRefCount );
		this.softRefCache = new LRUMap( softRefCount );
		this.referenceQueue = new ReferenceQueue();
	}

	private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
		in.defaultReadObject();
		init();
	}

	// Drains the reference queue, dropping softRefCache mappings whose values
	// the garbage collector has already reclaimed.
	private void clearObsoleteReferences() {
		// Clear entries for soft references removed by garbage collector
		KeyedSoftReference obsoleteRef;
		while ( ( obsoleteRef = (KeyedSoftReference) referenceQueue.poll() ) != null ) {
			Object key = obsoleteRef.getKey();
			softRefCache.remove( key );
		}
	}

	// SoftReference that remembers its key, enabling map cleanup after the
	// referent is collected (a plain SoftReference loses that association).
	private static class KeyedSoftReference extends SoftReference {
		private final Object key;

		@SuppressWarnings({ "unchecked" })
		private KeyedSoftReference(Object key, Object value, ReferenceQueue q) {
			super( value, q );
			this.key = key;
		}

		private Object getKey() {
			return key;
		}
	}
}

View File

@ -26,8 +26,8 @@ import java.util.Map;
import org.hibernate.envers.entities.PropertyData;
import org.hibernate.envers.exception.AuditException;
import org.hibernate.envers.tools.ConcurrentReferenceHashMap;
import org.hibernate.envers.tools.Pair;
import org.hibernate.internal.util.collections.ConcurrentReferenceHashMap;
import org.hibernate.property.Getter;
import org.hibernate.property.PropertyAccessor;
import org.hibernate.property.PropertyAccessorFactory;

View File

@ -1,5 +1,6 @@
package org.hibernate.envers.test.integration.merge;
import org.hibernate.envers.AuditTable;
import org.hibernate.envers.Audited;
import javax.persistence.Entity;
@ -10,6 +11,7 @@ import javax.persistence.Id;
*/
@Entity
@Audited
@AuditTable("GIVENIDSTRENTITY_AUD")
public class GivenIdStrEntity {
@Id
private Integer id;