Fix misuse of the HashMap(int) constructor for pre-sizing: its argument is the initial capacity, not the expected element count, so passing the expected size still triggers resizing once the load factor is exceeded. Replace such calls with CollectionHelper.mapOfSize / setOfSize, which compute a proper capacity from the expected size.

This commit is contained in:
Nathan Xu 2020-03-01 14:33:57 -05:00 committed by Steve Ebersole
parent abe1e656f7
commit 38effc6191
33 changed files with 96 additions and 57 deletions

View File

@ -43,6 +43,7 @@ import org.hibernate.event.service.spi.EventListenerGroup;
import org.hibernate.event.service.spi.EventListenerRegistry; import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.event.spi.EventType; import org.hibernate.event.spi.EventType;
import org.hibernate.id.factory.spi.MutableIdentifierGeneratorFactory; import org.hibernate.id.factory.spi.MutableIdentifierGeneratorFactory;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.Collection; import org.hibernate.mapping.Collection;
import org.hibernate.mapping.FetchProfile; import org.hibernate.mapping.FetchProfile;
import org.hibernate.mapping.MappedSuperclass; import org.hibernate.mapping.MappedSuperclass;
@ -315,10 +316,10 @@ public class MetadataImpl implements MetadataImplementor, Serializable {
@Override @Override
public NamedQueryRepository buildNamedQueryRepository(SessionFactoryImplementor sessionFactory) { public NamedQueryRepository buildNamedQueryRepository(SessionFactoryImplementor sessionFactory) {
return new NamedQueryRepositoryImpl( return new NamedQueryRepositoryImpl(
new HashMap<>( namedQueryMap.size() ), CollectionHelper.mapOfSize( namedQueryMap.size() ),
new HashMap<>( namedNativeQueryMap.size() ), CollectionHelper.mapOfSize( namedNativeQueryMap.size() ),
new HashMap<>( namedProcedureCallMap.size() ), CollectionHelper.mapOfSize( namedProcedureCallMap.size() ),
new HashMap<>( sqlResultSetMappingMap.size() ) CollectionHelper.mapOfSize( sqlResultSetMappingMap.size() )
); );
} }

View File

@ -7,10 +7,10 @@
package org.hibernate.cache.spi.entry; package org.hibernate.cache.spi.entry;
import java.io.Serializable; import java.io.Serializable;
import java.util.HashMap;
import java.util.Map; import java.util.Map;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.CollectionHelper;
/** /**
* Structured CacheEntry format for persistent Maps. * Structured CacheEntry format for persistent Maps.
@ -28,7 +28,7 @@ public class StructuredMapCacheEntry implements CacheEntryStructure {
public Object structure(Object item) { public Object structure(Object item) {
final CollectionCacheEntry entry = (CollectionCacheEntry) item; final CollectionCacheEntry entry = (CollectionCacheEntry) item;
final Serializable[] state = entry.getState(); final Serializable[] state = entry.getState();
final Map map = new HashMap( state.length ); final Map map = CollectionHelper.mapOfSize( state.length );
int i = 0; int i = 0;
while ( i < state.length ) { while ( i < state.length ) {
map.put( state[i++], state[i++] ); map.put( state[i++], state[i++] );

View File

@ -25,6 +25,7 @@ import org.hibernate.cache.spi.access.CollectionDataAccess;
import org.hibernate.cache.spi.access.EntityDataAccess; import org.hibernate.cache.spi.access.EntityDataAccess;
import org.hibernate.cache.spi.access.NaturalIdDataAccess; import org.hibernate.cache.spi.access.NaturalIdDataAccess;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.metamodel.model.domain.NavigableRole; import org.hibernate.metamodel.model.domain.NavigableRole;
import org.jboss.logging.Logger; import org.jboss.logging.Logger;
@ -127,7 +128,7 @@ public abstract class AbstractDomainDataRegion extends AbstractRegion implements
return Collections.emptyMap(); return Collections.emptyMap();
} }
final Map<NavigableRole, EntityDataAccess> accessMap = new HashMap<>( entityCaching.size() ); final Map<NavigableRole, EntityDataAccess> accessMap = CollectionHelper.mapOfSize( entityCaching.size() );
for ( EntityDataCachingConfig entityAccessConfig : entityCaching ) { for ( EntityDataCachingConfig entityAccessConfig : entityCaching ) {
accessMap.put( accessMap.put(
entityAccessConfig.getNavigableRole(), entityAccessConfig.getNavigableRole(),
@ -144,7 +145,7 @@ public abstract class AbstractDomainDataRegion extends AbstractRegion implements
return Collections.emptyMap(); return Collections.emptyMap();
} }
final Map<NavigableRole, NaturalIdDataAccess> accessMap = new HashMap<>( naturalIdCaching.size() ); final Map<NavigableRole, NaturalIdDataAccess> accessMap = CollectionHelper.mapOfSize( naturalIdCaching.size() );
for ( NaturalIdDataCachingConfig naturalIdAccessConfig : naturalIdCaching ) { for ( NaturalIdDataCachingConfig naturalIdAccessConfig : naturalIdCaching ) {
accessMap.put( accessMap.put(
naturalIdAccessConfig.getNavigableRole(), naturalIdAccessConfig.getNavigableRole(),
@ -162,7 +163,7 @@ public abstract class AbstractDomainDataRegion extends AbstractRegion implements
return Collections.emptyMap(); return Collections.emptyMap();
} }
final Map<NavigableRole, CollectionDataAccess> accessMap = new HashMap<>( collectionCaching.size() ); final Map<NavigableRole, CollectionDataAccess> accessMap = CollectionHelper.mapOfSize( collectionCaching.size() );
for ( CollectionDataCachingConfig cachingConfig : collectionCaching ) { for ( CollectionDataCachingConfig cachingConfig : collectionCaching ) {
accessMap.put( accessMap.put(
cachingConfig.getNavigableRole(), cachingConfig.getNavigableRole(),

View File

@ -18,6 +18,7 @@ import org.hibernate.annotations.common.reflection.XProperty;
import org.hibernate.boot.spi.MetadataBuildingContext; import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.cfg.annotations.EntityBinder; import org.hibernate.cfg.annotations.EntityBinder;
import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.Component; import org.hibernate.mapping.Component;
import org.hibernate.mapping.Join; import org.hibernate.mapping.Join;
import org.hibernate.mapping.KeyValue; import org.hibernate.mapping.KeyValue;
@ -263,7 +264,7 @@ public class ClassPropertyHolder extends AbstractPropertyHolder {
*/ */
private Map<String, Join> getJoinsPerRealTableName() { private Map<String, Join> getJoinsPerRealTableName() {
if ( joinsPerRealTableName == null ) { if ( joinsPerRealTableName == null ) {
joinsPerRealTableName = new HashMap<>( joins.size() ); joinsPerRealTableName = CollectionHelper.mapOfSize( joins.size() );
for (Join join : joins.values()) { for (Join join : joins.values()) {
joinsPerRealTableName.put( join.getTable().getName(), join ); joinsPerRealTableName.put( join.getTable().getName(), join );
} }

View File

@ -16,6 +16,7 @@ import org.hibernate.AnnotationException;
import org.hibernate.AssertionFailure; import org.hibernate.AssertionFailure;
import org.hibernate.MappingException; import org.hibernate.MappingException;
import org.hibernate.boot.spi.MetadataBuildingContext; import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.BasicValue; import org.hibernate.mapping.BasicValue;
import org.hibernate.mapping.Column; import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component; import org.hibernate.mapping.Component;
@ -67,7 +68,7 @@ public class CopyIdentifierComponentSecondPass implements SecondPass {
//prepare column name structure //prepare column name structure
boolean isExplicitReference = true; boolean isExplicitReference = true;
Map<String, Ejb3JoinColumn> columnByReferencedName = new HashMap<>(joinColumns.length); Map<String, Ejb3JoinColumn> columnByReferencedName = CollectionHelper.mapOfSize( joinColumns.length );
for (Ejb3JoinColumn joinColumn : joinColumns) { for (Ejb3JoinColumn joinColumn : joinColumns) {
final String referencedColumnName = joinColumn.getReferencedColumn(); final String referencedColumnName = joinColumn.getReferencedColumn();
if ( referencedColumnName == null || BinderHelper.isEmptyAnnotationValue( referencedColumnName ) ) { if ( referencedColumnName == null || BinderHelper.isEmptyAnnotationValue( referencedColumnName ) ) {

View File

@ -30,6 +30,7 @@ import org.hibernate.boot.spi.BootstrapContext;
import org.hibernate.boot.spi.ClassLoaderAccess; import org.hibernate.boot.spi.ClassLoaderAccess;
import org.hibernate.boot.spi.ClassLoaderAccessDelegateImpl; import org.hibernate.boot.spi.ClassLoaderAccessDelegateImpl;
import org.hibernate.boot.spi.MetadataBuildingOptions; import org.hibernate.boot.spi.MetadataBuildingOptions;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.dom4j.Element; import org.dom4j.Element;
@ -53,7 +54,7 @@ public class JPAMetadataProvider implements MetadataProvider {
private final boolean xmlMappingEnabled; private final boolean xmlMappingEnabled;
private Map<Object, Object> defaults; private Map<Object, Object> defaults;
private Map<AnnotatedElement, AnnotationReader> cache = new HashMap<>(100); private Map<AnnotatedElement, AnnotationReader> cache = CollectionHelper.mapOfSize( 100 );
/** /**
* @deprecated Use {@link JPAMetadataProvider#JPAMetadataProvider(BootstrapContext)} instead. * @deprecated Use {@link JPAMetadataProvider#JPAMetadataProvider(BootstrapContext)} instead.

View File

@ -129,6 +129,7 @@ import org.hibernate.boot.spi.ClassLoaderAccess;
import org.hibernate.internal.CoreLogging; import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger; import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.dom4j.Attribute; import org.dom4j.Attribute;
import org.dom4j.Element; import org.dom4j.Element;
@ -348,7 +349,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
Element tree = xmlContext.getXMLTree( className ); Element tree = xmlContext.getXMLTree( className );
Annotation[] annotations = getPhysicalAnnotations(); Annotation[] annotations = getPhysicalAnnotations();
List<Annotation> annotationList = new ArrayList<>( annotations.length + 5 ); List<Annotation> annotationList = new ArrayList<>( annotations.length + 5 );
annotationsMap = new HashMap<>( annotations.length + 5 ); annotationsMap = CollectionHelper.mapOfSize( annotations.length + 5 );
for ( Annotation annotation : annotations ) { for ( Annotation annotation : annotations ) {
if ( !annotationToXml.containsKey( annotation.annotationType() ) ) { if ( !annotationToXml.containsKey( annotation.annotationType() ) ) {
//unknown annotations are left over //unknown annotations are left over
@ -391,7 +392,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
Element tree = xmlContext.getXMLTree( className ); Element tree = xmlContext.getXMLTree( className );
Annotation[] annotations = getPhysicalAnnotations(); Annotation[] annotations = getPhysicalAnnotations();
List<Annotation> annotationList = new ArrayList<>( annotations.length + 5 ); List<Annotation> annotationList = new ArrayList<>( annotations.length + 5 );
annotationsMap = new HashMap<>( annotations.length + 5 ); annotationsMap = CollectionHelper.mapOfSize( annotations.length + 5 );
for ( Annotation annotation : annotations ) { for ( Annotation annotation : annotations ) {
if ( !annotationToXml.containsKey( annotation.annotationType() ) ) { if ( !annotationToXml.containsKey( annotation.annotationType() ) ) {
//unknown annotations are left over //unknown annotations are left over
@ -433,7 +434,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
} }
else { else {
this.annotations = getPhysicalAnnotations(); this.annotations = getPhysicalAnnotations();
annotationsMap = new HashMap<>( annotations.length + 5 ); annotationsMap = CollectionHelper.mapOfSize( annotations.length + 5 );
for ( Annotation ann : this.annotations ) { for ( Annotation ann : this.annotations ) {
annotationsMap.put( ann.annotationType(), ann ); annotationsMap.put( ann.annotationType(), ann );
} }

View File

@ -28,6 +28,7 @@ import org.hibernate.event.spi.PreInsertEventListener;
import org.hibernate.event.spi.PreUpdateEvent; import org.hibernate.event.spi.PreUpdateEvent;
import org.hibernate.event.spi.PreUpdateEventListener; import org.hibernate.event.spi.PreUpdateEventListener;
import org.hibernate.internal.CoreMessageLogger; import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.persister.entity.EntityPersister; import org.hibernate.persister.entity.EntityPersister;
import org.jboss.logging.Logger; import org.jboss.logging.Logger;
@ -114,7 +115,7 @@ public class BeanValidationEventListener
if ( groups.length > 0 ) { if ( groups.length > 0 ) {
final Set<ConstraintViolation<T>> constraintViolations = validator.validate( object, groups ); final Set<ConstraintViolation<T>> constraintViolations = validator.validate( object, groups );
if ( constraintViolations.size() > 0 ) { if ( constraintViolations.size() > 0 ) {
Set<ConstraintViolation<?>> propagatedViolations = new HashSet<>( constraintViolations.size() ); Set<ConstraintViolation<?>> propagatedViolations = CollectionHelper.setOfSize( constraintViolations.size() );
Set<String> classNames = new HashSet<>(); Set<String> classNames = new HashSet<>();
for ( ConstraintViolation<?> violation : constraintViolations ) { for ( ConstraintViolation<?> violation : constraintViolations ) {
LOG.trace( violation ); LOG.trace( violation );

View File

@ -15,6 +15,7 @@ import javax.validation.groups.Default;
import org.hibernate.HibernateException; import org.hibernate.HibernateException;
import org.hibernate.boot.registry.classloading.spi.ClassLoadingException; import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
import org.hibernate.boot.spi.ClassLoaderAccess; import org.hibernate.boot.spi.ClassLoaderAccess;
import org.hibernate.internal.util.collections.CollectionHelper;
/** /**
* @author Emmanuel Bernard * @author Emmanuel Bernard
@ -26,7 +27,7 @@ public class GroupsPerOperation {
private static final Class<?>[] DEFAULT_GROUPS = new Class<?>[] { Default.class }; private static final Class<?>[] DEFAULT_GROUPS = new Class<?>[] { Default.class };
private static final Class<?>[] EMPTY_GROUPS = new Class<?>[] { }; private static final Class<?>[] EMPTY_GROUPS = new Class<?>[] { };
private Map<Operation, Class<?>[]> groupsPerOperation = new HashMap<>(4); private Map<Operation, Class<?>[]> groupsPerOperation = CollectionHelper.mapOfSize( 4 );
private GroupsPerOperation() { private GroupsPerOperation() {
} }

View File

@ -11,6 +11,7 @@ import java.util.Locale;
import java.util.Set; import java.util.Set;
import org.hibernate.HibernateException; import org.hibernate.HibernateException;
import org.hibernate.internal.util.collections.CollectionHelper;
/** /**
* Duplicates the javax.validation enum (because javax validation might not be on the runtime classpath) * Duplicates the javax.validation enum (because javax validation might not be on the runtime classpath)
@ -30,7 +31,7 @@ public enum ValidationMode {
} }
public static Set<ValidationMode> getModes(Object modeProperty) { public static Set<ValidationMode> getModes(Object modeProperty) {
Set<ValidationMode> modes = new HashSet<>(3); Set<ValidationMode> modes = CollectionHelper.setOfSize( 3 );
if (modeProperty == null) { if (modeProperty == null) {
modes.add( ValidationMode.AUTO ); modes.add( ValidationMode.AUTO );
} }

View File

@ -18,6 +18,7 @@ import java.util.Map;
import org.hibernate.HibernateException; import org.hibernate.HibernateException;
import org.hibernate.engine.spi.SessionImplementor; import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.metamodel.mapping.PluralAttributeMapping; import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.type.Type; import org.hibernate.type.Type;
@ -353,7 +354,7 @@ public class PersistentIdentifierBag extends AbstractPersistentCollection implem
@Override @Override
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public Serializable getSnapshot(CollectionPersister persister) throws HibernateException { public Serializable getSnapshot(CollectionPersister persister) throws HibernateException {
final HashMap map = new HashMap( values.size() ); final HashMap map = CollectionHelper.mapOfSize( values.size() );
final Iterator iter = values.iterator(); final Iterator iter = values.iterator();
int i=0; int i=0;
while ( iter.hasNext() ) { while ( iter.hasNext() ) {

View File

@ -18,6 +18,7 @@ import java.util.Set;
import org.hibernate.HibernateException; import org.hibernate.HibernateException;
import org.hibernate.engine.spi.SessionImplementor; import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.metamodel.mapping.PluralAttributeMapping; import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.type.Type; import org.hibernate.type.Type;
@ -94,7 +95,7 @@ public class PersistentMap extends AbstractPersistentCollection implements Map {
@Override @Override
@SuppressWarnings( {"unchecked"}) @SuppressWarnings( {"unchecked"})
public Serializable getSnapshot(CollectionPersister persister) throws HibernateException { public Serializable getSnapshot(CollectionPersister persister) throws HibernateException {
final HashMap clonedMap = new HashMap( map.size() ); final HashMap clonedMap = CollectionHelper.mapOfSize( map.size() );
for ( Object o : map.entrySet() ) { for ( Object o : map.entrySet() ) {
final Entry e = (Entry) o; final Entry e = (Entry) o;
final Object copy = persister.getElementType().deepCopy( e.getValue(), persister.getFactory() ); final Object copy = persister.getElementType().deepCopy( e.getValue(), persister.getFactory() );

View File

@ -17,6 +17,7 @@ import java.util.Set;
import org.hibernate.HibernateException; import org.hibernate.HibernateException;
import org.hibernate.engine.spi.SessionImplementor; import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.metamodel.mapping.PluralAttributeMapping; import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.type.Type; import org.hibernate.type.Type;
@ -97,7 +98,7 @@ public class PersistentSet extends AbstractPersistentCollection implements java.
@Override @Override
@SuppressWarnings( {"unchecked"}) @SuppressWarnings( {"unchecked"})
public Serializable getSnapshot(CollectionPersister persister) throws HibernateException { public Serializable getSnapshot(CollectionPersister persister) throws HibernateException {
final HashMap clonedSet = new HashMap( set.size() ); final HashMap clonedSet = CollectionHelper.mapOfSize( set.size() );
for ( Object aSet : set ) { for ( Object aSet : set ) {
final Object copied = persister.getElementType().deepCopy( aSet, persister.getFactory() ); final Object copied = persister.getElementType().deepCopy( aSet, persister.getFactory() );
clonedSet.put( copied, copied ); clonedSet.put( copied, copied );

View File

@ -10,6 +10,7 @@ import java.util.HashSet;
import java.util.Set; import java.util.Set;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.metamodel.CollectionClassification; import org.hibernate.metamodel.CollectionClassification;
import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.persister.collection.CollectionPersister;
@ -34,7 +35,7 @@ public class StandardSetSemantics extends AbstractSetSemantics<Set<?>> {
public Set<?> instantiateRaw( public Set<?> instantiateRaw(
int anticipatedSize, int anticipatedSize,
CollectionPersister collectionDescriptor) { CollectionPersister collectionDescriptor) {
return anticipatedSize < 1 ? new HashSet<>() : new HashSet<>( anticipatedSize ); return anticipatedSize < 1 ? new HashSet<>() : CollectionHelper.setOfSize( anticipatedSize );
} }
@Override @Override

View File

@ -58,6 +58,7 @@ import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.engine.spi.Status; import org.hibernate.engine.spi.Status;
import org.hibernate.event.spi.EventSource; import org.hibernate.event.spi.EventSource;
import org.hibernate.internal.CoreMessageLogger; import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.internal.util.collections.ConcurrentReferenceHashMap; import org.hibernate.internal.util.collections.ConcurrentReferenceHashMap;
import org.hibernate.internal.util.collections.IdentityMap; import org.hibernate.internal.util.collections.IdentityMap;
import org.hibernate.metamodel.spi.MetamodelImplementor; import org.hibernate.metamodel.spi.MetamodelImplementor;
@ -204,7 +205,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
@Override @Override
public void addUnownedCollection(CollectionKey key, PersistentCollection collection) { public void addUnownedCollection(CollectionKey key, PersistentCollection collection) {
if ( unownedCollections == null ) { if ( unownedCollections == null ) {
unownedCollections = new HashMap<>( INIT_COLL_SIZE ); unownedCollections = CollectionHelper.mapOfSize( INIT_COLL_SIZE );
} }
unownedCollections.put( key, collection ); unownedCollections.put( key, collection );
} }
@ -319,7 +320,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
else { else {
final Object[] snapshot = persister.getDatabaseSnapshot( id, session ); final Object[] snapshot = persister.getDatabaseSnapshot( id, session );
if ( entitySnapshotsByKey == null ) { if ( entitySnapshotsByKey == null ) {
entitySnapshotsByKey = new HashMap<>( INIT_COLL_SIZE ); entitySnapshotsByKey = CollectionHelper.mapOfSize( INIT_COLL_SIZE );
} }
entitySnapshotsByKey.put( key, snapshot == null ? NO_ROW : snapshot ); entitySnapshotsByKey.put( key, snapshot == null ? NO_ROW : snapshot );
return snapshot; return snapshot;
@ -392,7 +393,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
@Override @Override
public void addEntity(EntityKey key, Object entity) { public void addEntity(EntityKey key, Object entity) {
if ( entitiesByKey == null ) { if ( entitiesByKey == null ) {
entitiesByKey = new HashMap<>( INIT_COLL_SIZE ); entitiesByKey = CollectionHelper.mapOfSize( INIT_COLL_SIZE );
} }
entitiesByKey.put( key, entity ); entitiesByKey.put( key, entity );
final BatchFetchQueue fetchQueue = this.batchFetchQueue; final BatchFetchQueue fetchQueue = this.batchFetchQueue;
@ -453,7 +454,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
@Override @Override
public void addEntity(EntityUniqueKey euk, Object entity) { public void addEntity(EntityUniqueKey euk, Object entity) {
if ( entitiesByUniqueKey == null ) { if ( entitiesByUniqueKey == null ) {
entitiesByUniqueKey = new HashMap<>( INIT_COLL_SIZE ); entitiesByUniqueKey = CollectionHelper.mapOfSize( INIT_COLL_SIZE );
} }
entitiesByUniqueKey.put( euk, entity ); entitiesByUniqueKey.put( euk, entity );
} }
@ -775,7 +776,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
@Override @Override
public void addEnhancedProxy(EntityKey key, PersistentAttributeInterceptable entity) { public void addEnhancedProxy(EntityKey key, PersistentAttributeInterceptable entity) {
if ( entitiesByKey == null ) { if ( entitiesByKey == null ) {
entitiesByKey = new HashMap<>( INIT_COLL_SIZE ); entitiesByKey = CollectionHelper.mapOfSize( INIT_COLL_SIZE );
} }
entitiesByKey.put( key, entity ); entitiesByKey.put( key, entity );
} }
@ -1449,7 +1450,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
@Override @Override
public void addNullProperty(EntityKey ownerKey, String propertyName) { public void addNullProperty(EntityKey ownerKey, String propertyName) {
if ( nullAssociations == null ) { if ( nullAssociations == null ) {
nullAssociations = new HashSet<>( INIT_COLL_SIZE ); nullAssociations = CollectionHelper.setOfSize( INIT_COLL_SIZE );
} }
nullAssociations.add( new AssociationKey( ownerKey, propertyName ) ); nullAssociations.add( new AssociationKey( ownerKey, propertyName ) );
} }
@ -1711,7 +1712,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
if ( LOG.isTraceEnabled() ) { if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting deserialization of [" + count + "] entitiesByKey entries" ); LOG.trace( "Starting deserialization of [" + count + "] entitiesByKey entries" );
} }
rtn.entitiesByKey = new HashMap<>( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count ); rtn.entitiesByKey = CollectionHelper.mapOfSize( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) { for ( int i = 0; i < count; i++ ) {
rtn.entitiesByKey.put( EntityKey.deserialize( ois, sfi ), ois.readObject() ); rtn.entitiesByKey.put( EntityKey.deserialize( ois, sfi ), ois.readObject() );
} }
@ -1721,7 +1722,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
LOG.trace( "Starting deserialization of [" + count + "] entitiesByUniqueKey entries" ); LOG.trace( "Starting deserialization of [" + count + "] entitiesByUniqueKey entries" );
} }
if ( count != 0 ) { if ( count != 0 ) {
rtn.entitiesByUniqueKey = new HashMap<>( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count ); rtn.entitiesByUniqueKey = CollectionHelper.mapOfSize( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) { for ( int i = 0; i < count; i++ ) {
rtn.entitiesByUniqueKey.put( EntityUniqueKey.deserialize( ois, session ), ois.readObject() ); rtn.entitiesByUniqueKey.put( EntityUniqueKey.deserialize( ois, session ), ois.readObject() );
} }
@ -1750,7 +1751,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
if ( LOG.isTraceEnabled() ) { if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting deserialization of [" + count + "] entitySnapshotsByKey entries" ); LOG.trace( "Starting deserialization of [" + count + "] entitySnapshotsByKey entries" );
} }
rtn.entitySnapshotsByKey = new HashMap<>( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count ); rtn.entitySnapshotsByKey = CollectionHelper.mapOfSize( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) { for ( int i = 0; i < count; i++ ) {
rtn.entitySnapshotsByKey.put( EntityKey.deserialize( ois, sfi ), ois.readObject() ); rtn.entitySnapshotsByKey.put( EntityKey.deserialize( ois, sfi ), ois.readObject() );
} }
@ -1761,7 +1762,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
if ( LOG.isTraceEnabled() ) { if ( LOG.isTraceEnabled() ) {
LOG.trace( "Starting deserialization of [" + count + "] collectionsByKey entries" ); LOG.trace( "Starting deserialization of [" + count + "] collectionsByKey entries" );
} }
rtn.collectionsByKey = new HashMap<>( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count ); rtn.collectionsByKey = CollectionHelper.mapOfSize( count < INIT_COLL_SIZE ? INIT_COLL_SIZE : count );
for ( int i = 0; i < count; i++ ) { for ( int i = 0; i < count; i++ ) {
rtn.collectionsByKey.put( CollectionKey.deserialize( ois, session ), (PersistentCollection) ois.readObject() ); rtn.collectionsByKey.put( CollectionKey.deserialize( ois, session ), (PersistentCollection) ois.readObject() );
} }
@ -1907,7 +1908,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
@Override @Override
public PersistentCollection addCollectionByKey(CollectionKey collectionKey, PersistentCollection persistentCollection) { public PersistentCollection addCollectionByKey(CollectionKey collectionKey, PersistentCollection persistentCollection) {
if ( collectionsByKey == null ) { if ( collectionsByKey == null ) {
collectionsByKey = new HashMap<>( INIT_COLL_SIZE ); collectionsByKey = CollectionHelper.mapOfSize( INIT_COLL_SIZE );
} }
final PersistentCollection old = collectionsByKey.put( collectionKey, persistentCollection ); final PersistentCollection old = collectionsByKey.put( collectionKey, persistentCollection );
return old; return old;

View File

@ -29,6 +29,7 @@ import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.log.DeprecationLogger; import org.hibernate.internal.log.DeprecationLogger;
import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.beans.BeanInfoHelper; import org.hibernate.internal.util.beans.BeanInfoHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.service.spi.ServiceRegistryImplementor; import org.hibernate.service.spi.ServiceRegistryImplementor;
/** /**
@ -81,7 +82,7 @@ public class ConnectionProviderInitiator implements StandardServiceInitiator<Con
private static final Map<String,String> LEGACY_CONNECTION_PROVIDER_MAPPING; private static final Map<String,String> LEGACY_CONNECTION_PROVIDER_MAPPING;
static { static {
LEGACY_CONNECTION_PROVIDER_MAPPING = new HashMap<>( 5 ); LEGACY_CONNECTION_PROVIDER_MAPPING = CollectionHelper.mapOfSize( 5 );
LEGACY_CONNECTION_PROVIDER_MAPPING.put( LEGACY_CONNECTION_PROVIDER_MAPPING.put(
"org.hibernate.connection.DatasourceConnectionProvider", "org.hibernate.connection.DatasourceConnectionProvider",

View File

@ -17,6 +17,7 @@ import org.hibernate.cache.spi.access.EntityDataAccess;
import org.hibernate.collection.spi.PersistentCollection; import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.internal.CacheHelper; import org.hibernate.engine.internal.CacheHelper;
import org.hibernate.internal.CoreLogging; import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.metamodel.mapping.EntityMappingType; import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister; import org.hibernate.persister.entity.EntityPersister;
@ -103,7 +104,7 @@ public class BatchFetchQueue {
*/ */
public void addSubselect(EntityKey key, SubselectFetch subquery) { public void addSubselect(EntityKey key, SubselectFetch subquery) {
if ( subselectsByEntityKey == null ) { if ( subselectsByEntityKey == null ) {
subselectsByEntityKey = new HashMap<>( 12 ); subselectsByEntityKey = CollectionHelper.mapOfSize( 12 );
} }
subselectsByEntityKey.put( key, subquery ); subselectsByEntityKey.put( key, subquery );
} }
@ -135,7 +136,7 @@ public class BatchFetchQueue {
public void addBatchLoadableEntityKey(EntityKey key) { public void addBatchLoadableEntityKey(EntityKey key) {
if ( key.isBatchLoadable() ) { if ( key.isBatchLoadable() ) {
if ( batchLoadableEntityKeys == null ) { if ( batchLoadableEntityKeys == null ) {
batchLoadableEntityKeys = new HashMap<>( 12 ); batchLoadableEntityKeys = CollectionHelper.mapOfSize( 12 );
} }
final LinkedHashSet<EntityKey> keysForEntity = batchLoadableEntityKeys.computeIfAbsent( final LinkedHashSet<EntityKey> keysForEntity = batchLoadableEntityKeys.computeIfAbsent(
key.getEntityName(), key.getEntityName(),
@ -254,7 +255,7 @@ public class BatchFetchQueue {
final CollectionPersister persister = ce.getLoadedPersister(); final CollectionPersister persister = ce.getLoadedPersister();
if ( batchLoadableCollections == null ) { if ( batchLoadableCollections == null ) {
batchLoadableCollections = new HashMap<>( 12 ); batchLoadableCollections = CollectionHelper.mapOfSize( 12 );
} }
final LinkedHashMap<CollectionEntry, PersistentCollection> map = batchLoadableCollections.computeIfAbsent( final LinkedHashMap<CollectionEntry, PersistentCollection> map = batchLoadableCollections.computeIfAbsent(

View File

@ -334,7 +334,7 @@ public class ExecutableList<E extends Executable & Comparable & Serializable> im
this.querySpaces = null; this.querySpaces = null;
} }
else { else {
querySpaces = new HashSet<>( CollectionHelper.determineProperSizing( numberOfQuerySpaces ) ); querySpaces = CollectionHelper.setOfSize( numberOfQuerySpaces );
for ( int i = 0; i < numberOfQuerySpaces; i++ ) { for ( int i = 0; i < numberOfQuerySpaces; i++ ) {
querySpaces.add( in.readUTF() ); querySpaces.add( in.readUTF() );
} }

View File

@ -11,6 +11,7 @@ import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.hibernate.graph.AttributeNode; import org.hibernate.graph.AttributeNode;
import org.hibernate.graph.CannotBecomeEntityGraphException; import org.hibernate.graph.CannotBecomeEntityGraphException;
@ -48,7 +49,7 @@ public abstract class AbstractGraph<J> extends AbstractGraphNode<J> implements G
protected AbstractGraph(boolean mutable, GraphImplementor<J> original) { protected AbstractGraph(boolean mutable, GraphImplementor<J> original) {
this( original.getGraphedType(), mutable, original.jpaMetamodel() ); this( original.getGraphedType(), mutable, original.jpaMetamodel() );
this.attrNodeMap = CollectionHelper.concurrentMap( original.getAttributeNodeList().size() ); this.attrNodeMap = new ConcurrentHashMap<>( original.getAttributeNodeList().size() );
original.visitAttributeNodes( original.visitAttributeNodes(
node -> attrNodeMap.put( node -> attrNodeMap.put(
node.getAttributeDescriptor(), node.getAttributeDescriptor(),

View File

@ -1605,7 +1605,7 @@ public class BoundedConcurrentHashMap<K, V> extends AbstractMap<K, V>
evictedCopy = singletonMap( evictedEntry.key, evictedEntry.value ); evictedCopy = singletonMap( evictedEntry.key, evictedEntry.value );
} }
else { else {
evictedCopy = new HashMap<>( evicted.size() ); evictedCopy = CollectionHelper.mapOfSize( evicted.size() );
for ( HashEntry<K, V> he : evicted ) { for ( HashEntry<K, V> he : evicted ) {
evictedCopy.put( he.key, he.value ); evictedCopy.put( he.key, he.value );
} }

View File

@ -56,10 +56,23 @@ public final class CollectionHelper {
* *
* @return The sized map. * @return The sized map.
*/ */
public static <K, V> Map<K, V> mapOfSize(int size) { public static <K, V> HashMap<K, V> mapOfSize(int size) {
return new HashMap<>( determineProperSizing( size ), LOAD_FACTOR ); return new HashMap<>( determineProperSizing( size ), LOAD_FACTOR );
} }
/**
* Build a properly sized set, especially handling load size and load factor to prevent immediate resizing.
* <p/>
* Especially helpful for copy set contents.
*
* @param size The size to make the set.
*
* @return The sized set.
*/
public static <K> HashSet<K> setOfSize(int size) {
return new HashSet<>( determineProperSizing( size ), LOAD_FACTOR );
}
/** /**
* Given a map, determine the proper initial size for a new Map to hold the same number of values. * Given a map, determine the proper initial size for a new Map to hold the same number of values.
* Specifically we want to account for load size and load factor to prevent immediate resizing. * Specifically we want to account for load size and load factor to prevent immediate resizing.
@ -174,8 +187,7 @@ public final class CollectionHelper {
* @return The created map. * @return The created map.
*/ */
public static <K, V> ConcurrentHashMap<K, V> concurrentMap(int expectedNumberOfElements, float loadFactor) { public static <K, V> ConcurrentHashMap<K, V> concurrentMap(int expectedNumberOfElements, float loadFactor) {
final int size = expectedNumberOfElements + 1 + (int) ( expectedNumberOfElements * loadFactor ); return new ConcurrentHashMap<>( expectedNumberOfElements, loadFactor );
return new ConcurrentHashMap<>( size, loadFactor );
} }
public static <T> ArrayList<T> arrayList(int expectedNumberOfElements) { public static <T> ArrayList<T> arrayList(int expectedNumberOfElements) {
@ -188,7 +200,7 @@ public final class CollectionHelper {
} }
final int size = source.size(); final int size = source.size();
final Set<T> copy = new HashSet<>( size + 1 ); final Set<T> copy = CollectionHelper.setOfSize( size + 1 );
copy.addAll( source ); copy.addAll( source );
return copy; return copy;
} }

View File

@ -144,7 +144,7 @@ public final class IdentityMap<K,V> implements Map<K,V> {
@Override @Override
public Set<Entry<K,V>> entrySet() { public Set<Entry<K,V>> entrySet() {
Set<Entry<K,V>> set = new HashSet<>( map.size() ); Set<Entry<K,V>> set = CollectionHelper.setOfSize( map.size() );
for ( Entry<IdentityKey<K>, V> entry : map.entrySet() ) { for ( Entry<IdentityKey<K>, V> entry : map.entrySet() ) {
set.add( new IdentityMapEntry<>( entry.getKey().key, entry.getValue() ) ); set.add( new IdentityMapEntry<>( entry.getKey().key, entry.getValue() ) );
} }

View File

@ -8,6 +8,7 @@ package org.hibernate.metamodel.internal;
import java.util.Iterator; import java.util.Iterator;
import java.util.Map; import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.hibernate.internal.util.collections.CollectionHelper; import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.Component; import org.hibernate.mapping.Component;
@ -37,7 +38,7 @@ public abstract class AbstractEmbeddableRepresentationStrategy implements Embedd
this.embeddableJavaTypeDescriptor = embeddableJavaTypeDescriptor; this.embeddableJavaTypeDescriptor = embeddableJavaTypeDescriptor;
this.propertyAccesses = new PropertyAccess[ propertySpan ]; this.propertyAccesses = new PropertyAccess[ propertySpan ];
this.attributeNameToPositionMap = CollectionHelper.concurrentMap( propertySpan ); this.attributeNameToPositionMap = new ConcurrentHashMap<>( propertySpan );
boolean foundCustomAccessor = false; boolean foundCustomAccessor = false;
Iterator itr = bootDescriptor.getPropertyIterator(); Iterator itr = bootDescriptor.getPropertyIterator();

View File

@ -209,7 +209,7 @@ public class JpaMetamodelImpl implements JpaMetamodel {
final int setSize = CollectionHelper.determineProperSizing( final int setSize = CollectionHelper.determineProperSizing(
jpaEntityTypeMap.size() + jpaMappedSuperclassTypeMap.size() + jpaEmbeddableDescriptorMap.size() jpaEntityTypeMap.size() + jpaMappedSuperclassTypeMap.size() + jpaEmbeddableDescriptorMap.size()
); );
final Set<ManagedType<?>> managedTypes = new HashSet<>( setSize ); final Set<ManagedType<?>> managedTypes = CollectionHelper.setOfSize( setSize );
managedTypes.addAll( jpaEntityTypeMap.values() ); managedTypes.addAll( jpaEntityTypeMap.values() );
managedTypes.addAll( jpaMappedSuperclassTypeMap.values() ); managedTypes.addAll( jpaMappedSuperclassTypeMap.values() );
managedTypes.addAll( jpaEmbeddableDescriptorMap.values() ); managedTypes.addAll( jpaEmbeddableDescriptorMap.values() );

View File

@ -37,6 +37,7 @@ import org.hibernate.internal.FilterAliasGenerator;
import org.hibernate.internal.util.MarkerObject; import org.hibernate.internal.util.MarkerObject;
import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.ArrayHelper; import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.Column; import org.hibernate.mapping.Column;
import org.hibernate.mapping.Formula; import org.hibernate.mapping.Formula;
import org.hibernate.mapping.Join; import org.hibernate.mapping.Join;
@ -528,7 +529,7 @@ public class JoinedSubclassEntityPersister extends AbstractEntityPersister {
subclassesByDiscriminatorValue.put( discriminatorValue, getEntityName() ); subclassesByDiscriminatorValue.put( discriminatorValue, getEntityName() );
discriminatorValuesByTableName = new LinkedHashMap<>( subclassSpan + 1 ); discriminatorValuesByTableName = new LinkedHashMap<>( subclassSpan + 1 );
subclassNameByTableName = new HashMap<>( subclassSpan + 1); subclassNameByTableName = CollectionHelper.mapOfSize( subclassSpan + 1);
// We need to convert the `discriminatorSQLString` (which is a String read from boot-mapping) into // We need to convert the `discriminatorSQLString` (which is a String read from boot-mapping) into
// the type indicated by `#discriminatorType` (String -> Integer, e.g.). // the type indicated by `#discriminatorType` (String -> Integer, e.g.).
try { try {

View File

@ -8,6 +8,7 @@ package org.hibernate.query.internal;
import java.util.IdentityHashMap; import java.util.IdentityHashMap;
import java.util.Map; import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiConsumer; import java.util.function.BiConsumer;
import org.hibernate.Incubating; import org.hibernate.Incubating;
@ -78,7 +79,7 @@ public class QueryParameterBindingsImpl implements QueryParameterBindings {
this.parameterMetadata = parameterMetadata; this.parameterMetadata = parameterMetadata;
this.queryParametersValidationEnabled = queryParametersValidationEnabled; this.queryParametersValidationEnabled = queryParametersValidationEnabled;
this.parameterBindingMap = CollectionHelper.concurrentMap( parameterMetadata.getParameterCount() ); this.parameterBindingMap = new ConcurrentHashMap<>( parameterMetadata.getParameterCount() );
} }
@SuppressWarnings({"WeakerAccess", "unchecked"}) @SuppressWarnings({"WeakerAccess", "unchecked"})

View File

@ -19,6 +19,7 @@ import java.util.function.Supplier;
import org.hibernate.boot.TempTableDdlTransactionHandling; import org.hibernate.boot.TempTableDdlTransactionHandling;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.internal.util.collections.Stack; import org.hibernate.internal.util.collections.Stack;
import org.hibernate.metamodel.MappingMetamodel; import org.hibernate.metamodel.MappingMetamodel;
import org.hibernate.persister.entity.EntityPersister; import org.hibernate.persister.entity.EntityPersister;
@ -142,7 +143,7 @@ public class TableBasedUpdateHandler
// cross-reference the TableReference by alias. The TableGroup already // cross-reference the TableReference by alias. The TableGroup already
// cross-references it by name, bu the ColumnReference only has the alias // cross-references it by name, bu the ColumnReference only has the alias
final Map<String, TableReference> tableReferenceByAlias = new HashMap<>( updatingTableGroup.getTableReferenceJoins().size() + 1 ); final Map<String, TableReference> tableReferenceByAlias = CollectionHelper.mapOfSize( updatingTableGroup.getTableReferenceJoins().size() + 1 );
collectTableReference( updatingTableGroup.getPrimaryTableReference(), tableReferenceByAlias::put ); collectTableReference( updatingTableGroup.getPrimaryTableReference(), tableReferenceByAlias::put );
for ( int i = 0; i < updatingTableGroup.getTableReferenceJoins().size(); i++ ) { for ( int i = 0; i < updatingTableGroup.getTableReferenceJoins().size(); i++ ) {
collectTableReference( updatingTableGroup.getTableReferenceJoins().get( i ), tableReferenceByAlias::put ); collectTableReference( updatingTableGroup.getTableReferenceJoins().get( i ), tableReferenceByAlias::put );

View File

@ -18,6 +18,7 @@ import org.hibernate.boot.TempTableDdlTransactionHandling;
import org.hibernate.engine.jdbc.spi.JdbcServices; import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.metamodel.mapping.ColumnConsumer; import org.hibernate.metamodel.mapping.ColumnConsumer;
import org.hibernate.metamodel.mapping.EntityMappingType; import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.ModelPartContainer; import org.hibernate.metamodel.mapping.ModelPartContainer;
@ -99,7 +100,7 @@ public class UpdateExecutionDelegate implements TableBasedUpdateHandler.Executio
this.entityDescriptor = (EntityMappingType) updatingModelPart; this.entityDescriptor = (EntityMappingType) updatingModelPart;
this.assignmentsByTable = new HashMap<>( updatingTableGroup.getTableReferenceJoins().size() + 1 ); this.assignmentsByTable = CollectionHelper.mapOfSize( updatingTableGroup.getTableReferenceJoins().size() + 1 );
jdbcParameterBindings = SqmUtil.createJdbcParameterBindings( jdbcParameterBindings = SqmUtil.createJdbcParameterBindings(
executionContext.getQueryParameterBindings(), executionContext.getQueryParameterBindings(),

View File

@ -29,6 +29,7 @@ import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.engine.jdbc.internal.FormatStyle; import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.internal.Formatter; import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.Constraint; import org.hibernate.mapping.Constraint;
import org.hibernate.mapping.ForeignKey; import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Index; import org.hibernate.mapping.Index;
@ -160,7 +161,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() ); final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter(); final Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
final Set<String> exportIdentifiers = new HashSet<>( 50 ); final Set<String> exportIdentifiers = CollectionHelper.setOfSize( 50 );
final Database database = metadata.getDatabase(); final Database database = metadata.getDatabase();

View File

@ -34,6 +34,7 @@ import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.internal.CoreLogging; import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger; import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.internal.util.config.ConfigurationHelper; import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.mapping.ForeignKey; import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Index; import org.hibernate.mapping.Index;
@ -216,7 +217,7 @@ public class SchemaCreatorImpl implements SchemaCreator {
final Database database = metadata.getDatabase(); final Database database = metadata.getDatabase();
final Set<String> exportIdentifiers = new HashSet<>( 50 ); final Set<String> exportIdentifiers = CollectionHelper.setOfSize( 50 );
// first, create each catalog/schema // first, create each catalog/schema
if ( tryToCreateCatalogs || tryToCreateSchemas ) { if ( tryToCreateCatalogs || tryToCreateSchemas ) {

View File

@ -35,6 +35,7 @@ import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
import org.hibernate.internal.CoreLogging; import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger; import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.ForeignKey; import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Table; import org.hibernate.mapping.Table;
import org.hibernate.resource.transaction.spi.TransactionCoordinatorBuilder; import org.hibernate.resource.transaction.spi.TransactionCoordinatorBuilder;
@ -200,7 +201,7 @@ public class SchemaDropperImpl implements SchemaDropper {
} }
} }
final Set<String> exportIdentifiers = new HashSet<>( 50 ); final Set<String> exportIdentifiers = CollectionHelper.setOfSize( 50 );
// NOTE : init commands are irrelevant for dropping... // NOTE : init commands are irrelevant for dropping...

View File

@ -5,9 +5,10 @@
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>. * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/ */
package org.hibernate.transform; package org.hibernate.transform;
import java.util.HashMap;
import java.util.Map; import java.util.Map;
import org.hibernate.internal.util.collections.CollectionHelper;
/** /**
* {@link ResultTransformer} implementation which builds a map for each "row", * {@link ResultTransformer} implementation which builds a map for each "row",
* made up of each aliased value where the alias is the map key. * made up of each aliased value where the alias is the map key.
@ -30,7 +31,7 @@ public class AliasToEntityMapResultTransformer extends AliasedTupleSubsetResultT
@Override @Override
public Object transformTuple(Object[] tuple, String[] aliases) { public Object transformTuple(Object[] tuple, String[] aliases) {
Map result = new HashMap(tuple.length); Map result = CollectionHelper.mapOfSize( tuple.length );
for ( int i=0; i<tuple.length; i++ ) { for ( int i=0; i<tuple.length; i++ ) {
String alias = aliases[i]; String alias = aliases[i];
if ( alias!=null ) { if ( alias!=null ) {

View File

@ -11,6 +11,7 @@ import java.util.HashSet;
import org.hibernate.collection.internal.PersistentSet; import org.hibernate.collection.internal.PersistentSet;
import org.hibernate.collection.spi.PersistentCollection; import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.type.spi.TypeConfiguration; import org.hibernate.type.spi.TypeConfiguration;
@ -39,7 +40,7 @@ public class SetType extends CollectionType {
public Object instantiate(int anticipatedSize) { public Object instantiate(int anticipatedSize) {
return anticipatedSize <= 0 return anticipatedSize <= 0
? new HashSet() ? new HashSet()
: new HashSet( anticipatedSize + (int)( anticipatedSize * .75f ), .75f ); : CollectionHelper.setOfSize( anticipatedSize );
} }
} }