HHH-13496 Some collection constructors could benefit from size hinting
parent 18e8c04d4d
commit 35b67bd00e
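The change is the same throughout the commit: wherever a collection was created empty and then filled from a known source with addAll()/putAll(), the source is now passed to the copy constructor, which sizes the backing storage before copying. A minimal sketch of the before/after shapes (names here are illustrative, not taken from the Hibernate sources):

    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    class SizeHintSketch {
        // Before: the no-arg constructor starts at the default capacity (16 buckets),
        // so a large source can force several rehashes while addAll() runs.
        static Set<String> copyWithoutHint(List<String> source) {
            Set<String> copy = new HashSet<>();
            copy.addAll( source );
            return copy;
        }

        // After: the copy constructor reads source.size(), allocates a table big
        // enough for those elements, and then performs the same inserts.
        static Set<String> copyWithHint(List<String> source) {
            return new HashSet<>( source );
        }
    }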
@@ -108,8 +108,7 @@ public class BulkOperationCleanupAction implements Executable, Serializable {
 	 */
 	@SuppressWarnings({ "unchecked" })
 	public BulkOperationCleanupAction(SharedSessionContractImplementor session, Set tableSpaces) {
-		final LinkedHashSet<String> spacesList = new LinkedHashSet<>();
-		spacesList.addAll( tableSpaces );
+		final LinkedHashSet<String> spacesList = new LinkedHashSet<>( tableSpaces );
 
 		final SessionFactoryImplementor factory = session.getFactory();
 		final MetamodelImplementor metamodel = factory.getMetamodel();
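In BulkOperationCleanupAction the target is a LinkedHashSet, so the copy constructor keeps the insertion order the addAll() version produced while also sizing the backing table from tableSpaces.size(). A hedged sketch of the equivalent shape (a typed parameter is used here for brevity; the Hibernate constructor takes a raw Set):

    import java.util.Collection;
    import java.util.LinkedHashSet;

    class SpacesListSketch {
        static LinkedHashSet<String> collectSpaces(Collection<String> tableSpaces) {
            // Same iteration order as add()-ing each element, but the table is
            // allocated for tableSpaces.size() entries before the copy happens.
            return new LinkedHashSet<>( tableSpaces );
        }
    }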
@@ -1691,8 +1691,7 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
 
 		ArrayList<CopyIdentifierComponentSecondPass> sorted =
 				new ArrayList<>( copyIdentifierComponentSecondPasList.size() );
-		Set<CopyIdentifierComponentSecondPass> toSort = new HashSet<>();
-		toSort.addAll( copyIdentifierComponentSecondPasList );
+		Set<CopyIdentifierComponentSecondPass> toSort = new HashSet<>( copyIdentifierComponentSecondPasList );
 		topologicalSort( sorted, toSort );
 		copyIdentifierComponentSecondPasList = sorted;
 	}
@@ -33,8 +33,7 @@ public class CommaSeparatedStringHelper {
 			return Collections.emptySet();
 		}
 
-		HashSet<String> set = new HashSet<String>();
-		set.addAll( x );
+		HashSet<String> set = new HashSet<String>( x );
 		if ( values != null && !values.isEmpty() ) {
 			Collections.addAll( set, values.split( "\\s*,\\s*" ) );
 		}
@@ -278,8 +278,7 @@ public class StandardServiceRegistryBuilder {
 		applyServiceContributingIntegrators();
 		applyServiceContributors();
 
-		final Map settingsCopy = new HashMap();
-		settingsCopy.putAll( settings );
+		final Map settingsCopy = new HashMap( settings );
 		settingsCopy.put( org.hibernate.boot.cfgxml.spi.CfgXmlAccessService.LOADED_CONFIG_KEY, aggregatedCfgXml );
 		ConfigurationHelper.resolvePlaceHolders( settingsCopy );
 
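The same idea applies to maps: HashMap's copy constructor sizes its table for the source map's entry count and copies in one step, so the separate putAll() call disappears. A small sketch of the settings-copy shape used here (and again in the schema tools further down); the key string is illustrative, not a real setting name:

    import java.util.HashMap;
    import java.util.Map;

    class SettingsCopySketch {
        static Map<String, Object> copyAndOverride(Map<String, Object> settings) {
            // The copy constructor allocates for settings.size() entries up front;
            // the override below then lands in an already-sized table.
            Map<String, Object> copy = new HashMap<>( settings );
            copy.put( "example.key", "override" );
            return copy;
        }
    }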
@@ -105,9 +105,7 @@ public class ClassLoaderServiceImpl implements ClassLoaderService {
 
 		final Collection<ClassLoader> classLoaders = (Collection<ClassLoader>) configValues.get( AvailableSettings.CLASSLOADERS );
 		if ( classLoaders != null ) {
-			for ( ClassLoader classLoader : classLoaders ) {
-				providedClassLoaders.add( classLoader );
-			}
+			providedClassLoaders.addAll( classLoaders );
 		}
 
 		addIfSet( providedClassLoaders, AvailableSettings.APP_CLASSLOADER, configValues );
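The ClassLoaderServiceImpl hunk is slightly different: nothing new is constructed, a hand-written per-element loop is just replaced by Collection.addAll(). For a set target the result is the same elements with less code; for an ArrayList target addAll() can also grow the backing array once instead of per element. A sketch assuming a LinkedHashSet-like field, since the field's declared type is not shown in this hunk:

    import java.util.Collection;
    import java.util.LinkedHashSet;
    import java.util.Set;

    class ProvidedClassLoadersSketch {
        private final Set<ClassLoader> providedClassLoaders = new LinkedHashSet<>();

        void addConfiguredLoaders(Collection<ClassLoader> classLoaders) {
            if ( classLoaders != null ) {
                // Equivalent to looping and calling add() for each element, just more direct.
                providedClassLoaders.addAll( classLoaders );
            }
        }
    }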
@@ -2808,8 +2808,7 @@ public final class AnnotationBinder {
 				buildingContext.getMetadataCollector().addSecondPass( secondPass );
 			}
 			else {
-				Map<String, IdentifierGeneratorDefinition> localGenerators = new HashMap<>();
-				localGenerators.putAll( buildGenerators( property, buildingContext ) );
+				Map<String, IdentifierGeneratorDefinition> localGenerators = new HashMap<>( buildGenerators( property, buildingContext ) );
 				BinderHelper.makeIdGenerator(
 						(SimpleValue) comp.getProperty( property.getName() ).getValue(),
 						property,
@@ -74,10 +74,7 @@ public class PersistentBag extends AbstractPersistentCollection implements List
 			bag = (List) coll;
 		}
 		else {
-			bag = new ArrayList();
-			for ( Object element : coll ) {
-				bag.add( element );
-			}
+			bag = new ArrayList( coll );
 		}
 		setInitialized();
 		setDirectlyAccessible( true );
@@ -80,10 +80,7 @@ public class PersistentIdentifierBag extends AbstractPersistentCollection implem
 			values = (List<Object>) coll;
 		}
 		else {
-			values = new ArrayList<>();
-			for ( Object element : coll ) {
-				values.add( element );
-			}
+			values = new ArrayList<>( coll );
 		}
 		setInitialized();
 		setDirectlyAccessible( true );
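For PersistentBag and PersistentIdentifierBag, ArrayList's copy constructor replaces an explicit element-by-element loop: it takes a toArray() snapshot of the source and uses an exactly sized backing array, avoiding the incremental growth the loop would trigger. A hedged sketch of the shape (coll stands for the Collection handed to those constructors):

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;

    class BagInitSketch {
        static List<Object> initialValues(Collection<Object> coll) {
            // Before: new ArrayList() plus a for-loop of add() calls, growing as it goes.
            // After: the backing array is sized to coll.size() and filled in one pass.
            return new ArrayList<>( coll );
        }
    }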
@@ -211,8 +211,7 @@ public class ExtractedDatabaseMetaDataImpl implements ExtractedDatabaseMetaData
 			return Collections.emptySet();
 		}
 
-		final Set<String> keywordSet = new HashSet<String>();
-		keywordSet.addAll( Arrays.asList( extraKeywordsString.split( "\\s*,\\s*" ) ) );
+		final Set<String> keywordSet = new HashSet<String>( Arrays.asList( extraKeywordsString.split( "\\s*,\\s*" ) ) );
 		return keywordSet;
 	}
 
@@ -416,8 +416,7 @@ public class QueryPlanCache implements Serializable {
 			this.filterNames = Collections.emptySet();
 		}
 		else {
-			final Set<String> tmp = new HashSet<String>();
-			tmp.addAll( enabledFilters.keySet() );
+			final Set<String> tmp = new HashSet<String>( enabledFilters.keySet() );
 			this.filterNames = Collections.unmodifiableSet( tmp );
 
 		}
@@ -38,8 +38,7 @@ public class NativeSQLQuerySpecification {
 			this.querySpaces = Collections.EMPTY_SET;
 		}
 		else {
-			Set tmp = new HashSet();
-			tmp.addAll( querySpaces );
+			Set tmp = new HashSet( querySpaces );
 			this.querySpaces = Collections.unmodifiableSet( tmp );
 		}
 
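QueryPlanCache and NativeSQLQuerySpecification both build a defensive copy and then publish it through Collections.unmodifiableSet(); only the copy step changes, the exposed set stays immutable. A sketch of the pattern, assuming a generic filter map as the source:

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    class FilterNamesSketch {
        static Set<String> snapshotKeys(Map<String, ?> enabledFilters) {
            if ( enabledFilters == null || enabledFilters.isEmpty() ) {
                return Collections.emptySet();
            }
            // Sized copy of the key set, then an unmodifiable view over the copy,
            // so later changes to enabledFilters do not leak into the snapshot.
            final Set<String> tmp = new HashSet<>( enabledFilters.keySet() );
            return Collections.unmodifiableSet( tmp );
        }
    }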
@@ -40,8 +40,7 @@ public class SQLStateConversionDelegate extends AbstractSQLExceptionConversionDe
 
 	private static final Set<String> SQL_GRAMMAR_CATEGORIES = buildGrammarCategories();
 	private static Set<String> buildGrammarCategories() {
-		HashSet<String> categories = new HashSet<String>();
-		categories.addAll(
+		HashSet<String> categories = new HashSet<String>(
 				Arrays.asList(
 						"07", // "dynamic SQL error"
 						"20",
@@ -57,8 +56,7 @@ public class SQLStateConversionDelegate extends AbstractSQLExceptionConversionDe
 
 	private static final Set DATA_CATEGORIES = buildDataCategories();
 	private static Set<String> buildDataCategories() {
-		HashSet<String> categories = new HashSet<String>();
-		categories.addAll(
+		HashSet<String> categories = new HashSet<String>(
 				Arrays.asList(
 						"21", // "cardinality violation"
 						"22" // "data exception"
@@ -69,8 +67,7 @@ public class SQLStateConversionDelegate extends AbstractSQLExceptionConversionDe
 
 	private static final Set INTEGRITY_VIOLATION_CATEGORIES = buildContraintCategories();
 	private static Set<String> buildContraintCategories() {
-		HashSet<String> categories = new HashSet<String>();
-		categories.addAll(
+		HashSet<String> categories = new HashSet<String>(
 				Arrays.asList(
 						"23", // "integrity constraint violation"
 						"27", // "triggered data change violation"
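The three SQL-state category sets in SQLStateConversionDelegate are fixed lists of SQLSTATE class prefixes, so the List returned by Arrays.asList() can be handed straight to the HashSet constructor and construction plus population collapse into one expression. A sketch with a shortened, illustrative code list:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    class CategorySetSketch {
        // Illustrative subset; the real category lists in the delegate are longer.
        private static final Set<String> SQL_GRAMMAR_CATEGORIES = new HashSet<>( Arrays.asList(
                "07", // "dynamic SQL error"
                "20"
        ) );
    }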
@@ -440,11 +440,10 @@ public class BoundedConcurrentHashMap<K, V> extends AbstractMap<K, V>
 
 		@Override
 		public Set<HashEntry<K, V>> execute() {
-			Set<HashEntry<K, V>> evictedCopy = new HashSet<HashEntry<K, V>>();
+			Set<HashEntry<K, V>> evictedCopy = new HashSet<HashEntry<K, V>>( evicted );
 			for ( HashEntry<K, V> e : accessQueue ) {
 				put( e, e.value );
 			}
-			evictedCopy.addAll( evicted );
 			accessQueue.clear();
 			evicted.clear();
 			return evictedCopy;
@@ -454,8 +453,7 @@ public class BoundedConcurrentHashMap<K, V> extends AbstractMap<K, V>
 		public Set<HashEntry<K, V>> onEntryMiss(HashEntry<K, V> e) {
 			put( e, e.value );
 			if ( !evicted.isEmpty() ) {
-				Set<HashEntry<K, V>> evictedCopy = new HashSet<HashEntry<K, V>>();
-				evictedCopy.addAll( evicted );
+				Set<HashEntry<K, V>> evictedCopy = new HashSet<HashEntry<K, V>>( evicted );
 				evicted.clear();
 				return evictedCopy;
 			}
@@ -64,8 +64,7 @@ public abstract class AbstractEntityJoinWalker extends JoinWalker {
 			final LockOptions lockOptions,
 			final AssociationInitCallback callback) throws MappingException {
 		walkEntityTree( persister, getAlias() );
-		List allAssociations = new ArrayList();
-		allAssociations.addAll( associations );
+		List allAssociations = new ArrayList( associations );
 		allAssociations.add( OuterJoinableAssociation.createRoot( persister.getEntityType(), alias, getFactory() ) );
 		initPersisters( allAssociations, lockOptions, callback );
 		initStatementString( whereString, orderByString, lockOptions );
@@ -51,8 +51,7 @@ public class BasicCollectionJoinWalker extends CollectionJoinWalker {
 
 		walkCollectionTree(collectionPersister, alias);
 
-		List allAssociations = new ArrayList();
-		allAssociations.addAll(associations);
+		List allAssociations = new ArrayList( associations );
 		allAssociations.add( OuterJoinableAssociation.createRoot( collectionPersister.getCollectionType(), alias, getFactory() ) );
 		initPersisters( allAssociations, LockMode.NONE );
 		initStatementString( alias, batchSize, subquery );
@@ -58,8 +58,7 @@ public class OneToManyJoinWalker extends CollectionJoinWalker {
 
 		walkEntityTree( elementPersister, alias );
 
-		List allAssociations = new ArrayList();
-		allAssociations.addAll( associations );
+		List allAssociations = new ArrayList( associations );
 		allAssociations.add(
 				OuterJoinableAssociation.createRoot(
 						oneToManyPersister.getCollectionType(),
@@ -255,9 +255,7 @@ public class SchemaExport {
 			Metadata metadata,
 			ServiceRegistry serviceRegistry,
 			TargetDescriptor targetDescriptor) {
-		Map config = new HashMap();
-		config.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
-
+		Map config = new HashMap( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
 		config.put( AvailableSettings.HBM2DDL_DELIMITER, delimiter );
 		config.put( AvailableSettings.FORMAT_SQL, format );
 		config.put( AvailableSettings.HBM2DDL_IMPORT_FILES, importFiles );
@@ -72,8 +72,7 @@ public class SchemaUpdate {
 		exceptions.clear();
 		LOG.runningHbm2ddlSchemaUpdate();
 
-		Map config = new HashMap();
-		config.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
+		Map config = new HashMap( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
 		config.put( AvailableSettings.HBM2DDL_DELIMITER, delimiter );
 		config.put( AvailableSettings.FORMAT_SQL, format );
 
@@ -52,8 +52,7 @@ public class SchemaValidator {
 	public void validate(Metadata metadata, ServiceRegistry serviceRegistry) {
 		LOG.runningSchemaValidator();
 
-		Map config = new HashMap();
-		config.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
+		Map config = new HashMap( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
 
 		final SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class );
 
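SchemaExport, SchemaUpdate and SchemaValidator all repeat the same shape: copy the ConfigurationService settings into a fresh HashMap (now via the copy constructor) and layer tool-specific overrides on top, leaving the registry's own settings untouched. A compact sketch of that shape, assuming a plain Map<String, Object> in place of the raw Map the tools use and the usual string values of the AvailableSettings constants:

    import java.util.HashMap;
    import java.util.Map;

    class ToolConfigSketch {
        static Map<String, Object> toolConfig(Map<String, Object> registrySettings, String delimiter, boolean format) {
            // Sized copy of the shared settings; the overrides only affect this local map.
            Map<String, Object> config = new HashMap<>( registrySettings );
            config.put( "hibernate.hbm2ddl.delimiter", delimiter ); // AvailableSettings.HBM2DDL_DELIMITER
            config.put( "hibernate.format_sql", format );           // AvailableSettings.FORMAT_SQL
            return config;
        }
    }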