Merge remote-tracking branch 'upstream/master' into wip/6.0

Andrea Boriero 2020-09-22 11:43:55 +01:00
commit 140fbb45d6
121 changed files with 796 additions and 267 deletions

View File

@ -6,14 +6,15 @@
*/ */
package org.hibernate.userguide.naming; package org.hibernate.userguide.naming;
import java.util.LinkedList; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.Map; import java.util.Map;
import java.util.TreeMap; import java.util.TreeMap;
import java.util.stream.Collectors;
import org.hibernate.boot.model.naming.Identifier; import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.naming.PhysicalNamingStrategy; import org.hibernate.boot.model.naming.PhysicalNamingStrategyStandardImpl;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment; import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@ -27,40 +28,35 @@ import org.apache.commons.lang3.StringUtils;
* Additionally standards call for the replacement of certain words with abbreviations. * Additionally standards call for the replacement of certain words with abbreviations.
* *
* @author Steve Ebersole * @author Steve Ebersole
* @author Nathan Xu
*/ */
public class AcmeCorpPhysicalNamingStrategy implements PhysicalNamingStrategy { public class AcmeCorpPhysicalNamingStrategy extends PhysicalNamingStrategyStandardImpl {
private static final Map<String,String> ABBREVIATIONS = buildAbbreviationMap(); private static final Map<String, String> ABBREVIATIONS;
@Override static {
public Identifier toPhysicalCatalogName(Identifier name, JdbcEnvironment jdbcEnvironment) { ABBREVIATIONS = new TreeMap<>( String.CASE_INSENSITIVE_ORDER );
// Acme naming standards do not apply to catalog names ABBREVIATIONS.put( "account", "acct" );
return name; ABBREVIATIONS.put( "number", "num" );
}
@Override
public Identifier toPhysicalSchemaName(Identifier name, JdbcEnvironment jdbcEnvironment) {
// Acme naming standards do not apply to schema names
return name;
} }
@Override @Override
public Identifier toPhysicalTableName(Identifier name, JdbcEnvironment jdbcEnvironment) { public Identifier toPhysicalTableName(Identifier name, JdbcEnvironment jdbcEnvironment) {
final List<String> parts = splitAndReplace( name.getText() ); final List<String> parts = splitAndReplace( name.getText() );
return jdbcEnvironment.getIdentifierHelper().toIdentifier( return jdbcEnvironment.getIdentifierHelper().toIdentifier(
join( parts ), StringUtils.join( parts, '_' ),
name.isQuoted() name.isQuoted()
); );
} }
@Override @Override
public Identifier toPhysicalSequenceName(Identifier name, JdbcEnvironment jdbcEnvironment) { public Identifier toPhysicalSequenceName(Identifier name, JdbcEnvironment jdbcEnvironment) {
final LinkedList<String> parts = splitAndReplace( name.getText() ); final List<String> parts = splitAndReplace( name.getText() );
// Acme Corp says all sequences should end with _seq // Acme Corp says all sequences should end with _seq
if ( !"seq".equalsIgnoreCase( parts.getLast() ) ) { if ( !"seq".equals( parts.get( parts.size() - 1 ) ) ) {
parts.add( "seq" ); parts.add( "seq" );
} }
return jdbcEnvironment.getIdentifierHelper().toIdentifier( return jdbcEnvironment.getIdentifierHelper().toIdentifier(
join( parts ), StringUtils.join( parts, '_' ),
name.isQuoted() name.isQuoted()
); );
} }
@ -69,50 +65,15 @@ public class AcmeCorpPhysicalNamingStrategy implements PhysicalNamingStrategy {
public Identifier toPhysicalColumnName(Identifier name, JdbcEnvironment jdbcEnvironment) { public Identifier toPhysicalColumnName(Identifier name, JdbcEnvironment jdbcEnvironment) {
final List<String> parts = splitAndReplace( name.getText() ); final List<String> parts = splitAndReplace( name.getText() );
return jdbcEnvironment.getIdentifierHelper().toIdentifier( return jdbcEnvironment.getIdentifierHelper().toIdentifier(
join( parts ), StringUtils.join( parts, '_' ),
name.isQuoted() name.isQuoted()
); );
} }
private static Map<String, String> buildAbbreviationMap() { private List<String> splitAndReplace(String name) {
TreeMap<String,String> abbreviationMap = new TreeMap<> ( String.CASE_INSENSITIVE_ORDER ); return Arrays.stream( StringUtils.splitByCharacterTypeCamelCase( name ) )
abbreviationMap.put( "account", "acct" ); .filter( StringUtils::isNotBlank )
abbreviationMap.put( "number", "num" ); .map( p -> ABBREVIATIONS.getOrDefault( p, p ).toLowerCase( Locale.ROOT ) )
return abbreviationMap; .collect( Collectors.toList() );
}
private LinkedList<String> splitAndReplace(String name) {
LinkedList<String> result = new LinkedList<>();
for ( String part : StringUtils.splitByCharacterTypeCamelCase( name ) ) {
if ( part == null || part.trim().isEmpty() ) {
// skip null and space
continue;
}
part = applyAbbreviationReplacement( part );
result.add( part.toLowerCase( Locale.ROOT ) );
}
return result;
}
private String applyAbbreviationReplacement(String word) {
if ( ABBREVIATIONS.containsKey( word ) ) {
return ABBREVIATIONS.get( word );
}
return word;
}
private String join(List<String> parts) {
boolean firstPass = true;
String separator = "";
StringBuilder joined = new StringBuilder();
for ( String part : parts ) {
joined.append( separator ).append( part );
if ( firstPass ) {
firstPass = false;
separator = "_";
}
}
return joined.toString();
} }
} }
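Editor's note (not part of the diff): the reworked user-guide example above now extends PhysicalNamingStrategyStandardImpl, letting the base class handle catalog and schema names. Below is a minimal, hedged sketch of how such a strategy is typically activated; only the hibernate.physical_naming_strategy setting and the strategy class shown in the diff are taken from the source, while the persistence-unit name "acme-pu" and the class name of the sketch are placeholders.

import java.util.HashMap;
import java.util.Map;

import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;

import org.hibernate.cfg.AvailableSettings;

public class AcmeNamingStrategyBootstrapSketch {

    public static void main(String[] args) {
        // Register the custom strategy by class name; Hibernate instantiates it
        // and consults it whenever logical names are turned into physical ones.
        Map<String, Object> settings = new HashMap<>();
        settings.put(
                AvailableSettings.PHYSICAL_NAMING_STRATEGY, // "hibernate.physical_naming_strategy"
                "org.hibernate.userguide.naming.AcmeCorpPhysicalNamingStrategy"
        );

        // "acme-pu" is a placeholder persistence-unit name, not something defined by this commit.
        EntityManagerFactory emf = Persistence.createEntityManagerFactory( "acme-pu", settings );
        emf.close();
    }
}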

View File

@ -14,7 +14,7 @@ ext {
junit5Version = '5.3.1' junit5Version = '5.3.1'
h2Version = '1.4.199' h2Version = '1.4.199'
bytemanVersion = '4.0.13' //Compatible with JDK14 bytemanVersion = '4.0.13' //Compatible with JDK16
jnpVersion = '5.0.6.CR1' jnpVersion = '5.0.6.CR1'
hibernateCommonsVersion = '5.1.0.Final' hibernateCommonsVersion = '5.1.0.Final'

View File

@ -224,7 +224,7 @@ public class EntityUpdateAction extends EntityAction {
session session
); );
if ( persister.hasUpdateGeneratedProperties() ) { if ( persister.hasUpdateGeneratedProperties() ) {
// this entity defines proeprty generation, so process those generated // this entity defines property generation, so process those generated
// values... // values...
persister.processUpdateGeneratedProperties( id, instance, state, session ); persister.processUpdateGeneratedProperties( id, instance, state, session );
if ( persister.isVersionPropertyGenerated() ) { if ( persister.isVersionPropertyGenerated() ) {

View File

@ -116,7 +116,7 @@ public class MetadataBuilderImpl implements MetadataBuilderImplementor, TypeCont
this.sources = sources; this.sources = sources;
this.options = new MetadataBuildingOptionsImpl( serviceRegistry ); this.options = new MetadataBuildingOptionsImpl( serviceRegistry );
this.bootstrapContext = new BootstrapContextImpl( serviceRegistry, options ); this.bootstrapContext = new BootstrapContextImpl( serviceRegistry, options );
//this is needed only fro implementig deprecated method //this is needed only for implementing deprecated method
options.setBootstrapContext( bootstrapContext ); options.setBootstrapContext( bootstrapContext );
for ( MetadataSourcesContributor contributor : for ( MetadataSourcesContributor contributor :

View File

@ -20,6 +20,7 @@ import org.hibernate.boot.model.convert.spi.AutoApplicableConverterDescriptor;
import org.hibernate.boot.model.convert.spi.ConverterDescriptor; import org.hibernate.boot.model.convert.spi.ConverterDescriptor;
import org.hibernate.boot.spi.MetadataBuildingContext; import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.cfg.annotations.HCANNHelper; import org.hibernate.cfg.annotations.HCANNHelper;
import org.hibernate.internal.util.type.PrimitiveWrapperHelper;
import com.fasterxml.classmate.ResolvedType; import com.fasterxml.classmate.ResolvedType;
import com.fasterxml.classmate.ResolvedTypeWithMembers; import com.fasterxml.classmate.ResolvedTypeWithMembers;
@ -147,7 +148,11 @@ public class AutoApplicableConverterDescriptorStandardImpl implements AutoApplic
} }
private boolean typesMatch(ResolvedType converterDefinedType, ResolvedType checkType) { private boolean typesMatch(ResolvedType converterDefinedType, ResolvedType checkType) {
if ( !converterDefinedType.getErasedType().isAssignableFrom( checkType.getErasedType() ) ) { Class<?> erasedCheckType = checkType.getErasedType();
if ( erasedCheckType.isPrimitive() ) {
erasedCheckType = PrimitiveWrapperHelper.getDescriptorByPrimitiveType( erasedCheckType ).getWrapperClass();
}
if ( !converterDefinedType.getErasedType().isAssignableFrom( erasedCheckType ) ) {
return false; return false;
} }
@ -180,4 +185,5 @@ public class AutoApplicableConverterDescriptorStandardImpl implements AutoApplic
return true; return true;
} }
} }
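Editor's note (not part of the diff): the primitive-to-wrapper boxing added to typesMatch() means an auto-applied converter declared for a wrapper type can now also match an attribute declared with the corresponding primitive. A hedged illustration with hypothetical class and attribute names:

import javax.persistence.AttributeConverter;
import javax.persistence.Converter;
import javax.persistence.Entity;
import javax.persistence.Id;

public class PrimitiveAutoApplySketch {

    // Hypothetical auto-applied converter declared against the wrapper type Boolean.
    @Converter( autoApply = true )
    public static class YesNoConverter implements AttributeConverter<Boolean, Character> {
        @Override
        public Character convertToDatabaseColumn(Boolean attribute) {
            return attribute == null ? null : ( attribute ? 'Y' : 'N' );
        }

        @Override
        public Boolean convertToEntityAttribute(Character dbData) {
            return dbData == null ? null : dbData == 'Y';
        }
    }

    // Hypothetical entity whose attribute is the primitive 'boolean'. With the
    // boxing added in typesMatch(), the converter above now auto-applies here too.
    @Entity
    public static class Account {
        @Id
        Long id;

        boolean active;
    }
}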

View File

@ -208,7 +208,7 @@ public class BootstrapServiceRegistryBuilder {
final ClassLoaderService classLoaderService; final ClassLoaderService classLoaderService;
if ( providedClassLoaderService == null ) { if ( providedClassLoaderService == null ) {
// Use a set. As an example, in JPA, OsgiClassLoader may be in both // Use a set. As an example, in JPA, OsgiClassLoader may be in both
// the providedClassLoaders and the overridenClassLoader. // the providedClassLoaders and the overriddenClassLoader.
final Set<ClassLoader> classLoaders = new HashSet<>(); final Set<ClassLoader> classLoaders = new HashSet<>();
if ( providedClassLoaders != null ) { if ( providedClassLoaders != null ) {

View File

@ -129,7 +129,7 @@ public class EnhancementHelper {
finally { finally {
if ( isTempSession ) { if ( isTempSession ) {
try { try {
// Commit the JDBC transaction is we started one. // Commit the JDBC transaction if we started one.
if ( !isJta ) { if ( !isJta ) {
BytecodeLogger.LOGGER.debug( "Enhancement interception Helper#performWork committing transaction on temporary Session" ); BytecodeLogger.LOGGER.debug( "Enhancement interception Helper#performWork committing transaction on temporary Session" );
session.getTransaction().commit(); session.getTransaction().commit();

View File

@ -117,7 +117,7 @@ public class DomainDataRegionConfigImpl implements DomainDataRegionConfig {
} }
// todo (6.0) : `EntityPersister` and `CollectionPersister` references here should be replaces with `EntityHierarchy` and `PersistentCollectionDescriptor` // todo (6.0) : `EntityPersister` and `CollectionPersister` references here should be replaced with `EntityHierarchy` and `PersistentCollectionDescriptor`
// //
// todo : although ^^, couldn't this just be the boot-time model? Is there a specific need for it to be the run-time model? // todo : although ^^, couldn't this just be the boot-time model? Is there a specific need for it to be the run-time model?
// that would alleviate the difference between 5.3 and 6.0 from the SPI POV // that would alleviate the difference between 5.3 and 6.0 from the SPI POV

View File

@ -86,7 +86,7 @@ public class NaturalIdCacheKey implements Serializable {
@Override @Override
public String initialize() { public String initialize() {
//Complex toString is needed as naturalIds for entities are not simply based on a single value like primary keys //Complex toString is needed as naturalIds for entities are not simply based on a single value like primary keys
//the only same way to differentiate the keys is to included the disassembled values in the string. //the only same way to differentiate the keys is to include the disassembled values in the string.
final StringBuilder toStringBuilder = new StringBuilder().append( entityName ).append( final StringBuilder toStringBuilder = new StringBuilder().append( entityName ).append(
"##NaturalId[" ); "##NaturalId[" );
for ( int i = 0; i < naturalIdValues.length; i++ ) { for ( int i = 0; i < naturalIdValues.length; i++ ) {

View File

@ -83,8 +83,9 @@ public class RegionFactoryInitiator implements StandardServiceInitiator<RegionFa
final Collection<Class<? extends RegionFactory>> implementors = selector.getRegisteredStrategyImplementors( RegionFactory.class ); final Collection<Class<? extends RegionFactory>> implementors = selector.getRegisteredStrategyImplementors( RegionFactory.class );
if ( setting == null && implementors.size() != 1 ) { if ( setting == null && implementors.size() != 1 ) {
// if either are explicitly defined as TRUE we need a RegionFactory // if either is explicitly defined as TRUE we need a RegionFactory
if ( useSecondLevelCache == TRUE || useQueryCache == TRUE ) { if ( ( useSecondLevelCache != null && useSecondLevelCache == TRUE )
|| ( useQueryCache != null && useQueryCache == TRUE ) ) {
throw new CacheException( "Caching was explicitly requested, but no RegionFactory was defined and there is not a single registered RegionFactory" ); throw new CacheException( "Caching was explicitly requested, but no RegionFactory was defined and there is not a single registered RegionFactory" );
} }
} }
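Editor's note (not part of the diff): the Boolean flags checked here correspond to the standard cache settings. A small sketch of the configuration combination that the guard above rejects when no RegionFactory is configured and there is not exactly one registered implementor:

import java.util.HashMap;
import java.util.Map;

import org.hibernate.cfg.AvailableSettings;

public class ExplicitCachingSettingsSketch {

    // Returns the settings combination the guard above reacts to: caching is
    // explicitly requested, so bootstrap throws the CacheException instead of
    // silently proceeding without a RegionFactory.
    public static Map<String, Object> explicitCachingWithoutRegionFactory() {
        Map<String, Object> settings = new HashMap<>();
        settings.put( AvailableSettings.USE_SECOND_LEVEL_CACHE, "true" ); // hibernate.cache.use_second_level_cache
        settings.put( AvailableSettings.USE_QUERY_CACHE, "true" );        // hibernate.cache.use_query_cache
        // Deliberately no AvailableSettings.CACHE_REGION_FACTORY entry.
        return settings;
    }
}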

View File

@ -77,7 +77,7 @@ public class StandardCacheEntryImpl implements CacheEntry {
@Override @Override
public Serializable[] getDisassembledState() { public Serializable[] getDisassembledState() {
// todo: this was added to support initializing an entity's EntityEntry snapshot during reattach; // todo: this was added to support initializing an entity's EntityEntry snapshot during reattach;
// this should be refactored to instead expose a method to assemble a EntityEntry based on this // this should be refactored to instead expose a method to assemble an EntityEntry based on this
// state for return. // state for return.
return disassembledState; return disassembledState;
} }

View File

@ -569,7 +569,7 @@ public final class AnnotationBinder {
LOG.unsupportedMappedSuperclassWithEntityInheritance( clazzToProcess.getName() ); LOG.unsupportedMappedSuperclassWithEntityInheritance( clazzToProcess.getName() );
} }
//TODO: be more strict with secondarytable allowance (not for ids, not for secondary table join columns etc) //TODO: be more strict with secondary table allowance (not for ids, not for secondary table join columns etc)
InheritanceState inheritanceState = inheritanceStatePerClass.get( clazzToProcess ); InheritanceState inheritanceState = inheritanceStatePerClass.get( clazzToProcess );
AnnotatedClassType classType = context.getMetadataCollector().getClassType( clazzToProcess ); AnnotatedClassType classType = context.getMetadataCollector().getClassType( clazzToProcess );
@ -1097,7 +1097,7 @@ public final class AnnotationBinder {
); );
AccessType propertyAccessor = entityBinder.getPropertyAccessor( compositeClass ); AccessType propertyAccessor = entityBinder.getPropertyAccessor( compositeClass );
//In JPA 2, there is a shortcut if the IdClass is the Pk of the associated class pointed to by the id //In JPA 2, there is a shortcut if the IdClass is the Pk of the associated class pointed to by the id
//it ought to be treated as an embedded and not a real IdClass (at least in the Hibernate's internal way //it ought to be treated as an embedded and not a real IdClass (at least in the Hibernate's internal way)
final boolean isFakeIdClass = isIdClassPkOfTheAssociatedEntity( final boolean isFakeIdClass = isIdClassPkOfTheAssociatedEntity(
elementsToProcess, elementsToProcess,
compositeClass, compositeClass,
@ -1673,7 +1673,7 @@ public final class AnnotationBinder {
} }
} }
if ( isRequiredAnnotationPresent ) { if ( isRequiredAnnotationPresent ) {
//create a PropertyData fpr the specJ property holding the mapping //create a PropertyData for the specJ property holding the mapping
PropertyData specJPropertyData = new PropertyInferredData( PropertyData specJPropertyData = new PropertyInferredData(
declaringClass, declaringClass,
//same dec //same dec
@ -2116,7 +2116,7 @@ public final class AnnotationBinder {
} }
{ {
Column[] keyColumns = null; Column[] keyColumns = null;
//JPA 2 has priority and has different default column values, differenciate legacy from JPA 2 //JPA 2 has priority and has different default column values, differentiate legacy from JPA 2
Boolean isJPA2 = null; Boolean isJPA2 = null;
if ( property.isAnnotationPresent( MapKeyColumn.class ) ) { if ( property.isAnnotationPresent( MapKeyColumn.class ) ) {
isJPA2 = Boolean.TRUE; isJPA2 = Boolean.TRUE;
@ -2147,7 +2147,7 @@ public final class AnnotationBinder {
} }
{ {
JoinColumn[] joinKeyColumns = null; JoinColumn[] joinKeyColumns = null;
//JPA 2 has priority and has different default column values, differenciate legacy from JPA 2 //JPA 2 has priority and has different default column values, differentiate legacy from JPA 2
Boolean isJPA2 = null; Boolean isJPA2 = null;
if ( property.isAnnotationPresent( MapKeyJoinColumns.class ) ) { if ( property.isAnnotationPresent( MapKeyJoinColumns.class ) ) {
isJPA2 = Boolean.TRUE; isJPA2 = Boolean.TRUE;
@ -2649,7 +2649,7 @@ public final class AnnotationBinder {
} }
associationTableBinder.setUniqueConstraints( uniqueConstraints ); associationTableBinder.setUniqueConstraints( uniqueConstraints );
associationTableBinder.setJpaIndex( jpaIndexes ); associationTableBinder.setJpaIndex( jpaIndexes );
//set check constaint in the second pass //set check constraint in the second pass
annJoins = joins.length == 0 ? null : joins; annJoins = joins.length == 0 ? null : joins;
annInverseJoins = inverseJoins == null || inverseJoins.length == 0 ? null : inverseJoins; annInverseJoins = inverseJoins == null || inverseJoins.length == 0 ? null : inverseJoins;
} }
@ -2687,7 +2687,7 @@ public final class AnnotationBinder {
boolean isIdentifierMapper, boolean isIdentifierMapper,
MetadataBuildingContext buildingContext, MetadataBuildingContext buildingContext,
boolean isComponentEmbedded, boolean isComponentEmbedded,
boolean isId, //is a identifier boolean isId, //is an identifier
Map<XClass, InheritanceState> inheritanceStatePerClass, Map<XClass, InheritanceState> inheritanceStatePerClass,
String referencedEntityName, //is a component who is overridden by a @MapsId String referencedEntityName, //is a component who is overridden by a @MapsId
Ejb3JoinColumn[] columns) { Ejb3JoinColumn[] columns) {
@ -2834,7 +2834,7 @@ public final class AnnotationBinder {
//add elements of the embeddable superclass //add elements of the embeddable superclass
XClass superClass = xClassProcessed.getSuperclass(); XClass superClass = xClassProcessed.getSuperclass();
while ( superClass != null && superClass.isAnnotationPresent( MappedSuperclass.class ) ) { while ( superClass != null && superClass.isAnnotationPresent( MappedSuperclass.class ) ) {
//FIXME: proper support of typevariables incl var resolved at upper levels //FIXME: proper support of type variables incl var resolved at upper levels
propContainer = new PropertyContainer( superClass, xClassProcessed, propertyAccessor ); propContainer = new PropertyContainer( superClass, xClassProcessed, propertyAccessor );
addElementsOfClass( classElements, propContainer, buildingContext ); addElementsOfClass( classElements, propContainer, buildingContext );
superClass = superClass.getSuperclass(); superClass = superClass.getSuperclass();
@ -3146,7 +3146,7 @@ public final class AnnotationBinder {
final JoinColumn joinColumn = property.getAnnotation( JoinColumn.class ); final JoinColumn joinColumn = property.getAnnotation( JoinColumn.class );
final JoinColumns joinColumns = property.getAnnotation( JoinColumns.class ); final JoinColumns joinColumns = property.getAnnotation( JoinColumns.class );
//Make sure that JPA1 key-many-to-one columns are read only tooj //Make sure that JPA1 key-many-to-one columns are read only too
boolean hasSpecjManyToOne=false; boolean hasSpecjManyToOne=false;
if ( context.getBuildingOptions().isSpecjProprietarySyntaxEnabled() ) { if ( context.getBuildingOptions().isSpecjProprietarySyntaxEnabled() ) {
String columnName = ""; String columnName = "";
@ -3298,7 +3298,7 @@ public final class AnnotationBinder {
KeyValue identifier = propertyHolder.getIdentifier(); KeyValue identifier = propertyHolder.getIdentifier();
if ( identifier == null ) { if ( identifier == null ) {
//this is a @OneToOne in an @EmbeddedId (the persistentClass.identifier is not set yet, it's being built) //this is a @OneToOne in an @EmbeddedId (the persistentClass.identifier is not set yet, it's being built)
//by definition the PK cannot refers to itself so it cannot map to itself //by definition the PK cannot refer to itself so it cannot map to itself
mapToPK = false; mapToPK = false;
} }
else { else {
@ -3635,7 +3635,7 @@ public final class AnnotationBinder {
InheritanceState state = new InheritanceState( clazz, inheritanceStatePerClass, buildingContext ); InheritanceState state = new InheritanceState( clazz, inheritanceStatePerClass, buildingContext );
if ( superclassState != null ) { if ( superclassState != null ) {
//the classes are ordered thus preventing an NPE //the classes are ordered thus preventing an NPE
//FIXME if an entity has subclasses annotated @MappedSperclass wo sub @Entity this is wrong //FIXME if an entity has subclasses annotated @MappedSuperclass wo sub @Entity this is wrong
superclassState.setHasSiblings( true ); superclassState.setHasSiblings( true );
InheritanceState superEntityState = InheritanceState.getInheritanceStateOfSuperEntity( InheritanceState superEntityState = InheritanceState.getInheritanceStateOfSuperEntity(
clazz, inheritanceStatePerClass clazz, inheritanceStatePerClass

View File

@ -312,7 +312,7 @@ public class BinderHelper {
columnsList.append( ") " ); columnsList.append( ") " );
if ( associatedEntity != null ) { if ( associatedEntity != null ) {
//overidden destination //overridden destination
columnsList.append( "of " ) columnsList.append( "of " )
.append( associatedEntity.getEntityName() ) .append( associatedEntity.getEntityName() )
.append( "." ) .append( "." )
@ -439,7 +439,7 @@ public class BinderHelper {
|| "embedded".equals( property.getPropertyAccessorName() ) ) { || "embedded".equals( property.getPropertyAccessorName() ) ) {
return; return;
} }
// FIXME cannot use subproperties becasue the caller needs top level properties // FIXME cannot use subproperties because the caller needs top level properties
// if ( property.isComposite() ) { // if ( property.isComposite() ) {
// Iterator subProperties = ( (Component) property.getValue() ).getPropertyIterator(); // Iterator subProperties = ( (Component) property.getValue() ).getPropertyIterator();
// while ( subProperties.hasNext() ) { // while ( subProperties.hasNext() ) {
@ -460,7 +460,7 @@ public class BinderHelper {
} }
/** /**
* Retrieve the property by path in a recursive way, including IndetifierProperty in the loop * Retrieve the property by path in a recursive way, including IdentifierProperty in the loop
* If propertyName is null or empty, the IdentifierProperty is returned * If propertyName is null or empty, the IdentifierProperty is returned
*/ */
public static Property findPropertyByName(PersistentClass associatedClass, String propertyName) { public static Property findPropertyByName(PersistentClass associatedClass, String propertyName) {
@ -686,7 +686,7 @@ public class BinderHelper {
if ( gen == null ) { if ( gen == null ) {
throw new AnnotationException( "Unknown named generator (@GeneratedValue#generatorName): " + generatorName ); throw new AnnotationException( "Unknown named generator (@GeneratedValue#generatorName): " + generatorName );
} }
//This is quite vague in the spec but a generator could override the generate choice //This is quite vague in the spec but a generator could override the generator choice
String identifierGeneratorStrategy = gen.getStrategy(); String identifierGeneratorStrategy = gen.getStrategy();
//yuk! this is a hack not to override 'AUTO' even if generator is set //yuk! this is a hack not to override 'AUTO' even if generator is set
final boolean avoidOverriding = final boolean avoidOverriding =

View File

@ -58,7 +58,7 @@ import org.hibernate.mapping.Table;
* @author Emmanuel Bernard * @author Emmanuel Bernard
*/ */
public class ComponentPropertyHolder extends AbstractPropertyHolder { public class ComponentPropertyHolder extends AbstractPropertyHolder {
//TODO introduce a overrideTable() method for columns held by sec table rather than the hack //TODO introduce an overrideTable() method for columns held by sec table rather than the hack
// joinsPerRealTableName in ClassPropertyHolder // joinsPerRealTableName in ClassPropertyHolder
private Component component; private Component component;
private boolean isOrWithinEmbeddedId; private boolean isOrWithinEmbeddedId;

View File

@ -61,8 +61,7 @@ public class DefaultComponentSafeNamingStrategy extends EJB3NamingStrategy {
return tableName; return tableName;
} }
else { else {
//use of a stringbuffer to workaround a JDK bug return new StringBuilder( ownerEntityTable ).append( "_" )
return new StringBuffer( ownerEntityTable ).append( "_" )
.append( .append(
associatedEntityTable != null ? associatedEntityTable != null ?
associatedEntityTable : associatedEntityTable :

View File

@ -96,8 +96,7 @@ public class DefaultNamingStrategy implements NamingStrategy, Serializable {
return tableName; return tableName;
} }
else { else {
//use of a stringbuffer to workaround a JDK bug return new StringBuilder(ownerEntityTable).append("_")
return new StringBuffer(ownerEntityTable).append("_")
.append( .append(
associatedEntityTable != null ? associatedEntityTable != null ?
associatedEntityTable : associatedEntityTable :

View File

@ -72,8 +72,7 @@ public class EJB3NamingStrategy implements NamingStrategy, Serializable {
return tableName; return tableName;
} }
else { else {
//use of a stringbuffer to workaround a JDK bug return new StringBuilder( ownerEntityTable ).append( "_" )
return new StringBuffer( ownerEntityTable ).append( "_" )
.append( .append(
associatedEntityTable != null ? associatedEntityTable != null ?
associatedEntityTable : associatedEntityTable :

View File

@ -630,7 +630,7 @@ public class Ejb3Column {
} }
} }
//must only be called after all setters are defined and before bind //must only be called after all setters are defined and before binding
private void extractDataFromPropertyData(PropertyData inferredData) { private void extractDataFromPropertyData(PropertyData inferredData) {
if ( inferredData != null ) { if ( inferredData != null ) {
XProperty property = inferredData.getProperty(); XProperty property = inferredData.getProperty();

View File

@ -418,7 +418,7 @@ public class Ejb3JoinColumn extends Ejb3Column {
PersistentClass persistentClass, PersistentClass persistentClass,
Map<String, Join> joins, Map<String, Join> joins,
Map<XClass, InheritanceState> inheritanceStatePerClass) { Map<XClass, InheritanceState> inheritanceStatePerClass) {
// TODO shouldn't we deduce the classname from the persistentclasS? // TODO shouldn't we deduce the classname from the persistentClass?
this.propertyHolder = PropertyHolderBuilder.buildPropertyHolder( this.propertyHolder = PropertyHolderBuilder.buildPropertyHolder(
persistentClass, persistentClass,
joins, joins,

View File

@ -110,8 +110,7 @@ public class ImprovedNamingStrategy implements NamingStrategy, Serializable {
return tableName; return tableName;
} }
else { else {
//use of a stringbuffer to workaround a JDK bug return new StringBuilder(ownerEntityTable).append("_")
return new StringBuffer(ownerEntityTable).append("_")
.append( .append(
associatedEntityTable != null ? associatedEntityTable != null ?
associatedEntityTable : associatedEntityTable :

View File

@ -302,7 +302,7 @@ public class InheritanceState {
org.hibernate.mapping.MappedSuperclass parentSuperclass = mappedSuperclass; org.hibernate.mapping.MappedSuperclass parentSuperclass = mappedSuperclass;
final Class<?> type = buildingContext.getBootstrapContext().getReflectionManager() final Class<?> type = buildingContext.getBootstrapContext().getReflectionManager()
.toClass( classesToProcessForMappedSuperclass.get( index ) ); .toClass( classesToProcessForMappedSuperclass.get( index ) );
//add MAppedSuperclass if not already there //add MappedSuperclass if not already there
mappedSuperclass = buildingContext.getMetadataCollector().getMappedSuperclass( type ); mappedSuperclass = buildingContext.getMetadataCollector().getMappedSuperclass( type );
if ( mappedSuperclass == null ) { if ( mappedSuperclass == null ) {
mappedSuperclass = new org.hibernate.mapping.MappedSuperclass( parentSuperclass, superEntity ); mappedSuperclass = new org.hibernate.mapping.MappedSuperclass( parentSuperclass, superEntity );

View File

@ -49,7 +49,7 @@ public class OneToOneSecondPass implements SecondPass {
private String cascadeStrategy; private String cascadeStrategy;
private Ejb3JoinColumn[] joinColumns; private Ejb3JoinColumn[] joinColumns;
//that suck, we should read that from the property mainly //that sucks, we should read that from the property mainly
public OneToOneSecondPass( public OneToOneSecondPass(
String mappedBy, String mappedBy,
String ownerEntity, String ownerEntity,

View File

@ -51,7 +51,7 @@ public class PropertyPreloadedData implements PropertyData {
} }
public XClass getDeclaringClass() { public XClass getDeclaringClass() {
//Preloaded properties are artificial wrapper for colleciton element accesses //Preloaded properties are artificial wrapper for collection element accesses
//and idClass creation, ignore. //and idClass creation, ignore.
return null; return null;
} }

View File

@ -551,11 +551,11 @@ public abstract class CollectionBinder {
collection.setInverse( isMappedBy ); collection.setInverse( isMappedBy );
//many to many may need some second pass informations //many to many may need some second pass information
if ( !oneToMany && isMappedBy ) { if ( !oneToMany && isMappedBy ) {
metadataCollector.addMappedBy( getCollectionType().getName(), mappedBy, propertyName ); metadataCollector.addMappedBy( getCollectionType().getName(), mappedBy, propertyName );
} }
//TODO reducce tableBinder != null and oneToMany //TODO reduce tableBinder != null and oneToMany
XClass collectionType = getCollectionType(); XClass collectionType = getCollectionType();
if ( inheritanceStatePerClass == null) throw new AssertionFailure( "inheritanceStatePerClass not set" ); if ( inheritanceStatePerClass == null) throw new AssertionFailure( "inheritanceStatePerClass not set" );
SecondPass sp = getSecondPass( SecondPass sp = getSecondPass(

View File

@ -187,7 +187,7 @@ public class MapBinder extends CollectionBinder {
} }
else { else {
//this is a true Map mapping //this is a true Map mapping
//TODO ugly copy/pastle from CollectionBinder.bindManyToManySecondPass //TODO ugly copy/paste from CollectionBinder.bindManyToManySecondPass
String mapKeyType; String mapKeyType;
Class target = void.class; Class target = void.class;
/* /*
@ -323,7 +323,7 @@ public class MapBinder extends CollectionBinder {
column.setTable( mapValue.getCollectionTable() ); column.setTable( mapValue.getCollectionTable() );
} }
elementBinder.setColumns( elementColumns ); elementBinder.setColumns( elementColumns );
//do not call setType as it extract the type from @Type //do not call setType as it extracts the type from @Type
//the algorithm generally does not apply for map key anyway //the algorithm generally does not apply for map key anyway
elementBinder.setType( elementBinder.setType(
property, property,
@ -338,7 +338,7 @@ public class MapBinder extends CollectionBinder {
} }
//FIXME pass the Index Entity JoinColumns //FIXME pass the Index Entity JoinColumns
if ( !collection.isOneToMany() ) { if ( !collection.isOneToMany() ) {
//index column shoud not be null //index column should not be null
for (Ejb3JoinColumn col : mapKeyManyToManyColumns) { for (Ejb3JoinColumn col : mapKeyManyToManyColumns) {
col.forceNotNull(); col.forceNotNull();
} }

View File

@ -211,8 +211,8 @@ public class PropertyBinder {
private Property bind(Property prop) { private Property bind(Property prop) {
if (isId) { if (isId) {
final RootClass rootClass = ( RootClass ) holder.getPersistentClass(); final RootClass rootClass = ( RootClass ) holder.getPersistentClass();
//if an xToMany, it as to be wrapped today. //if an xToMany, it has to be wrapped today.
//FIXME this pose a problem as the PK is the class instead of the associated class which is not really compliant with the spec //FIXME this poses a problem as the PK is the class instead of the associated class which is not really compliant with the spec
if ( isXToMany || entityBinder.wrapIdsInEmbeddedComponents() ) { if ( isXToMany || entityBinder.wrapIdsInEmbeddedComponents() ) {
Component identifier = (Component) rootClass.getIdentifier(); Component identifier = (Component) rootClass.getIdentifier();
if (identifier == null) { if (identifier == null) {

View File

@ -1378,7 +1378,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
} }
} }
if ( elementsForProperty.size() == 0 && defaults.canUseJavaAnnotations() ) { if ( elementsForProperty.size() == 0 && defaults.canUseJavaAnnotations() ) {
//we have nothing, so Java annotations might occurs //we have nothing, so Java annotations might occur
Annotation annotation = getPhysicalAnnotation( Version.class ); Annotation annotation = getPhysicalAnnotation( Version.class );
if ( annotation != null ) { if ( annotation != null ) {
annotationList.add( annotation ); annotationList.add( annotation );
@ -2872,7 +2872,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
secondaryTables.add( AnnotationFactory.create( annotation ) ); secondaryTables.add( AnnotationFactory.create( annotation ) );
} }
/* /*
* You can't have both secondary table in XML and Java, * You can't have both secondary tables in XML and Java,
* since there would be no way to "remove" a secondary table * since there would be no way to "remove" a secondary table
*/ */
if ( secondaryTables.size() == 0 && defaults.canUseJavaAnnotations() ) { if ( secondaryTables.size() == 0 && defaults.canUseJavaAnnotations() ) {

View File

@ -146,7 +146,7 @@ public class BeanValidationIntegrator implements Integrator {
} }
else { else {
// otherwise check the validation modes // otherwise check the validation modes
// todo : in many ways this duplicates thew checks done on the TypeSafeActivator when a ValidatorFactory could not be obtained // todo : in many ways this duplicates the checks done on the TypeSafeActivator when a ValidatorFactory could not be obtained
validateMissingBeanValidationApi( modes ); validateMissingBeanValidationApi( modes );
} }
} }

View File

@ -762,7 +762,7 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers
// AST in ORM 5+, handling this type of condition is either extremely difficult or impossible. Forcing // AST in ORM 5+, handling this type of condition is either extremely difficult or impossible. Forcing
// recreation isn't ideal, but not really any other option in ORM 4. // recreation isn't ideal, but not really any other option in ORM 4.
// Selecting a type used in where part of update statement // Selecting a type used in where part of update statement
// (must match condidion in org.hibernate.persister.collection.BasicCollectionPersister.doUpdateRows). // (must match condition in org.hibernate.persister.collection.BasicCollectionPersister#doUpdateRows).
// See HHH-9474 // See HHH-9474
Type whereType; Type whereType;
if ( persister.hasIndex() ) { if ( persister.hasIndex() ) {

View File

@ -108,7 +108,7 @@ public class ThreadLocalSessionContext extends AbstractCurrentSessionContext {
} }
private boolean needsWrapping(Session session) { private boolean needsWrapping(Session session) {
// try to make sure we don't wrap and already wrapped session // try to make sure we don't wrap an already wrapped session
if ( Proxy.isProxyClass( session.getClass() ) ) { if ( Proxy.isProxyClass( session.getClass() ) ) {
final InvocationHandler invocationHandler = Proxy.getInvocationHandler( session ); final InvocationHandler invocationHandler = Proxy.getInvocationHandler( session );
if ( TransactionProtectionWrapper.class.isInstance( invocationHandler ) ) { if ( TransactionProtectionWrapper.class.isInstance( invocationHandler ) ) {
@ -182,7 +182,7 @@ public class ThreadLocalSessionContext extends AbstractCurrentSessionContext {
SESSION_PROXY_INTERFACES, SESSION_PROXY_INTERFACES,
wrapper wrapper
); );
// yick! need this for proper serialization/deserialization handling... // yuck! need this for proper serialization/deserialization handling...
wrapper.setWrapped( wrapped ); wrapper.setWrapped( wrapped );
return wrapped; return wrapped;
} }
@ -315,7 +315,7 @@ public class ThreadLocalSessionContext extends AbstractCurrentSessionContext {
else if ( "getStatistics".equals( methodName ) else if ( "getStatistics".equals( methodName )
|| "isOpen".equals( methodName ) || "isOpen".equals( methodName )
|| "getListeners".equals( methodName ) ) { || "getListeners".equals( methodName ) ) {
// allow these to go through the the real session no matter what // allow these to go through the real session no matter what
LOG.tracef( "Allowing invocation [%s] to proceed to real session", methodName ); LOG.tracef( "Allowing invocation [%s] to proceed to real session", methodName );
} }
else if ( !realSession.isOpen() ) { else if ( !realSession.isOpen() ) {

View File

@ -372,7 +372,7 @@ public abstract class AbstractHANADialect extends Dialect {
// stream passed in via // stream passed in via
// PreparedStatement.setCharacterStream(int,Reader,long) // PreparedStatement.setCharacterStream(int,Reader,long)
// after the stream has been processed. this causes problems later if we are // after the stream has been processed. this causes problems later if we are
// using non-contexual lob creation and HANA then closes our StringReader. // using non-contextual lob creation and HANA then closes our StringReader.
// see test case LobLocatorTest // see test case LobLocatorTest
private static class HANAClobTypeDescriptor extends ClobTypeDescriptor { private static class HANAClobTypeDescriptor extends ClobTypeDescriptor {
@ -774,8 +774,7 @@ public abstract class AbstractHANADialect extends Dialect {
registerHanaKeywords(); registerHanaKeywords();
// createBlob() and createClob() are not supported by the HANA JDBC // createBlob() and createClob() are not supported by the HANA JDBC driver
// driver
getDefaultProperties().setProperty( AvailableSettings.NON_CONTEXTUAL_LOB_CREATION, "true" ); getDefaultProperties().setProperty( AvailableSettings.NON_CONTEXTUAL_LOB_CREATION, "true" );
// getGeneratedKeys() is not supported by the HANA JDBC driver // getGeneratedKeys() is not supported by the HANA JDBC driver

View File

@ -148,7 +148,7 @@ abstract class AbstractTransactSQLDialect extends Dialect {
@Override @Override
public String applyLocksToSql(String sql, LockOptions aliasedLockOptions, Map<String, String[]> keyColumnNames) { public String applyLocksToSql(String sql, LockOptions aliasedLockOptions, Map<String, String[]> keyColumnNames) {
// TODO: merge additional lockoptions support in Dialect.applyLocksToSql // TODO: merge additional lock options support in Dialect.applyLocksToSql
final Iterator itr = aliasedLockOptions.getAliasLockIterator(); final Iterator itr = aliasedLockOptions.getAliasLockIterator();
final StringBuilder buffer = new StringBuilder( sql ); final StringBuilder buffer = new StringBuilder( sql );

View File

@ -494,7 +494,7 @@ public class DB2Dialect extends Dialect {
/** /**
* {@inheritDoc} * {@inheritDoc}
* <p/> * <p/>
* NOTE : DB2 is know to support parameters in the <tt>SELECT</tt> clause, but only in casted form * NOTE : DB2 is known to support parameters in the <tt>SELECT</tt> clause, but only in casted form
* (see {@link #requiresCastingOfParametersInSelectClause()}). * (see {@link #requiresCastingOfParametersInSelectClause()}).
*/ */
@Override @Override
@ -646,7 +646,7 @@ public class DB2Dialect extends Dialect {
// we have one of: // we have one of:
// * ASC + NULLS LAST // * ASC + NULLS LAST
// * DESC + NULLS FIRST // * DESC + NULLS FIRST
// so just drop the null precedence. *NOTE: we could pass along the null precedence here, // so just drop the null precedence. *NOTE*: we could pass along the null precedence here,
// but only DB2 9.7 or greater understand it; dropping it is more portable across DB2 versions // but only DB2 9.7 or greater understand it; dropping it is more portable across DB2 versions
return super.renderOrderByElement( expression, collation, order, NullPrecedence.NONE ); return super.renderOrderByElement( expression, collation, order, NullPrecedence.NONE );
} }

View File

@ -597,6 +597,37 @@ public abstract class Dialect implements ConversionContext {
throw new NotYetImplementedFor6Exception(); throw new NotYetImplementedFor6Exception();
} }
/**
* Do the given JDBC type codes, as defined in {@link Types} represent
* essentially the same type in this dialect of SQL? The default
* implementation treats {@link Types#NUMERIC NUMERIC} and
* {@link Types#DECIMAL DECIMAL} as the same type, and
* {@link Types#FLOAT FLOAT}, {@link Types#REAL REAL}, and
* {@link Types#DOUBLE DOUBLE} as essentially the same type, since the
* ANSI SQL specification fails to meaningfully distinguish them.
*
* @param typeCode1 the first JDBC type code
* @param typeCode2 the second JDBC type code
*
* @return {@code true} if the two type codes are equivalent
*/
public boolean equivalentTypes(int typeCode1, int typeCode2) {
return typeCode1==typeCode2
|| isNumericOrDecimal(typeCode1) && isNumericOrDecimal(typeCode2)
|| isFloatOrRealOrDouble(typeCode1) && isFloatOrRealOrDouble(typeCode2);
}
private static boolean isNumericOrDecimal(int typeCode) {
return typeCode == Types.NUMERIC
|| typeCode == Types.DECIMAL;
}
private static boolean isFloatOrRealOrDouble(int typeCode) {
return typeCode == Types.FLOAT
|| typeCode == Types.REAL
|| typeCode == Types.DOUBLE;
}
/** /**
* Get an instance of the dialect specified by the current <tt>System</tt> properties. * Get an instance of the dialect specified by the current <tt>System</tt> properties.
* *

View File

@ -35,5 +35,4 @@ public class FirstLimitHandler extends AbstractNoOffsetLimitHandler {
public boolean bindLimitParametersFirst() { public boolean bindLimitParametersFirst() {
return true; return true;
} }
} }

View File

@ -277,7 +277,7 @@ public final class Collections {
} }
if ( loadedPersister != null ) { if ( loadedPersister != null ) {
// we will need to remove ye olde entries // we will need to remove the old entries
entry.setDoremove( true ); entry.setDoremove( true );
if ( entry.isDorecreate() ) { if ( entry.isDorecreate() ) {
LOG.trace( "Forcing collection initialization" ); LOG.trace( "Forcing collection initialization" );

View File

@ -256,7 +256,7 @@ public final class ForeignKeys {
return true; return true;
} }
// todo : shouldnt assumed be revered here? // todo : shouldn't assumed be reversed here?
return !isTransient( entityName, entity, assumed, session ); return !isTransient( entityName, entity, assumed, session );
} }

View File

@ -81,9 +81,9 @@ public final class Nullability {
* *
* *
* In the previous implementation, not-null stuffs where checked * In the previous implementation, not-null stuffs where checked
* filtering by level one only updateable * filtering by level one only updatable
* or insertable columns. So setting a sub component as update="false" * or insertable columns. So setting a sub component as update="false"
* has no effect on not-null check if the main component had good checkeability * has no effect on not-null check if the main component had good checkability
* In this implementation, we keep this feature. * In this implementation, we keep this feature.
* However, I never see any documentation mentioning that, but it's for * However, I never see any documentation mentioning that, but it's for
* sure a limitation. * sure a limitation.
@ -104,7 +104,7 @@ public final class Nullability {
GenerationTiming.NEVER == inMemoryValueGenerationStrategies[i].getGenerationTiming() ) { GenerationTiming.NEVER == inMemoryValueGenerationStrategies[i].getGenerationTiming() ) {
final Object value = values[i]; final Object value = values[i];
if ( !nullability[i] && value == null ) { if ( !nullability[i] && value == null ) {
//check basic level one nullablilty //check basic level one nullability
throw new PropertyValueException( throw new PropertyValueException(
"not-null property references a null or transient value", "not-null property references a null or transient value",
persister.getEntityName(), persister.getEntityName(),

View File

@ -353,7 +353,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
return dbValue; return dbValue;
} }
else { else {
// for a mutable natural there is a likelihood that the the information will already be // for a mutable natural id there is a likelihood that the information will already be
// snapshot-cached. // snapshot-cached.
final int[] props = persister.getNaturalIdentifierProperties(); final int[] props = persister.getNaturalIdentifierProperties();
final Object[] entitySnapshot = getDatabaseSnapshot( id, persister ); final Object[] entitySnapshot = getDatabaseSnapshot( id, persister );
@ -531,8 +531,8 @@ public class StatefulPersistenceContext implements PersistenceContext {
When a virtual method is called via an interface the JVM needs to resolve which concrete When a virtual method is called via an interface the JVM needs to resolve which concrete
implementation to call. This takes CPU cycles and is a performance penalty. It also prevents method implementation to call. This takes CPU cycles and is a performance penalty. It also prevents method
in-ling which further degrades performance. Casting to an implementation and making a direct method call inlining which further degrades performance. Casting to an implementation and making a direct method call
removes the virtual call, and allows the methods to be in-lined. In this critical code path, it has a very removes the virtual call, and allows the methods to be inlined. In this critical code path, it has a very
large impact on performance to make virtual method calls. large impact on performance to make virtual method calls.
*/ */
if (persister.getEntityEntryFactory() instanceof MutableEntityEntryFactory) { if (persister.getEntityEntryFactory() instanceof MutableEntityEntryFactory) {
@ -1958,7 +1958,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
persister = locateProperPersister( persister ); persister = locateProperPersister( persister );
// 'justAddedLocally' is meant to handle the case where we would get double stats jounaling // 'justAddedLocally' is meant to handle the case where we would get double stats journaling
// from a single load event. The first put journal would come from the natural id resolution; // from a single load event. The first put journal would come from the natural id resolution;
// the second comes from the entity loading. In this condition, we want to avoid the multiple // the second comes from the entity loading. In this condition, we want to avoid the multiple
// 'put' stats incrementing. // 'put' stats incrementing.
@ -2165,7 +2165,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
// todo : couple of things wrong here: // todo : couple of things wrong here:
// 1) should be using access strategy, not plain evict.. // 1) should be using access strategy, not plain evict..
// 2) should prefer session-cached values if any (requires interaction from removeLocalNaturalIdCrossReference // 2) should prefer session-cached values if any (requires interaction from removeLocalNaturalIdCrossReference)
persister = locateProperPersister( persister ); persister = locateProperPersister( persister );
final NaturalIdDataAccess naturalIdCacheAccessStrategy = persister.getNaturalIdCacheAccessStrategy(); final NaturalIdDataAccess naturalIdCacheAccessStrategy = persister.getNaturalIdCacheAccessStrategy();

View File

@ -403,27 +403,40 @@ public final class TwoPhaseLoad {
} }
/** /**
* Check if eager of the association is overriden by anything. * Check if eager of the association is overridden (i.e. skipping metamodel strategy), including (order sensitive):
* <ol>
* <li>fetch graph</li>
* <li>fetch profile</li>
* </ol>
* *
* @param session session * @param session session
* @param entityName entity name * @param entityName entity name
* @param associationName association name * @param associationName association name
* * @param associationType association type
* @param isDebugEnabled if debug log level enabled
* @return null if there is no overriding, true if it is overridden to eager and false if it is overridden to lazy * @return null if there is no overriding, true if it is overridden to eager and false if it is overridden to lazy
*/ */
private static Boolean getOverridingEager( private static Boolean getOverridingEager(
final SharedSessionContractImplementor session, final SharedSessionContractImplementor session,
final String entityName, final String entityName,
final String associationName, final String associationName,
final Type type, final Type associationType,
final boolean isDebugEnabled) { final boolean isDebugEnabled) {
// Performance: check type.isCollectionType() first, as type.isAssociationType() is megamorphic // Performance: check type.isCollectionType() first, as type.isAssociationType() is megamorphic
if ( type.isCollectionType() || type.isAssociationType() ) { if ( associationType.isCollectionType() || associationType.isAssociationType() ) {
// we can return false invariably for if the entity has been covered by entity graph,
// its associated JOIN has been present in the SQL generated and hence it would be loaded anyway
if ( session.isEnforcingFetchGraph() ) {
return false;
}
// check 'fetch profile' next; skip 'metamodel' if 'fetch profile' takes effect
final Boolean overridingEager = isEagerFetchProfile( session, entityName, associationName ); final Boolean overridingEager = isEagerFetchProfile( session, entityName, associationName );
//This method is very hot, and private so let's piggy back on the fact that the caller already knows the debugging state. if ( overridingEager != null ) {
if ( isDebugEnabled ) { //This method is very hot, and private so let's piggy back on the fact that the caller already knows the debugging state.
if ( overridingEager != null ) { if ( isDebugEnabled ) {
LOG.debugf( LOG.debugf(
"Overriding eager fetching using active fetch profile. EntityName: %s, associationName: %s, eager fetching: %s", "Overriding eager fetching using active fetch profile. EntityName: %s, associationName: %s, eager fetching: %s",
entityName, entityName,
@ -431,10 +444,10 @@ public final class TwoPhaseLoad {
overridingEager overridingEager
); );
} }
return overridingEager;
} }
return overridingEager;
} }
// let 'metamodel' decide eagerness
return null; return null;
} }
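Editor's note (not part of the diff): the "fetch graph" case given priority above is the standard JPA entity-graph hint. A hedged sketch with placeholder entity names, showing the kind of load during which getOverridingEager() now short-circuits to false for covered associations instead of consulting fetch profiles or the metamodel:

import java.util.Collections;
import java.util.List;

import javax.persistence.Entity;
import javax.persistence.EntityGraph;
import javax.persistence.EntityManager;
import javax.persistence.Id;
import javax.persistence.OneToMany;

public class FetchGraphHintSketch {

    // Minimal placeholder entities, present only so the snippet is self-contained.
    @Entity
    public static class Customer {
        @Id
        Long id;

        @OneToMany
        List<PhoneNumber> phones;
    }

    @Entity
    public static class PhoneNumber {
        @Id
        Long id;
    }

    public static Customer loadWithFetchGraph(EntityManager em, Long id) {
        EntityGraph<Customer> graph = em.createEntityGraph( Customer.class );
        graph.addAttributeNodes( "phones" );
        // While this find() runs with a fetch graph, the session is treated as
        // enforcing a fetch graph, so associations covered by the graph are
        // already joined in the generated SQL and need no eager override.
        return em.find( Customer.class, id,
                Collections.singletonMap( "javax.persistence.fetchgraph", graph ) );
    }
}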

View File

@ -26,7 +26,6 @@ import org.hibernate.engine.jdbc.LobCreationContext;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator; import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.jdbc.spi.JdbcServices; import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.query.spi.sql.NativeSQLQuerySpecification; import org.hibernate.engine.query.spi.sql.NativeSQLQuerySpecification;
import org.hibernate.graph.spi.GraphImplementor;
import org.hibernate.internal.util.config.ConfigurationHelper; import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.persister.entity.EntityPersister; import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.query.spi.QueryProducerImplementor; import org.hibernate.query.spi.QueryProducerImplementor;
@ -467,4 +466,11 @@ public interface SharedSessionContractImplementor
*/ */
PersistenceContext getPersistenceContextInternal(); PersistenceContext getPersistenceContextInternal();
default boolean isEnforcingFetchGraph() {
return false;
}
default void setEnforcingFetchGraph(boolean enforcingFetchGraph) {
}
} }

View File

@ -529,7 +529,7 @@ public class DefaultFlushEntityEventListener implements FlushEntityEventListener
int[] dirty = persister.resolveAttributeIndexes( ( (SelfDirtinessTracker) entity ).$$_hibernate_getDirtyAttributes() ); int[] dirty = persister.resolveAttributeIndexes( ( (SelfDirtinessTracker) entity ).$$_hibernate_getDirtyAttributes() );
// HHH-12051 - filter non-updatable attributes // HHH-12051 - filter non-updatable attributes
// TODO: add Updateability to EnhancementContext and skip dirty tracking of those attributes // TODO: add Updatability to EnhancementContext and skip dirty tracking of those attributes
int count = 0; int count = 0;
for ( int i : dirty ) { for ( int i : dirty ) {
if ( persister.getPropertyUpdateability()[i] ) { if ( persister.getPropertyUpdateability()[i] ) {
@ -573,7 +573,7 @@ public class DefaultFlushEntityEventListener implements FlushEntityEventListener
boolean dirtyCheckPossible = true; boolean dirtyCheckPossible = true;
if ( dirtyProperties == null ) { if ( dirtyProperties == null ) {
// Interceptor returned null, so do the dirtycheck ourself, if possible // Interceptor returned null, so do the dirty check ourself, if possible
try { try {
session.getEventListenerManager().dirtyCalculationStart(); session.getEventListenerManager().dirtyCalculationStart();

View File

@ -416,7 +416,7 @@ public class DefaultMergeEventListener extends AbstractSaveEventListener impleme
// //
// This second condition is a special case which allows // This second condition is a special case which allows
// an entity to be merged during the same transaction // an entity to be merged during the same transaction
// (though during a seperate operation) in which it was // (though during a separate operation) in which it was
// originally persisted/saved // originally persisted/saved
boolean changed = !persister.getVersionType().isSame( boolean changed = !persister.getVersionType().isSame(
persister.getVersion( target ), persister.getVersion( target ),

View File

@ -104,7 +104,7 @@ public class DefaultReplicateEventListener extends AbstractSaveEventListener imp
); );
// if can replicate, will result in a SQL UPDATE // if can replicate, will result in a SQL UPDATE
// else do nothing (don't even reassociate object!) // else do nothing (don't even re-associate object!)
if ( canReplicate ) { if ( canReplicate ) {
performReplication( entity, id, realOldVersion, persister, replicationMode, source ); performReplication( entity, id, realOldVersion, persister, replicationMode, source );
} }

View File

@ -40,12 +40,12 @@ public class DirtyCollectionSearchVisitor extends AbstractVisitor {
final PersistentCollection persistentCollection; final PersistentCollection persistentCollection;
if ( type.isArrayType() ) { if ( type.isArrayType() ) {
persistentCollection = session.getPersistenceContextInternal().getCollectionHolder( collection ); persistentCollection = session.getPersistenceContextInternal().getCollectionHolder( collection );
// if no array holder we found an unwrappered array (this can't occur, // if no array holder we found an unwrapped array (this can't occur,
// because we now always call wrap() before getting to here) // because we now always call wrap() before getting to here)
// return (ah==null) ? true : searchForDirtyCollections(ah, type); // return (ah==null) ? true : searchForDirtyCollections(ah, type);
} }
else { else {
// if not wrappered yet, its dirty (this can't occur, because // if not wrapped yet, its dirty (this can't occur, because
// we now always call wrap() before getting to here) // we now always call wrap() before getting to here)
// return ( ! (obj instanceof PersistentCollection) ) ? // return ( ! (obj instanceof PersistentCollection) ) ?
//true : searchForDirtyCollections( (PersistentCollection) obj, type ); //true : searchForDirtyCollections( (PersistentCollection) obj, type );

View File

@ -43,26 +43,26 @@ public class OnLockVisitor extends ReattachVisitor {
if ( isOwnerUnchanged( persister, extractCollectionKeyFromOwner( persister ), persistentCollection ) ) { if ( isOwnerUnchanged( persister, extractCollectionKeyFromOwner( persister ), persistentCollection ) ) {
// a "detached" collection that originally belonged to the same entity // a "detached" collection that originally belonged to the same entity
if ( persistentCollection.isDirty() ) { if ( persistentCollection.isDirty() ) {
throw new HibernateException( "reassociated object has dirty collection" ); throw new HibernateException( "re-associated object has dirty collection" );
} }
reattachCollection( persistentCollection, type ); reattachCollection( persistentCollection, type );
} }
else { else {
// a "detached" collection that belonged to a different entity // a "detached" collection that belonged to a different entity
throw new HibernateException( "reassociated object has dirty collection reference" ); throw new HibernateException( "re-associated object has dirty collection reference" );
} }
} }
else { else {
// a collection loaded in the current session // a collection loaded in the current session
// can not possibly be the collection belonging // can not possibly be the collection belonging
// to the entity passed to update() // to the entity passed to update()
throw new HibernateException( "reassociated object has dirty collection reference" ); throw new HibernateException( "re-associated object has dirty collection reference" );
} }
} }
else { else {
// brand new collection // brand new collection
//TODO: or an array!! we can't lock objects with arrays now?? //TODO: or an array!! we can't lock objects with arrays now??
throw new HibernateException( "reassociated object has dirty collection reference (or an array)" ); throw new HibernateException( "re-associated object has dirty collection reference (or an array)" );
} }
return null; return null;

View File

@ -68,7 +68,7 @@ public abstract class ProxyVisitor extends AbstractVisitor {
} }
else { else {
if ( !isCollectionSnapshotValid( collection ) ) { if ( !isCollectionSnapshotValid( collection ) ) {
throw new HibernateException( "could not reassociate uninitialized transient collection" ); throw new HibernateException( "could not re-associate uninitialized transient collection" );
} }
CollectionPersister collectionPersister = session.getFactory() CollectionPersister collectionPersister = session.getFactory()
.getCollectionPersister( collection.getRole() ); .getCollectionPersister( collection.getRole() );

View File

@ -40,7 +40,7 @@ public abstract class AbstractUUIDGenerator implements IdentifierGenerator {
/** /**
* Unique across JVMs on this machine (unless they load this class * Unique across JVMs on this machine (unless they load this class
* in the same quater second - very unlikely) * in the same quarter second - very unlikely)
*/ */
protected int getJVM() { protected int getJVM() {
return JVM; return JVM;
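For intuition, a minimal sketch of how a quarter-second-granularity JVM identifier can be obtained; the right-shift-by-8 derivation (256 ms per step) and the class name are assumptions for illustration, not necessarily the exact Hibernate code.

public final class JvmIdSketch {
	// Assumed derivation: take the class-load timestamp and drop the low 8 bits,
	// so two JVMs only produce the same value if they load this class within
	// the same ~256 ms window - roughly a quarter second.
	private static final int JVM = (int) ( System.currentTimeMillis() >>> 8 );

	public static int getJvmId() {
		return JVM;
	}

	public static void main(String[] args) {
		System.out.println( "JVM identifier: " + getJvmId() );
	}
}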

View File

@ -318,7 +318,7 @@ public class MultipleHiLoPerTableGenerator implements PersistentIdentifierGenera
if ( table == null ) { if ( table == null ) {
table = namespace.createTable( qualifiedTableName.getObjectName(), false ); table = namespace.createTable( qualifiedTableName.getObjectName(), false );
// todo : note sure the best solution here. do we add the columns if missing? other? // todo : not sure the best solution here. do we add the columns if missing? other?
table.setPrimaryKey( new PrimaryKey( table ) ); table.setPrimaryKey( new PrimaryKey( table ) );
final Column pkColumn = new ExportableColumn( final Column pkColumn = new ExportableColumn(

View File

@ -89,7 +89,7 @@ public class HiLoOptimizer extends AbstractOptimizer {
} }
// upperLimit defines the upper end of the bucket values // upperLimit defines the upper end of the bucket values
generationState.upperLimit = generationState.lastSourceValue.copy().multiplyBy( incrementSize ).increment(); generationState.upperLimit = generationState.lastSourceValue.copy().multiplyBy( incrementSize ).increment();
// initialize value to the low end of the bucket // initialize value to the lower end of the bucket
generationState.value = generationState.upperLimit.copy().subtract( incrementSize ); generationState.value = generationState.upperLimit.copy().subtract( incrementSize );
} }
else if ( ! generationState.upperLimit.gt( generationState.value ) ) { else if ( ! generationState.upperLimit.gt( generationState.value ) ) {
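A worked sketch of the bucket arithmetic above, using plain longs instead of IntegralDataTypeHolder; the concrete numbers (incrementSize = 10, source value = 3) are illustrative.

public class HiLoBucketSketch {
	public static void main(String[] args) {
		long incrementSize = 10;
		long lastSourceValue = 3;   // third value obtained from the table/sequence

		// upperLimit defines the upper end of the bucket values (exclusive)
		long upperLimit = lastSourceValue * incrementSize + 1;   // 31
		// initialize value to the lower end of the bucket
		long value = upperLimit - incrementSize;                 // 21

		// this bucket hands out ids 21..30; the next source value starts a new bucket
		while ( upperLimit > value ) {
			System.out.println( "generated id: " + value++ );
		}
	}
}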

View File

@ -34,7 +34,7 @@ public final class NoopOptimizer extends AbstractOptimizer {
// IMPL NOTE : this method is called concurrently and is // IMPL NOTE : this method is called concurrently and is
// not synchronized. It is very important to work on the // not synchronized. It is very important to work on the
// local variable: the field lastSourceValue is not // local variable: the field lastSourceValue is not
// reliable as it might be mutated by multipled threads. // reliable as it might be mutated by multiple threads.
// The lastSourceValue field is only accessed by tests, // The lastSourceValue field is only accessed by tests,
// so this is not a concern. // so this is not a concern.
IntegralDataTypeHolder value = callback.getNextValue(); IntegralDataTypeHolder value = callback.getNextValue();

View File

@ -72,7 +72,7 @@ public class PooledOptimizer extends AbstractOptimizer implements InitialValueAw
if ( generationState.hiValue == null ) { if ( generationState.hiValue == null ) {
generationState.value = callback.getNextValue(); generationState.value = callback.getNextValue();
// unfortunately not really safe to normalize this // unfortunately not really safe to normalize this
// to 1 as an initial value like we do the others // to 1 as an initial value like we do for the others
// because we would not be able to control this if // because we would not be able to control this if
// we are using a sequence... // we are using a sequence...
if ( generationState.value.lt( 1 ) ) { if ( generationState.value.lt( 1 ) ) {

View File

@ -737,7 +737,7 @@ public class TableGenerator implements PersistentIdentifierGenerator, Configurab
if ( table == null ) { if ( table == null ) {
table = namespace.createTable( qualifiedTableName.getObjectName(), false ); table = namespace.createTable( qualifiedTableName.getObjectName(), false );
// todo : note sure the best solution here. do we add the columns if missing? other? // todo : not sure the best solution here. do we add the columns if missing? other?
final Column segmentColumn = new ExportableColumn( final Column segmentColumn = new ExportableColumn(
database, database,
table, table,

View File

@ -249,8 +249,10 @@ public class TableStructure implements DatabaseStructure {
); );
Table table = namespace.locateTable( logicalQualifiedTableName.getObjectName() ); Table table = namespace.locateTable( logicalQualifiedTableName.getObjectName() );
boolean tableCreated = false;
if ( table == null ) { if ( table == null ) {
table = namespace.createTable( logicalQualifiedTableName.getObjectName(), false ); table = namespace.createTable( logicalQualifiedTableName.getObjectName(), false );
tableCreated = true;
} }
this.tableNameText = jdbcEnvironment.getQualifiedObjectNameFormatter().format( this.tableNameText = jdbcEnvironment.getQualifiedObjectNameFormatter().format(
@ -268,17 +270,19 @@ public class TableStructure implements DatabaseStructure {
this.updateQuery = "update " + tableNameText + this.updateQuery = "update " + tableNameText +
" set " + valueColumnNameText + "= ?" + " set " + valueColumnNameText + "= ?" +
" where " + valueColumnNameText + "=?"; " where " + valueColumnNameText + "=?";
if ( tableCreated ) {
ExportableColumn valueColumn = new ExportableColumn(
database,
table,
valueColumnNameText,
LongType.INSTANCE
);
ExportableColumn valueColumn = new ExportableColumn( table.addColumn( valueColumn );
database,
table,
valueColumnNameText,
LongType.INSTANCE
);
table.addColumn( valueColumn );
table.addInitCommand( table.addInitCommand(
new InitCommand( "insert into " + tableNameText + " values ( " + initialValue + " )" ) new InitCommand( "insert into " + tableNameText + " values ( " + initialValue + " )" )
); );
}
} }
} }

View File

@ -499,7 +499,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
public Session openSession() throws HibernateException { public Session openSession() throws HibernateException {
final CurrentTenantIdentifierResolver currentTenantIdentifierResolver = getCurrentTenantIdentifierResolver(); final CurrentTenantIdentifierResolver currentTenantIdentifierResolver = getCurrentTenantIdentifierResolver();
//We can only use reuse the defaultSessionOpenOptions as a constant when there is no TenantIdentifierResolver //We can only reuse the defaultSessionOpenOptions as a constant when there is no TenantIdentifierResolver
if ( currentTenantIdentifierResolver != null ) { if ( currentTenantIdentifierResolver != null ) {
return this.withOptions().openSession(); return this.withOptions().openSession();
} }
@ -510,7 +510,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
public Session openTemporarySession() throws HibernateException { public Session openTemporarySession() throws HibernateException {
final CurrentTenantIdentifierResolver currentTenantIdentifierResolver = getCurrentTenantIdentifierResolver(); final CurrentTenantIdentifierResolver currentTenantIdentifierResolver = getCurrentTenantIdentifierResolver();
//We can only use reuse the defaultSessionOpenOptions as a constant when there is no TenantIdentifierResolver //We can only reuse the defaultSessionOpenOptions as a constant when there is no TenantIdentifierResolver
if ( currentTenantIdentifierResolver != null ) { if ( currentTenantIdentifierResolver != null ) {
return buildTemporarySessionOpenOptions() return buildTemporarySessionOpenOptions()
.openSession(); .openSession();
@ -1055,7 +1055,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
return interceptor; return interceptor;
} }
// prefer the SF-scoped interceptor, prefer that to any Session-scoped interceptor prototype // prefer the SessionFactory-scoped interceptor, prefer that to any Session-scoped interceptor prototype
final Interceptor optionsInterceptor = options.getInterceptor(); final Interceptor optionsInterceptor = options.getInterceptor();
if ( optionsInterceptor != null && optionsInterceptor != EmptyInterceptor.INSTANCE ) { if ( optionsInterceptor != null && optionsInterceptor != EmptyInterceptor.INSTANCE ) {
return optionsInterceptor; return optionsInterceptor;
@ -1266,7 +1266,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public T connectionReleaseMode(ConnectionReleaseMode connectionReleaseMode) { public T connectionReleaseMode(ConnectionReleaseMode connectionReleaseMode) {
// NOTE : Legacy behavior (when only ConnectionReleaseMode was exposed) was to always acquire a // NOTE : Legacy behavior (when only ConnectionReleaseMode was exposed) was to always acquire a
// Connection using ConnectionAcquisitionMode.AS_NEEDED.. // Connection using ConnectionAcquisitionMode.AS_NEEDED.
final PhysicalConnectionHandlingMode handlingMode = PhysicalConnectionHandlingMode.interpret( final PhysicalConnectionHandlingMode handlingMode = PhysicalConnectionHandlingMode.interpret(
ConnectionAcquisitionMode.AS_NEEDED, ConnectionAcquisitionMode.AS_NEEDED,
@ -1334,7 +1334,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public T clearEventListeners() { public T clearEventListeners() {
if ( listeners == null ) { if ( listeners == null ) {
//Needs to initialize explicitly to an empty list as otherwise "null" immplies the default listeners will be applied //Needs to initialize explicitly to an empty list as otherwise "null" implies the default listeners will be applied
this.listeners = new ArrayList<>( 3 ); this.listeners = new ArrayList<>( 3 );
} }
else { else {

View File

@ -186,6 +186,8 @@ public class SessionImpl
private transient TransactionObserver transactionObserver; private transient TransactionObserver transactionObserver;
private transient boolean isEnforcingFetchGraph;
public SessionImpl(SessionFactoryImpl factory, SessionCreationOptions options) { public SessionImpl(SessionFactoryImpl factory, SessionCreationOptions options) {
super( factory, options ); super( factory, options );
@ -2758,10 +2760,14 @@ public class SessionImpl
loadAccess.with( lockOptions ); loadAccess.with( lockOptions );
} }
if ( getLoadQueryInfluencers().getEffectiveEntityGraph().getSemantic() == GraphSemantic.FETCH ) {
setEnforcingFetchGraph( true );
}
return loadAccess.load( primaryKey ); return loadAccess.load( primaryKey );
} }
catch ( EntityNotFoundException ignored ) { catch ( EntityNotFoundException ignored ) {
// DefaultLoadEventListener.returnNarrowedProxy may throw ENFE (see HHH-7861 for details), // DefaultLoadEventListener#returnNarrowedProxy() may throw ENFE (see HHH-7861 for details),
// which find() should not throw. Find() should return null if the entity was not found. // which find() should not throw. Find() should return null if the entity was not found.
if ( log.isDebugEnabled() ) { if ( log.isDebugEnabled() ) {
String entityName = entityClass != null ? entityClass.getName(): null; String entityName = entityClass != null ? entityClass.getName(): null;
@ -2783,7 +2789,7 @@ public class SessionImpl
} }
catch ( JDBCException e ) { catch ( JDBCException e ) {
if ( accessTransaction().isActive() && accessTransaction().getRollbackOnly() ) { if ( accessTransaction().isActive() && accessTransaction().getRollbackOnly() ) {
// Assume this is the similar to the WildFly / IronJacamar "feature" described under HHH-12472. // Assume this is similar to the WildFly / IronJacamar "feature" described under HHH-12472.
// Just log the exception and return null. // Just log the exception and return null.
if ( log.isDebugEnabled() ) { if ( log.isDebugEnabled() ) {
log.debug( "JDBCException was thrown for a transaction marked for rollback; " + log.debug( "JDBCException was thrown for a transaction marked for rollback; " +
@ -2802,6 +2808,7 @@ public class SessionImpl
finally { finally {
getLoadQueryInfluencers().getEffectiveEntityGraph().clear(); getLoadQueryInfluencers().getEffectiveEntityGraph().clear();
getLoadQueryInfluencers().setReadOnly( null ); getLoadQueryInfluencers().setReadOnly( null );
setEnforcingFetchGraph( false );
} }
} }
@ -3209,9 +3216,9 @@ public class SessionImpl
loadQueryInfluencers = (LoadQueryInfluencers) ois.readObject(); loadQueryInfluencers = (LoadQueryInfluencers) ois.readObject();
// LoadQueryInfluencers.getEnabledFilters() tries to validate each enabled // LoadQueryInfluencers#getEnabledFilters() tries to validate each enabled
// filter, which will fail when called before FilterImpl.afterDeserialize( factory ); // filter, which will fail when called before FilterImpl#afterDeserialize( factory );
// Instead lookup the filter by name and then call FilterImpl.afterDeserialize( factory ). // Instead lookup the filter by name and then call FilterImpl#afterDeserialize( factory ).
for ( String filterName : loadQueryInfluencers.getEnabledFilterNames() ) { for ( String filterName : loadQueryInfluencers.getEnabledFilterNames() ) {
( (FilterImpl) loadQueryInfluencers.getEnabledFilter( filterName ) ).afterDeserialize( getFactory() ); ( (FilterImpl) loadQueryInfluencers.getEnabledFilter( filterName ) ).afterDeserialize( getFactory() );
} }
@ -3224,4 +3231,15 @@ public class SessionImpl
} }
return readOnly; return readOnly;
} }
@Override
public boolean isEnforcingFetchGraph() {
return this.isEnforcingFetchGraph;
}
@Override
public void setEnforcingFetchGraph(boolean isEnforcingFetchGraph) {
this.isEnforcingFetchGraph = isEnforcingFetchGraph;
}
} }
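For context, a minimal sketch of the call path the new flag covers: a JPA find() executed with a fetch-graph hint. The entity class parameter and the "lineItems" attribute are assumed example names; the hint key is the standard javax.persistence.fetchgraph.

import java.util.Collections;
import javax.persistence.EntityGraph;
import javax.persistence.EntityManager;

public class FindWithFetchGraphSketch {

	static <T> T loadWithFetchGraph(EntityManager entityManager, Class<T> entityClass, Object id) {
		// "lineItems" is an assumed attribute name, purely for illustration
		EntityGraph<T> graph = entityManager.createEntityGraph( entityClass );
		graph.addAttributeNodes( "lineItems" );

		// While this find() runs, the session reports isEnforcingFetchGraph() == true;
		// the finally block above resets the flag once loading completes.
		return entityManager.find(
				entityClass,
				id,
				Collections.<String, Object>singletonMap( "javax.persistence.fetchgraph", graph )
		);
	}
}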

View File

@ -200,7 +200,7 @@ public final class StringHelper {
// enclosed in parentheses (HHH-10383) // enclosed in parentheses (HHH-10383)
// Examples: // Examples:
// 1) "... IN (?1", we assume that "?1" does not need to be enclosed because there // 1) "... IN (?1", we assume that "?1" does not need to be enclosed because there
// there is already a right-parenthesis; we assume there will be a matching right-parenthesis. // is already a right-parenthesis; we assume there will be a matching right-parenthesis.
// 2) "... IN ?1", we assume that "?1" needs to be enclosed in parentheses, because there // 2) "... IN ?1", we assume that "?1" needs to be enclosed in parentheses, because there
// is no left-parenthesis. // is no left-parenthesis.
@ -508,8 +508,8 @@ public final class StringHelper {
if ( string == null ) { if ( string == null ) {
return 0; return 0;
} }
// Impl note: takes advantage of the fact that an escpaed single quote // Impl note: takes advantage of the fact that an escaped single quote
// embedded within a quote-block can really be handled as two seperate // embedded within a quote-block can really be handled as two separate
// quote-blocks for the purposes of this method... // quote-blocks for the purposes of this method...
int count = 0; int count = 0;
int stringLength = string.length(); int stringLength = string.length();
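A small sketch of the observation in that note: if quote-blocks are counted by simply toggling on every single-quote character, an escaped quote ('') inside a literal just closes one block and opens another, so it needs no special handling. The method name and the sample SQL are illustrative.

public class QuoteBlockSketch {

	// counts quote-delimited blocks by toggling an "inside" flag on each single quote;
	// a doubled quote inside a literal simply ends one block and starts the next
	static int countQuoteBlocks(String sql) {
		int blocks = 0;
		boolean inside = false;
		for ( int i = 0; i < sql.length(); i++ ) {
			if ( sql.charAt( i ) == '\'' ) {
				if ( inside ) {
					blocks++;
				}
				inside = !inside;
			}
		}
		return blocks;
	}

	public static void main(String[] args) {
		// 'O''Brien' is a single literal to SQL, but two blocks to this counter
		System.out.println( countQuoteBlocks( "select * from person where name = 'O''Brien'" ) ); // prints 2
	}
}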
@ -668,7 +668,7 @@ public final class StringHelper {
*/ */
private static String cleanAlias(String alias) { private static String cleanAlias(String alias) {
char[] chars = alias.toCharArray(); char[] chars = alias.toCharArray();
// short cut check... // shortcut check...
if ( !Character.isLetter( chars[0] ) ) { if ( !Character.isLetter( chars[0] ) ) {
for ( int i = 1; i < chars.length; i++ ) { for ( int i = 1; i < chars.length; i++ ) {
// as soon as we encounter our first letter, return the substring // as soon as we encounter our first letter, return the substring

View File

@ -242,7 +242,7 @@ public final class PrimitiveWrapperHelper {
return (PrimitiveWrapperDescriptor<X>) DoubleDescriptor.INSTANCE; return (PrimitiveWrapperDescriptor<X>) DoubleDescriptor.INSTANCE;
} }
// most likely void.class, which we can't really handle here // most likely Void.class, which we can't really handle here
throw new IllegalArgumentException( "Unrecognized wrapper type class : " + wrapperClass.getName() ); throw new IllegalArgumentException( "Unrecognized wrapper type class : " + wrapperClass.getName() );
} }

View File

@ -31,7 +31,7 @@ public class ErrorLogger implements ErrorHandler, Serializable {
ErrorLogger.class.getName() ErrorLogger.class.getName()
); );
// lazily initalized // lazily initialized
private List<SAXParseException> errors; private List<SAXParseException> errors;
private String file; private String file;

View File

@ -200,7 +200,7 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
Map mergedIntegrationSettings = null; Map mergedIntegrationSettings = null;
Properties properties = persistenceUnit.getProperties(); Properties properties = persistenceUnit.getProperties();
if ( properties != null ) { if ( properties != null ) {
// original integratin setting entries take precedence // original integration setting entries take precedence
mergedIntegrationSettings = new HashMap( properties ); mergedIntegrationSettings = new HashMap( properties );
mergedIntegrationSettings.putAll( integrationSettings ); mergedIntegrationSettings.putAll( integrationSettings );
} }
@ -886,7 +886,7 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
// `IS_JTA_TXN_COORD` is a value set during `#normalizeTransactionCoordinator` to indicate whether // `IS_JTA_TXN_COORD` is a value set during `#normalizeTransactionCoordinator` to indicate whether
// the execution environment "is JTA" as best as it can tell.. // the execution environment "is JTA" as best as it can tell..
// //
// we use this value when JTA was not explicitly specified in regards the DataSource // we use this value when JTA was not explicitly specified in regards to the DataSource
final boolean isJtaTransactionCoordinator = (boolean) mergedSettings.configurationValues.remove( IS_JTA_TXN_COORD ); final boolean isJtaTransactionCoordinator = (boolean) mergedSettings.configurationValues.remove( IS_JTA_TXN_COORD );
final boolean isJta = useJtaDataSource == null ? isJtaTransactionCoordinator : useJtaDataSource; final boolean isJta = useJtaDataSource == null ? isJtaTransactionCoordinator : useJtaDataSource;

View File

@ -36,7 +36,7 @@ public class EnhancingClassTransformerImpl implements ClassTransformer {
byte[] classfileBuffer) throws IllegalClassFormatException { byte[] classfileBuffer) throws IllegalClassFormatException {
// The first design had the enhancer as a class variable. That approach had some goods and bads. // The first design had the enhancer as a class variable. That approach had some goods and bads.
// We don't had to create an enhancer for each class, but on the other end it would stay in memory forever. // We don't have to create an enhancer for each class, but on the other end it would stay in memory forever.
// It also assumed that all calls come from the same class loader, which is fair, but this makes it more robust. // It also assumed that all calls come from the same class loader, which is fair, but this makes it more robust.
try { try {

View File

@ -38,7 +38,7 @@ public final class XmlHelper {
return null; return null;
} }
// getElementsByTagName gives the corresponding elements in the whole // getElementsByTagName gives the corresponding elements in the whole
// descendance. We want only children // descendants. We want only children
NodeList children = element.getChildNodes(); NodeList children = element.getChildNodes();
ArrayList goodChildren = new ArrayList(); ArrayList goodChildren = new ArrayList();

View File

@ -333,8 +333,8 @@ public abstract class Collection implements Fetchable, Value, Filterable {
int i = 0; int i = 0;
while ( iterator.hasNext() ) { while ( iterator.hasNext() ) {
Selectable s = iterator.next(); Selectable s = iterator.next();
// exclude formulas and coluns that are not insertable or updatable // exclude formulas and columns that are not insertable or updatable
// since these values can be be repeated (HHH-5393) // since these values can be repeated (HHH-5393)
if ( !s.isFormula() && ( insertability[i] || updatability[i] ) ) { if ( !s.isFormula() && ( insertability[i] || updatability[i] ) ) {
Column col = (Column) s; Column col = (Column) s;
if ( !distinctColumns.add( col.getName() ) ) { if ( !distinctColumns.add( col.getName() ) ) {

View File

@ -222,10 +222,10 @@ public class Column implements Selectable, Serializable, Cloneable {
} }
/** /**
* Returns the underlying columns sqltypecode. * Returns the underlying columns SqlTypeCode.
* If null, it is because the sqltype code is unknown. * If null, it is because the SqlTypeCode is unknown.
* <p/> * <p/>
* Use #getSqlTypeCode(Mapping) to retrieve the sqltypecode used * Use #getSqlTypeCode(Mapping) to retrieve the SqlTypeCode used
* for the columns associated Value/Type. * for the columns associated Value/Type.
* *
* @return sqlTypeCode if it is set, otherwise null. * @return sqlTypeCode if it is set, otherwise null.

View File

@ -121,7 +121,7 @@ public class ForeignKey extends Constraint {
} }
/** /**
* Validates that columnspan of the foreignkey and the primarykey is the same. * Validates that column span of the foreign key and the primary key is the same.
* <p/> * <p/>
* Furthermore it aligns the length of the underlying tables columns. * Furthermore it aligns the length of the underlying tables columns.
*/ */

View File

@ -96,7 +96,7 @@ public class MappedSuperclass {
} }
public Property getIdentifierProperty() { public Property getIdentifierProperty() {
//get direct identifiermapper or the one from the super mappedSuperclass //get direct identifierMapper or the one from the super mappedSuperclass
// or the one from the super persistentClass // or the one from the super persistentClass
Property propagatedIdentifierProp = identifierProperty; Property propagatedIdentifierProp = identifierProperty;
if ( propagatedIdentifierProp == null ) { if ( propagatedIdentifierProp == null ) {
@ -142,7 +142,7 @@ public class MappedSuperclass {
} }
public Component getIdentifierMapper() { public Component getIdentifierMapper() {
//get direct identifiermapper or the one from the super mappedSuperclass //get direct identifierMapper or the one from the super mappedSuperclass
// or the one from the super persistentClass // or the one from the super persistentClass
Component propagatedMapper = identifierMapper; Component propagatedMapper = identifierMapper;
if ( propagatedMapper == null ) { if ( propagatedMapper == null ) {

View File

@ -66,7 +66,7 @@ public class OneToMany implements Value {
} }
public void createForeignKey() { public void createForeignKey() {
// no foreign key element of for a one-to-many // no foreign key element for a one-to-many
} }
public Iterator<Selectable> getColumnIterator() { public Iterator<Selectable> getColumnIterator() {

View File

@ -1008,6 +1008,6 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
this.superMappedSuperclass = superMappedSuperclass; this.superMappedSuperclass = superMappedSuperclass;
} }
// End of @Mappedsuperclass support // End of @MappedSuperclass support
} }

View File

@ -161,7 +161,7 @@ public class Property implements Serializable, MetaAttributable {
public boolean isUpdateable() { public boolean isUpdateable() {
// if the property mapping consists of all formulas, // if the property mapping consists of all formulas,
// make it non-updateable // make it non-updatable
return updateable && value.hasAnyUpdatableColumns(); return updateable && value.hasAnyUpdatableColumns();
} }

View File

@ -74,7 +74,7 @@ public class Set extends Collection {
} }
if ( pk.getColumnSpan() == getKey().getColumnSpan() ) { if ( pk.getColumnSpan() == getKey().getColumnSpan() ) {
//for backward compatibility, allow a set with no not-null //for backward compatibility, allow a set with no not-null
//element columns, using all columns in the row locater SQL //element columns, using all columns in the row locator SQL
//TODO: create an implicit not null constraint on all cols? //TODO: create an implicit not null constraint on all cols?
} }
else { else {

View File

@ -524,7 +524,7 @@ public abstract class SimpleValue implements KeyValue {
.getServiceRegistry() .getServiceRegistry()
.getService( ClassLoaderService.class ) .getService( ClassLoaderService.class )
).getName(); ).getName();
// todo : to fully support isNationalized here we need do the process hinted at above // todo : to fully support isNationalized here we need to do the process hinted at above
// essentially, much of the logic from #buildAttributeConverterTypeAdapter wrt resolving // essentially, much of the logic from #buildAttributeConverterTypeAdapter wrt resolving
// a (1) SqlTypeDescriptor, a (2) JavaTypeDescriptor and dynamically building a BasicType // a (1) SqlTypeDescriptor, a (2) JavaTypeDescriptor and dynamically building a BasicType
// combining them. // combining them.
@ -626,7 +626,7 @@ public abstract class SimpleValue implements KeyValue {
jdbcTypeCode = NationalizedTypeMappings.toNationalizedTypeCode( jdbcTypeCode ); jdbcTypeCode = NationalizedTypeMappings.toNationalizedTypeCode( jdbcTypeCode );
} }
// find the standard SqlTypeDescriptor for that JDBC type code (allow itr to be remapped if needed!) // find the standard SqlTypeDescriptor for that JDBC type code (allow it to be remapped if needed!)
final SqlTypeDescriptor sqlTypeDescriptor = getMetadata() final SqlTypeDescriptor sqlTypeDescriptor = getMetadata()
.getMetadataBuildingOptions() .getMetadataBuildingOptions()
.getServiceRegistry() .getServiceRegistry()

View File

@ -422,9 +422,10 @@ public class Table implements RelationalModel, Serializable, Exportable {
throw new HibernateException( "Missing column: " + col.getName() + " in " + Table.qualify( tableInfo.getCatalog(), tableInfo.getSchema(), tableInfo.getName())); throw new HibernateException( "Missing column: " + col.getName() + " in " + Table.qualify( tableInfo.getCatalog(), tableInfo.getSchema(), tableInfo.getName()));
} }
else { else {
final boolean typesMatch = col.getSqlType( dialect, mapping ).toLowerCase(Locale.ROOT) final boolean typesMatch =
.startsWith( columnInfo.getTypeName().toLowerCase(Locale.ROOT) ) dialect.equivalentTypes( columnInfo.getTypeCode(), col.getSqlTypeCode( mapping ) )
|| columnInfo.getTypeCode() == col.getSqlTypeCode( mapping ); || col.getSqlType( dialect, mapping ).toLowerCase(Locale.ROOT)
.startsWith( columnInfo.getTypeName().toLowerCase(Locale.ROOT) );
if ( !typesMatch ) { if ( !typesMatch ) {
throw new HibernateException( throw new HibernateException(
"Wrong column type in " + "Wrong column type in " +

View File

@ -405,7 +405,7 @@ public class AttributeFactory {
final boolean isManyToMany = isManyToMany( member ); final boolean isManyToMany = isManyToMany( member );
// First, determine the type of the elements and use that to help determine the // First, determine the type of the elements and use that to help determine the
// collection type) // collection type
final AttributeClassification elementClassification; final AttributeClassification elementClassification;
final AttributeClassification attributeClassification; final AttributeClassification attributeClassification;
if ( elementType.isAnyType() ) { if ( elementType.isAnyType() ) {

View File

@ -480,7 +480,7 @@ public class MetadataContext {
// nothing to do... // nothing to do...
} }
// todo : this does not account for @MappeSuperclass, mainly because this is not being tracked in our // todo : this does not account for @MappedSuperclass, mainly because this is not being tracked in our
// internal metamodel as populated from the annotations properly // internal metamodel as populated from the annotations properly
ManagedDomainType<? super X> superType = managedType.getSuperType(); ManagedDomainType<? super X> superType = managedType.getSuperType();
if ( superType != null ) { if ( superType != null ) {
@ -529,7 +529,7 @@ public class MetadataContext {
// appropriate attribute declarer in such cases and so the incoming metamodelClass most likely // appropriate attribute declarer in such cases and so the incoming metamodelClass most likely
// does not represent the declarer in such cases. // does not represent the declarer in such cases.
// //
// As a result, in the case of embeddable classes we simply use getField rather than get // As a result, in the case of embeddable classes we simply use getField rather than
// getDeclaredField // getDeclaredField
final boolean allowNonDeclaredFieldReference = final boolean allowNonDeclaredFieldReference =
attribute.getPersistentAttributeType() == Attribute.PersistentAttributeType.EMBEDDED attribute.getPersistentAttributeType() == Attribute.PersistentAttributeType.EMBEDDED
@ -552,7 +552,7 @@ public class MetadataContext {
} }
catch (IllegalArgumentException e) { catch (IllegalArgumentException e) {
// most likely a mismatch in the type we are injecting and the defined field; this represents a // most likely a mismatch in the type we are injecting and the defined field; this represents a
// mismatch in how the annotation processor interpretted the attribute and how our metamodel // mismatch in how the annotation processor interpreted the attribute and how our metamodel
// and/or annotation binder did. // and/or annotation binder did.
// This is particularly the case as arrays are nto handled propery by the StaticMetamodel generator // This is particularly the case as arrays are nto handled propery by the StaticMetamodel generator

View File

@ -174,7 +174,7 @@ public abstract class AbstractPropertyMapping implements PropertyMapping {
logDuplicateRegistration( path, existingType, type ); logDuplicateRegistration( path, existingType, type );
} }
else if ( !( existingType instanceof AssociationType ) ) { else if ( !( existingType instanceof AssociationType ) ) {
// Workaround for org.hibernate.cfg.annotations.PropertyBinder.bind() adding a component for *ToOne ids // Workaround for org.hibernate.cfg.annotations.PropertyBinder#bind() adding a component for *ToOne ids
logDuplicateRegistration( path, existingType, type ); logDuplicateRegistration( path, existingType, type );
} }
else { else {
@ -215,7 +215,7 @@ public abstract class AbstractPropertyMapping implements PropertyMapping {
duplicateIncompatiblePaths.add( path ); duplicateIncompatiblePaths.add( path );
typesByPropertyPath.remove( path ); typesByPropertyPath.remove( path );
// Set everything to empty to signal action has to be taken! // Set everything to empty to signal action has to be taken!
// org.hibernate.hql.internal.ast.tree.DotNode.dereferenceEntityJoin() is reacting to this // org.hibernate.hql.internal.ast.tree.DotNode#dereferenceEntityJoin() is reacting to this
String[] empty = new String[0]; String[] empty = new String[0];
columnsByPropertyPath.put( path, empty ); columnsByPropertyPath.put( path, empty );
columnReadersByPropertyPath.put( path, empty ); columnReadersByPropertyPath.put( path, empty );
@ -319,7 +319,7 @@ public abstract class AbstractPropertyMapping implements PropertyMapping {
// referenced property in the mapping file (ok?) // referenced property in the mapping file (ok?)
columns = columnsByPropertyPath.get( foreignKeyProperty ); columns = columnsByPropertyPath.get( foreignKeyProperty );
if ( columns == null ) { if ( columns == null ) {
return; //get em on the second pass! return; //get 'em on the second pass!
} }
columnReaders = columnReadersByPropertyPath.get( foreignKeyProperty ); columnReaders = columnReadersByPropertyPath.get( foreignKeyProperty );
columnReaderTemplates = columnReaderTemplatesByPropertyPath.get( foreignKeyProperty ); columnReaderTemplates = columnReaderTemplatesByPropertyPath.get( foreignKeyProperty );

View File

@ -163,7 +163,7 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
} }
//SPACES //SPACES
//TODO: i'm not sure, but perhaps we should exclude //TODO: I'm not sure, but perhaps we should exclude
// abstract denormalized tables? // abstract denormalized tables?
int spacesSize = 1 + persistentClass.getSynchronizedTables().size(); int spacesSize = 1 + persistentClass.getSynchronizedTables().size();

View File

@ -172,7 +172,7 @@ public final class CompositionSingularSubAttributesHelper {
"Cannot build AnyMappingDefinition from non-any-typed attribute" "Cannot build AnyMappingDefinition from non-any-typed attribute"
); );
} }
// todo : not sure how lazy is propogated into the component for a subattribute of type any // todo : not sure how lazy is propagated into the component for a sub-attribute of type any
return new StandardAnyTypeDefinition( (AnyType) aType, false ); return new StandardAnyTypeDefinition( (AnyType) aType, false );
} }

View File

@ -345,7 +345,7 @@ public final class MessageHelper {
SessionFactoryImplementor factory, SessionFactoryImplementor factory,
StringBuilder s ) { StringBuilder s ) {
// Need to use the identifier type of the collection owner // Need to use the identifier type of the collection owner
// since the incoming is value is actually the owner's id. // since the incoming value is actually the owner's id.
// Using the collection's key type causes problems with // Using the collection's key type causes problems with
// property-ref keys. // property-ref keys.
// Also need to check that the expected identifier type matches // Also need to check that the expected identifier type matches

View File

@ -45,7 +45,7 @@ public class EnhancedSetterImpl extends SetterFieldImpl {
( (CompositeTracker) value ).$$_hibernate_setOwner( propertyName, (CompositeOwner) target ); ( (CompositeTracker) value ).$$_hibernate_setOwner( propertyName, (CompositeOwner) target );
} }
// This marks the attribute as initialized, so it doesn't get lazy loaded afterwards // This marks the attribute as initialized, so it doesn't get lazily loaded afterwards
if ( target instanceof PersistentAttributeInterceptable ) { if ( target instanceof PersistentAttributeInterceptable ) {
PersistentAttributeInterceptor interceptor = ( (PersistentAttributeInterceptable) target ).$$_hibernate_getInterceptor(); PersistentAttributeInterceptor interceptor = ( (PersistentAttributeInterceptable) target ).$$_hibernate_getInterceptor();
if ( interceptor instanceof BytecodeLazyAttributeInterceptor ) { if ( interceptor instanceof BytecodeLazyAttributeInterceptor ) {

View File

@ -185,7 +185,7 @@ public abstract class AbstractLazyInitializer implements LazyInitializer {
protected void permissiveInitialization() { protected void permissiveInitialization() {
if ( session == null ) { if ( session == null ) {
//we have a detached collection thats set to null, reattach //we have a detached collection that is set to null, reattach
if ( sessionFactoryUuid == null ) { if ( sessionFactoryUuid == null ) {
throw new LazyInitializationException( "could not initialize proxy [" + entityName + "#" + id + "] - no Session" ); throw new LazyInitializationException( "could not initialize proxy [" + entityName + "#" + id + "] - no Session" );
} }

View File

@ -1372,6 +1372,9 @@ public abstract class AbstractProducedQuery<R> implements QueryImplementor<R> {
sessionCacheMode = getSession().getCacheMode(); sessionCacheMode = getSession().getCacheMode();
getSession().setCacheMode( effectiveCacheMode ); getSession().setCacheMode( effectiveCacheMode );
} }
if ( entityGraphQueryHint != null && entityGraphQueryHint.getSemantic() == GraphSemantic.FETCH ) {
getSession().setEnforcingFetchGraph( true );
}
} }
protected void afterQuery() { protected void afterQuery() {
@ -1383,6 +1386,7 @@ public abstract class AbstractProducedQuery<R> implements QueryImplementor<R> {
getSession().setCacheMode( sessionCacheMode ); getSession().setCacheMode( sessionCacheMode );
sessionCacheMode = null; sessionCacheMode = null;
} }
getSession().setEnforcingFetchGraph( false );
} }
@Override @Override
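The query-side counterpart, sketched with assumed names: a fetch graph supplied through the JPA hint carries GraphSemantic.FETCH, which is what the beforeQuery()/afterQuery() toggling above reacts to. GraphParser.parse() and GraphSemantic.FETCH.getJpaHintName() are used as in the FetchGraphTest added later in this change; the "lineItems" path and the simple-name-as-entity-name assumption are illustrative.

import java.util.List;
import javax.persistence.EntityGraph;
import javax.persistence.EntityManager;
import org.hibernate.graph.GraphParser;
import org.hibernate.graph.GraphSemantic;

public class QueryWithFetchGraphSketch {

	static <T> List<T> listWithFetchGraph(EntityManager entityManager, Class<T> entityClass) {
		// assumes the entity name matches the simple class name and has a "lineItems" attribute
		EntityGraph<T> graph = GraphParser.parse( entityClass, "lineItems", entityManager );

		// the FETCH semantic makes beforeQuery() enable enforcing-fetch-graph mode
		// for this execution; afterQuery() switches it off again
		return entityManager.createQuery( "from " + entityClass.getSimpleName(), entityClass )
				.setHint( GraphSemantic.FETCH.getJpaHintName(), graph )
				.getResultList();
	}
}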

View File

@ -44,7 +44,7 @@ public final class ResourceRegistryStandardImpl implements ResourceRegistry {
// Dummy value to associate with an Object in the backing Map when we use it as a set: // Dummy value to associate with an Object in the backing Map when we use it as a set:
private static final Object PRESENT = new Object(); private static final Object PRESENT = new Object();
//Used instead of Collections.EMPTY_SET to avoid polymorhic calls on xref; //Used instead of Collections.EMPTY_SET to avoid polymorphic calls on xref;
//Also, uses an HashMap as it were an HashSet, as technically we just need the Set semantics //Also, uses an HashMap as it were an HashSet, as technically we just need the Set semantics
//but in this case the overhead of HashSet is not negligible. //but in this case the overhead of HashSet is not negligible.
private static final HashMap<ResultSet,Object> EMPTY = new HashMap<>( 1, 0.2f ); private static final HashMap<ResultSet,Object> EMPTY = new HashMap<>( 1, 0.2f );
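A small generic illustration of the pattern described in that comment: a HashMap with a shared dummy value used as a set; all names here are illustrative.

import java.util.HashMap;

public class MapAsSetSketch {
	// dummy value to associate with a key when the map is used as a set
	private static final Object PRESENT = new Object();

	private final HashMap<String, Object> entries = new HashMap<>();

	boolean add(String key) {
		return entries.put( key, PRESENT ) == null;   // true if the key was newly added
	}

	boolean contains(String key) {
		return entries.containsKey( key );
	}

	boolean remove(String key) {
		return entries.remove( key ) == PRESENT;      // true if the key was present
	}
}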

View File

@ -101,7 +101,7 @@ public class ForUpdateFragment {
} }
return ""; return "";
} }
// TODO: pass lockmode // TODO: pass lock mode
if(isNowaitEnabled) { if(isNowaitEnabled) {
return dialect.getForUpdateNowaitString( aliases.toString() ); return dialect.getForUpdateNowaitString( aliases.toString() );
} }

View File

@ -262,7 +262,7 @@ public final class Template {
result.append( trimOperands.from ).append( ' ' ); result.append( trimOperands.from ).append( ' ' );
} }
else if ( trimOperands.trimSpec != null || trimOperands.trimChar != null ) { else if ( trimOperands.trimSpec != null || trimOperands.trimChar != null ) {
// I think ANSI SQL says that the 'from' is not optional if either trim-spec or trim-char are specified // I think ANSI SQL says that the 'from' is not optional if either trim-spec or trim-char is specified
result.append( "from " ); result.append( "from " );
} }

View File

@ -152,7 +152,7 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
Dialect dialect) { Dialect dialect) {
boolean typesMatch = column.getSqlTypeCode( metadata ) == columnInformation.getTypeCode() boolean typesMatch = dialect.equivalentTypes( column.getSqlTypeCode( metadata ), columnInformation.getTypeCode() )
|| column.getSqlType( dialect, metadata ).toLowerCase(Locale.ROOT).startsWith( columnInformation.getTypeName().toLowerCase(Locale.ROOT) ); || column.getSqlType( dialect, metadata ).toLowerCase(Locale.ROOT).startsWith( columnInformation.getTypeName().toLowerCase(Locale.ROOT) );
if ( !typesMatch ) { if ( !typesMatch ) {
throw new SchemaManagementException( throw new SchemaManagementException(
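Both validation paths now delegate the comparison to the Dialect, so a dialect can declare JDBC type codes equivalent for validation purposes. A hedged sketch of such an override, assuming the boolean equivalentTypes(int, int) signature implied by the call sites above and using the MySQLDialect already referenced elsewhere in this change:

import java.sql.Types;
import org.hibernate.dialect.MySQLDialect;

public class LenientValidationDialect extends MySQLDialect {

	// assumption: equivalentTypes(int, int) as implied by the call sites above;
	// additionally treat NUMERIC and DECIMAL as interchangeable during validation
	@Override
	public boolean equivalentTypes(int typeCode1, int typeCode2) {
		return super.equivalentTypes( typeCode1, typeCode2 )
				|| ( isNumericOrDecimal( typeCode1 ) && isNumericOrDecimal( typeCode2 ) );
	}

	private static boolean isNumericOrDecimal(int typeCode) {
		return typeCode == Types.NUMERIC || typeCode == Types.DECIMAL;
	}
}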

View File

@ -436,7 +436,7 @@ public class SchemaManagementToolCoordinator {
*/ */
public static final MigrateSettingSelector INSTANCE = new MigrateSettingSelector(); public static final MigrateSettingSelector INSTANCE = new MigrateSettingSelector();
// todo : should this define new migrattor-specific settings? // todo : should this define new migrator-specific settings?
// for now we reuse the CREATE settings where applicable // for now we reuse the CREATE settings where applicable
@Override @Override

View File

@ -31,7 +31,7 @@ public class CacheableResultTransformer implements ResultTransformer {
// array with the i-th element indicating whether the i-th // array with the i-th element indicating whether the i-th
// expression returned by a query is included in the tuple; // expression returned by a query is included in the tuple;
// IMPLLEMENTATION NOTE: // IMPLEMENTATION NOTE:
// "joined" and "fetched" associations may use the same SQL, // "joined" and "fetched" associations may use the same SQL,
// but result in different tuple and cached values. This is // but result in different tuple and cached values. This is
// because "fetched" associations are excluded from the tuple. // because "fetched" associations are excluded from the tuple.

View File

@ -42,7 +42,7 @@ public class PassThroughResultTransformer extends BasicTransformerAdapter implem
/* package-protected */ /* package-protected */
List untransformToTuples(List results, boolean isSingleResult) { List untransformToTuples(List results, boolean isSingleResult) {
// untransform only if necessary; if transformed, do it in place; // un-transform only if necessary; if transformed, do it in place;
if ( isSingleResult ) { if ( isSingleResult ) {
for ( int i = 0 ; i < results.size() ; i++ ) { for ( int i = 0 ; i < results.size() ; i++ ) {
Object[] tuple = untransformToTuple( results.get( i ), isSingleResult); Object[] tuple = untransformToTuple( results.get( i ), isSingleResult);

View File

@ -23,7 +23,7 @@ final public class Transformers {
public static final ToListResultTransformer TO_LIST = ToListResultTransformer.INSTANCE; public static final ToListResultTransformer TO_LIST = ToListResultTransformer.INSTANCE;
/** /**
* Creates a resulttransformer that will inject aliased values into * Creates a ResultTransformer that will inject aliased values into
* instances of Class via property methods or fields. * instances of Class via property methods or fields.
*/ */
public static ResultTransformer aliasToBean(Class target) { public static ResultTransformer aliasToBean(Class target) {

View File

@ -248,7 +248,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
@Override @Override
public void setIdentifier(Object entity, Serializable id) throws HibernateException { public void setIdentifier(Object entity, Serializable id) throws HibernateException {
// 99% of the time the session is not needed. Its only needed for certain brain-dead // 99% of the time the session is not needed. It's only needed for certain brain-dead
// interpretations of JPA 2 "derived identity" support // interpretations of JPA 2 "derived identity" support
setIdentifier( entity, id, null ); setIdentifier( entity, id, null );
} }
@ -506,7 +506,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
@Override @Override
public void resetIdentifier(Object entity, Serializable currentId, Object currentVersion) { public void resetIdentifier(Object entity, Serializable currentId, Object currentVersion) {
// 99% of the time the session is not needed. Its only needed for certain brain-dead // 99% of the time the session is not needed. It's only needed for certain brain-dead
// interpretations of JPA 2 "derived identity" support // interpretations of JPA 2 "derived identity" support
resetIdentifier( entity, currentId, currentVersion, null ); resetIdentifier( entity, currentId, currentVersion, null );
} }
@ -696,7 +696,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
@Override @Override
public final Object instantiate(Serializable id) throws HibernateException { public final Object instantiate(Serializable id) throws HibernateException {
// 99% of the time the session is not needed. Its only needed for certain brain-dead // 99% of the time the session is not needed. It's only needed for certain brain-dead
// interpretations of JPA 2 "derived identity" support // interpretations of JPA 2 "derived identity" support
return instantiate( id, null ); return instantiate( id, null );
} }

View File

@ -145,7 +145,7 @@ public final class BytecodeEnhancementMetadataPojoImpl implements BytecodeEnhanc
final PersistentAttributeInterceptable entity = (PersistentAttributeInterceptable) entityTuplizer final PersistentAttributeInterceptable entity = (PersistentAttributeInterceptable) entityTuplizer
.instantiate( identifier, session ); .instantiate( identifier, session );
// clear the fields that are marked as dirty in the dirtyness tracker // clear the fields that are marked as dirty in the dirtiness tracker
if ( entity instanceof SelfDirtinessTracker ) { if ( entity instanceof SelfDirtinessTracker ) {
( (SelfDirtinessTracker) entity ).$$_hibernate_clearDirtyAttributes(); ( (SelfDirtinessTracker) entity ).$$_hibernate_clearDirtyAttributes();
} }

View File

@ -223,7 +223,7 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
} }
} }
// clear the fields that are marked as dirty in the dirtyness tracker // clear the fields that are marked as dirty in the dirtiness tracker
if ( entity instanceof SelfDirtinessTracker ) { if ( entity instanceof SelfDirtinessTracker ) {
( (SelfDirtinessTracker) entity ).$$_hibernate_clearDirtyAttributes(); ( (SelfDirtinessTracker) entity ).$$_hibernate_clearDirtyAttributes();
} }

View File

@ -351,7 +351,7 @@ public abstract class CollectionType extends AbstractType implements Association
public boolean isDirty(Object old, Object current, SharedSessionContractImplementor session) public boolean isDirty(Object old, Object current, SharedSessionContractImplementor session)
throws HibernateException { throws HibernateException {
// collections don't dirty an unversioned parent entity // collections don't dirty an un-versioned parent entity
// TODO: I don't really like this implementation; it would be better if // TODO: I don't really like this implementation; it would be better if
// this was handled by searchForDirtyCollections() // this was handled by searchForDirtyCollections()

View File

@ -112,7 +112,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
@Override @Override
public int[] sqlTypes(Mapping mapping) throws MappingException { public int[] sqlTypes(Mapping mapping) throws MappingException {
//Not called at runtime so doesn't matter if its slow :) //Not called at runtime so doesn't matter if it's slow :)
int[] sqlTypes = new int[getColumnSpan( mapping )]; int[] sqlTypes = new int[getColumnSpan( mapping )];
int n = 0; int n = 0;
for ( int i = 0; i < propertySpan; i++ ) { for ( int i = 0; i < propertySpan; i++ ) {
@ -126,7 +126,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
@Override @Override
public Size[] dictatedSizes(Mapping mapping) throws MappingException { public Size[] dictatedSizes(Mapping mapping) throws MappingException {
//Not called at runtime so doesn't matter if its slow :) //Not called at runtime so doesn't matter if it's slow :)
final Size[] sizes = new Size[getColumnSpan( mapping )]; final Size[] sizes = new Size[getColumnSpan( mapping )];
int soFar = 0; int soFar = 0;
for ( Type propertyType : propertyTypes ) { for ( Type propertyType : propertyTypes ) {
@ -139,7 +139,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
@Override @Override
public Size[] defaultSizes(Mapping mapping) throws MappingException { public Size[] defaultSizes(Mapping mapping) throws MappingException {
//Not called at runtime so doesn't matter if its slow :) //Not called at runtime so doesn't matter if it's slow :)
final Size[] sizes = new Size[getColumnSpan( mapping )]; final Size[] sizes = new Size[getColumnSpan( mapping )];
int soFar = 0; int soFar = 0;
for ( Type propertyType : propertyTypes ) { for ( Type propertyType : propertyTypes ) {
@ -687,7 +687,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
if ( value != null ) { if ( value != null ) {
Object result = instantiate( owner, session ); Object result = instantiate( owner, session );
Object[] values = (Object[]) value; Object[] values = (Object[]) value;
Object[] resolvedValues = new Object[values.length]; //only really need new array during semiresolve! Object[] resolvedValues = new Object[values.length]; //only really need new array during semi-resolve!
for ( int i = 0; i < values.length; i++ ) { for ( int i = 0; i < values.length; i++ ) {
resolvedValues[i] = propertyTypes[i].resolve( values[i], session, owner ); resolvedValues[i] = propertyTypes[i].resolve( values[i], session, owner );
} }

View File

@ -484,7 +484,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
Object propertyValue = entityPersister.getPropertyValue( value, uniqueKeyPropertyName ); Object propertyValue = entityPersister.getPropertyValue( value, uniqueKeyPropertyName );
// We now have the value of the property-ref we reference. However, // We now have the value of the property-ref we reference. However,
// we need to dig a little deeper, as that property might also be // we need to dig a little deeper, as that property might also be
// an entity type, in which case we need to resolve its identitifier // an entity type, in which case we need to resolve its identifier
Type type = entityPersister.getPropertyType( uniqueKeyPropertyName ); Type type = entityPersister.getPropertyType( uniqueKeyPropertyName );
if ( type.isEntityType() ) { if ( type.isEntityType() ) {
propertyValue = ( (EntityType) type ).getIdentifier( propertyValue, session ); propertyValue = ( (EntityType) type ).getIdentifier( propertyValue, session );

View File

@ -85,7 +85,7 @@ public class ManyToOneType extends EntityType {
@Override @Override
public boolean isAlwaysDirtyChecked() { public boolean isAlwaysDirtyChecked() {
// always need to dirty-check, even when non-updateable; // always need to dirty-check, even when non-updatable;
// this ensures that when the association is updated, // this ensures that when the association is updated,
// the entity containing this association will be updated // the entity containing this association will be updated
// in the cache // in the cache

View File

@ -19,7 +19,7 @@ public class ArrayMutabilityPlan<T> extends MutableMutabilityPlan<T> {
@SuppressWarnings({ "unchecked", "SuspiciousSystemArraycopy" }) @SuppressWarnings({ "unchecked", "SuspiciousSystemArraycopy" })
public T deepCopyNotNull(T value) { public T deepCopyNotNull(T value) {
if ( ! value.getClass().isArray() ) { if ( ! value.getClass().isArray() ) {
// ugh! cannot find a way to properly define the type signature here to // ugh! cannot find a way to properly define the type signature here
throw new IllegalArgumentException( "Value was not an array [" + value.getClass().getName() + "]" ); throw new IllegalArgumentException( "Value was not an array [" + value.getClass().getName() + "]" );
} }
final int length = Array.getLength( value ); final int length = Array.getLength( value );

View File

@ -0,0 +1,86 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.id.enhanced;
import static org.junit.Assert.assertEquals;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.MappedSuperclass;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.dialect.MySQLDialect;
import org.hibernate.query.Query;
import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.CustomRunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* {@inheritDoc}
*
* @author Yanming Zhou
*/
@TestForIssue(jiraKey = "HHH-14219")
@RunWith(CustomRunner.class)
@RequiresDialect(MySQLDialect.class)
public class HHH14219 {
private SessionFactory sf;
@Before
public void setup() {
StandardServiceRegistryBuilder srb = new StandardServiceRegistryBuilder()
.applySetting("hibernate.show_sql", "true").applySetting("hibernate.format_sql", "true")
.applySetting("hibernate.hbm2ddl.auto", "create-drop");
Metadata metadata = new MetadataSources(srb.build()).addAnnotatedClass(BaseEntity.class)
.addAnnotatedClass(Foo.class).addAnnotatedClass(Bar.class).buildMetadata();
sf = metadata.buildSessionFactory();
}
@Test
public void testSequenceTableContainsOnlyOneRow() throws Exception {
try (Session session = sf.openSession()) {
@SuppressWarnings("unchecked")
Query<Number> q = session.createNativeQuery("select count(*) from " + BaseEntity.SHARED_SEQ_NAME);
assertEquals(1, q.uniqueResult().intValue());
}
}
@MappedSuperclass
public static class BaseEntity {
public static final String SHARED_SEQ_NAME = "shared_seq";
@Id
@GeneratedValue(strategy = GenerationType.AUTO, generator = SHARED_SEQ_NAME)
protected Long id;
}
@Entity
public static class Foo extends BaseEntity {
}
@Entity
public static class Bar extends BaseEntity {
}
}

View File

@ -49,7 +49,7 @@ public class WidgetId implements Serializable {
@Override @Override
public String toString( ) { public String toString( ) {
StringBuffer buf = new StringBuffer( "[id:" ); StringBuilder buf = new StringBuilder( "[id:" );
buf.append( ( this.getCode( ) == null ) ? "null" : this.getCode( ).toString( ) ); buf.append( ( this.getCode( ) == null ) ? "null" : this.getCode( ).toString( ) );
buf.append( ";code:" ); buf.append( ";code:" );
buf.append( ( this.getDivision( ) == null ) ? "null" : this.getDivision( ) ); buf.append( ( this.getDivision( ) == null ) ? "null" : this.getDivision( ) );

View File

@ -0,0 +1,176 @@
package org.hibernate.jpa.test.graphs;
import java.util.Arrays;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.EntityGraph;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import org.hibernate.Hibernate;
import org.hibernate.annotations.Fetch;
import org.hibernate.annotations.FetchMode;
import org.hibernate.graph.GraphParser;
import org.hibernate.graph.GraphSemantic;
import org.hibernate.jpa.test.BaseEntityManagerFunctionalTestCase;
import org.hibernate.testing.TestForIssue;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.is;
import static org.hibernate.testing.transaction.TransactionUtil.doInJPA;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
* @author Yaroslav Prokipchyn
* @author Nathan Xu
*/
@TestForIssue( jiraKey = "HHH-14212" )
public class FetchGraphTest extends BaseEntityManagerFunctionalTestCase {
@Override
protected Class<?>[] getAnnotatedClasses() {
return new Class<?>[] {
LedgerRecord.class,
LedgerRecordItem.class,
BudgetRecord.class,
Trigger.class,
FinanceEntity.class
};
}
@Before
public void setUp() {
doInJPA( this::entityManagerFactory, entityManager -> {
Trigger trigger = new Trigger();
entityManager.persist( trigger );
BudgetRecord budgetRecord = new BudgetRecord();
budgetRecord.amount = 100;
budgetRecord.trigger = trigger;
entityManager.persist( budgetRecord );
FinanceEntity client = new FinanceEntity();
client.name = "client";
FinanceEntity vendor = new FinanceEntity();
vendor.name = "vendor";
entityManager.persist( client );
entityManager.persist( vendor );
LedgerRecordItem item1 = new LedgerRecordItem();
item1.financeEntity = client;
LedgerRecordItem item2 = new LedgerRecordItem();
item2.financeEntity = vendor;
entityManager.persist( item1 );
entityManager.persist( item2 );
LedgerRecord ledgerRecord = new LedgerRecord();
ledgerRecord.budgetRecord = budgetRecord;
ledgerRecord.trigger = trigger;
ledgerRecord.ledgerRecordItems= Arrays.asList( item1, item2 );
item1.ledgerRecord = ledgerRecord;
item2.ledgerRecord = ledgerRecord;
entityManager.persist( ledgerRecord );
} );
}
@Test
public void testCollectionEntityGraph() {
doInJPA( this::entityManagerFactory, entityManager -> {
final EntityGraph<LedgerRecord> entityGraph = GraphParser.parse( LedgerRecord.class, "budgetRecord, ledgerRecordItems.value(financeEntity)", entityManager );
final List<LedgerRecord> records = entityManager.createQuery( "from LedgerRecord", LedgerRecord.class )
.setHint( GraphSemantic.FETCH.getJpaHintName(), entityGraph )
.getResultList();
assertThat( records.size(), is( 1 ) );
records.forEach( record -> {
assertFalse( Hibernate.isInitialized( record.trigger ) );
assertTrue( Hibernate.isInitialized( record.budgetRecord ) );
assertFalse( Hibernate.isInitialized( record.budgetRecord.trigger ) );
assertTrue( Hibernate.isInitialized( record.ledgerRecordItems) );
assertThat( record.ledgerRecordItems.size(), is( 2 ) );
record.ledgerRecordItems.forEach( item -> {
assertSame( record, item.ledgerRecord );
assertTrue( Hibernate.isInitialized( item.financeEntity ) );
} );
} );
} );
}
@Entity(name = "LedgerRecord")
@Table(name = "LedgerRecord")
static class LedgerRecord {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
Integer id;
@ManyToOne
BudgetRecord budgetRecord;
@OneToMany(mappedBy = "ledgerRecord", fetch = FetchType.EAGER, cascade = CascadeType.ALL)
@Fetch(FetchMode.SUBSELECT)
List<LedgerRecordItem> ledgerRecordItems;
@ManyToOne
Trigger trigger;
}
@Entity(name = "LedgerRecordItem")
@Table(name = "LedgerRecordItem")
static class LedgerRecordItem {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
Integer id;
@ManyToOne
LedgerRecord ledgerRecord;
@ManyToOne
FinanceEntity financeEntity;
}
@Entity(name = "BudgetRecord")
@Table(name = "BudgetRecord")
static class BudgetRecord {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
Integer id;
int amount;
@ManyToOne
Trigger trigger;
}
@Entity(name = "Trigger")
@Table(name = "Trigger")
static class Trigger {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
Integer id;
String name;
}
@Entity(name = "FinanceEntity")
@Table(name = "FinanceEntity")
static class FinanceEntity {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
Integer id;
String name;
}
}

Some files were not shown because too many files have changed in this diff