HHH-14218 fix some typos in code comments

This commit is contained in:
Nathan Xu 2020-09-16 22:58:50 -04:00 committed by Sanne Grinovero
parent 0db25441b0
commit dd74a613cc
144 changed files with 252 additions and 253 deletions

View File

@ -470,7 +470,7 @@ aggregateExpr
// Establishes the list of aliases being used by this query.
fromClause {
// NOTE: This references the INPUT AST! (see http://www.antlr.org/doc/trees.html#Action%20Translation)
// the ouput AST (#fromClause) has not been built yet.
// the output AST (#fromClause) has not been built yet.
prepareFromClauseInputTree(#fromClause_in);
}
: #(f:FROM { pushFromClause(#fromClause,f); handleClauseStart( FROM ); } fromElementList ) {

View File

@ -162,7 +162,7 @@ tokens
}
/**
* This method is overriden in the sub class in order to provide the
* This method is overridden in the sub class in order to provide the
* 'keyword as identifier' hack.
* @param token The token to retry as an identifier.
* @param ex The exception to throw if it cannot be retried as an identifier.
@ -547,7 +547,7 @@ aliasedExpression
//
// Note that the above precedence levels map to the rules below...
// Once you have a precedence chart, writing the appropriate rules as below
// is usually very straightfoward
// is usually very straightforward
logicalExpression
: expression
@ -1032,7 +1032,7 @@ NUM_INT
( // hex
// the 'e'|'E' and float suffix stuff look
// like hex digits, hence the (...)+ doesn't
// know when to stop: ambig. ANTLR resolves
// know when to stop: ambiguous. ANTLR resolves
// it correctly by matching immediately. It
// is therefore ok to hush warning.
options { warnWhenFollowAmbig=false; }

View File

@ -177,7 +177,7 @@ insertStatement
;
setClause
// Simply re-use comparisionExpr, because it already correctly defines the EQ rule the
// Simply re-use comparisonExpr, because it already correctly defines the EQ rule the
// way it is needed here; not the most aptly named, but ah
: #( SET { out(" set "); } comparisonExpr[false] ( { out(", "); } comparisonExpr[false] )* )
;

View File

@ -20,7 +20,7 @@ public class UnknownProfileException extends HibernateException {
* @param name The profile name that was unknown.
*/
public UnknownProfileException(String name) {
super( "Unknow fetch profile [" + name + "]" );
super( "Unknown fetch profile [" + name + "]" );
this.name = name;
}

View File

@ -228,7 +228,7 @@ public class EntityUpdateAction extends EntityAction {
session
);
if ( persister.hasUpdateGeneratedProperties() ) {
// this entity defines proeprty generation, so process those generated
// this entity defines property generation, so process those generated
// values...
persister.processUpdateGeneratedProperties( id, instance, state, session );
if ( persister.isVersionPropertyGenerated() ) {

View File

@ -121,7 +121,7 @@ public class MetadataBuilderImpl implements MetadataBuilderImplementor, TypeCont
this.sources = sources;
this.options = new MetadataBuildingOptionsImpl( serviceRegistry );
this.bootstrapContext = new BootstrapContextImpl( serviceRegistry, options );
//this is needed only fro implementig deprecated method
//this is needed only for implementing deprecated method
options.setBootstrapContext( bootstrapContext );
for ( MetadataSourcesContributor contributor :

View File

@ -164,7 +164,7 @@ public abstract class ResultSetMappingBinder {
int queryReturnPosition) {
String alias = rtnSource.getAlias();
if ( StringHelper.isEmpty( alias ) ) {
// hack-around as sqlquery impl depend on having a key.
// hack-around as sqlquery impl depends on having a key.
alias = "alias_" + queryReturnPosition;
}
final String entityName = context.determineEntityName(

View File

@ -208,7 +208,7 @@ public class BootstrapServiceRegistryBuilder {
final ClassLoaderService classLoaderService;
if ( providedClassLoaderService == null ) {
// Use a set. As an example, in JPA, OsgiClassLoader may be in both
// the providedClassLoaders and the overridenClassLoader.
// the providedClassLoaders and the overriddenClassLoader.
final Set<ClassLoader> classLoaders = new HashSet<ClassLoader>();
if ( providedClassLoaders != null ) {

View File

@ -32,7 +32,7 @@ public class MappedSuperclassEnhancer extends PersistentAttributesEnhancer {
super.enhance( managedCtClass );
}
// Generate 'template' methods for each attribute. This will be overriden by the actual entities
// Generate 'template' methods for each attribute. This will be overridden by the actual entities
@Override
protected CtMethod generateFieldReader(

View File

@ -400,7 +400,7 @@ public class PersistentAttributesEnhancer extends EnhancerImpl {
String toArrayMethod = isMap ? "values().toArray()" : "toArray()";
// only remove elements not in the new collection or else we would loose those elements
// don't use iterator to avoid ConcurrentModException
// don't use iterator to avoid ConcurrentModificationException
fieldWriter.insertBefore(
String.format(
" if (this.%3$s != null && %1$s) {%n" +
@ -561,7 +561,7 @@ public class PersistentAttributesEnhancer extends EnhancerImpl {
managedCtClass.addInterface( compositeOwnerCtClass );
if ( enhancementContext.isCompositeClass( managedCtClass ) ) {
// if a composite have a embedded field we need to implement the TRACKER_CHANGER_NAME method as well
// if a composite has an embedded field we need to implement the TRACKER_CHANGER_NAME method as well
MethodWriter.write(
managedCtClass,
"public void %1$s(String name) {%n" +

View File

@ -129,7 +129,7 @@ public class EnhancementHelper {
finally {
if ( isTempSession ) {
try {
// Commit the JDBC transaction is we started one.
// Commit the JDBC transaction if we started one.
if ( !isJta ) {
BytecodeLogger.LOGGER.debug( "Enhancement interception Helper#performWork committing transaction on temporary Session" );
session.getTransaction().commit();

View File

@ -117,7 +117,7 @@ public class DomainDataRegionConfigImpl implements DomainDataRegionConfig {
}
// todo (6.0) : `EntityPersister` and `CollectionPersister` references here should be replaces with `EntityHierarchy` and `PersistentCollectionDescriptor`
// todo (6.0) : `EntityPersister` and `CollectionPersister` references here should be replaced with `EntityHierarchy` and `PersistentCollectionDescriptor`
//
// todo : although ^^, couldn't this just be the boot-time model? Is there a specific need for it to be the run-time model?
// that would alleviate the difference between 5.3 and 6.0 from the SPI POV

View File

@ -86,7 +86,7 @@ public class NaturalIdCacheKey implements Serializable {
@Override
public String initialize() {
//Complex toString is needed as naturalIds for entities are not simply based on a single value like primary keys
//the only same way to differentiate the keys is to included the disassembled values in the string.
//the only same way to differentiate the keys is to include the disassembled values in the string.
final StringBuilder toStringBuilder = new StringBuilder().append( entityName ).append(
"##NaturalId[" );
for ( int i = 0; i < naturalIdValues.length; i++ ) {

View File

@ -83,7 +83,7 @@ public class RegionFactoryInitiator implements StandardServiceInitiator<RegionFa
final Collection<Class<? extends RegionFactory>> implementors = selector.getRegisteredStrategyImplementors( RegionFactory.class );
if ( setting == null && implementors.size() != 1 ) {
// if either are explicitly defined as TRUE we need a RegionFactory
// if either is explicitly defined as TRUE we need a RegionFactory
if ( ( useSecondLevelCache != null && useSecondLevelCache == TRUE )
|| ( useQueryCache != null && useQueryCache == TRUE ) ) {
throw new CacheException( "Caching was explicitly requested, but no RegionFactory was defined and there is not a single registered RegionFactory" );

View File

@ -77,7 +77,7 @@ public class StandardCacheEntryImpl implements CacheEntry {
@Override
public Serializable[] getDisassembledState() {
// todo: this was added to support initializing an entity's EntityEntry snapshot during reattach;
// this should be refactored to instead expose a method to assemble a EntityEntry based on this
// this should be refactored to instead expose a method to assemble an EntityEntry based on this
// state for return.
return disassembledState;
}

View File

@ -537,7 +537,7 @@ public final class AnnotationBinder {
LOG.unsupportedMappedSuperclassWithEntityInheritance( clazzToProcess.getName() );
}
//TODO: be more strict with secondarytable allowance (not for ids, not for secondary table join columns etc)
//TODO: be more strict with secondary table allowance (not for ids, not for secondary table join columns etc)
InheritanceState inheritanceState = inheritanceStatePerClass.get( clazzToProcess );
AnnotatedClassType classType = context.getMetadataCollector().getClassType( clazzToProcess );
@ -1032,7 +1032,7 @@ public final class AnnotationBinder {
);
AccessType propertyAccessor = entityBinder.getPropertyAccessor( compositeClass );
//In JPA 2, there is a shortcut if the IdClass is the Pk of the associated class pointed to by the id
//it ought to be treated as an embedded and not a real IdClass (at least in the Hibernate's internal way
//it ought to be treated as an embedded and not a real IdClass (at least in the Hibernate's internal way)
final boolean isFakeIdClass = isIdClassPkOfTheAssociatedEntity(
elementsToProcess,
compositeClass,
@ -1589,7 +1589,7 @@ public final class AnnotationBinder {
}
}
if ( isRequiredAnnotationPresent ) {
//create a PropertyData fpr the specJ property holding the mapping
//create a PropertyData for the specJ property holding the mapping
PropertyData specJPropertyData = new PropertyInferredData(
declaringClass,
//same dec
@ -2032,7 +2032,7 @@ public final class AnnotationBinder {
}
{
Column[] keyColumns = null;
//JPA 2 has priority and has different default column values, differenciate legacy from JPA 2
//JPA 2 has priority and has different default column values, differentiate legacy from JPA 2
Boolean isJPA2 = null;
if ( property.isAnnotationPresent( MapKeyColumn.class ) ) {
isJPA2 = Boolean.TRUE;
@ -2063,7 +2063,7 @@ public final class AnnotationBinder {
}
{
JoinColumn[] joinKeyColumns = null;
//JPA 2 has priority and has different default column values, differenciate legacy from JPA 2
//JPA 2 has priority and has different default column values, differentiate legacy from JPA 2
Boolean isJPA2 = null;
if ( property.isAnnotationPresent( MapKeyJoinColumns.class ) ) {
isJPA2 = Boolean.TRUE;
@ -2565,7 +2565,7 @@ public final class AnnotationBinder {
}
associationTableBinder.setUniqueConstraints( uniqueConstraints );
associationTableBinder.setJpaIndex( jpaIndexes );
//set check constaint in the second pass
//set check constraint in the second pass
annJoins = joins.length == 0 ? null : joins;
annInverseJoins = inverseJoins == null || inverseJoins.length == 0 ? null : inverseJoins;
}
@ -2603,7 +2603,7 @@ public final class AnnotationBinder {
boolean isIdentifierMapper,
MetadataBuildingContext buildingContext,
boolean isComponentEmbedded,
boolean isId, //is a identifier
boolean isId, //is an identifier
Map<XClass, InheritanceState> inheritanceStatePerClass,
String referencedEntityName, //is a component who is overridden by a @MapsId
Ejb3JoinColumn[] columns) {
@ -2750,7 +2750,7 @@ public final class AnnotationBinder {
//add elements of the embeddable superclass
XClass superClass = xClassProcessed.getSuperclass();
while ( superClass != null && superClass.isAnnotationPresent( MappedSuperclass.class ) ) {
//FIXME: proper support of typevariables incl var resolved at upper levels
//FIXME: proper support of type variables incl var resolved at upper levels
propContainer = new PropertyContainer( superClass, xClassProcessed, propertyAccessor );
addElementsOfClass( classElements, propContainer, buildingContext );
superClass = superClass.getSuperclass();
@ -3061,7 +3061,7 @@ public final class AnnotationBinder {
final JoinColumn joinColumn = property.getAnnotation( JoinColumn.class );
final JoinColumns joinColumns = property.getAnnotation( JoinColumns.class );
//Make sure that JPA1 key-many-to-one columns are read only tooj
//Make sure that JPA1 key-many-to-one columns are read only too
boolean hasSpecjManyToOne=false;
if ( context.getBuildingOptions().isSpecjProprietarySyntaxEnabled() ) {
String columnName = "";
@ -3213,7 +3213,7 @@ public final class AnnotationBinder {
KeyValue identifier = propertyHolder.getIdentifier();
if ( identifier == null ) {
//this is a @OneToOne in an @EmbeddedId (the persistentClass.identifier is not set yet, it's being built)
//by definition the PK cannot refers to itself so it cannot map to itself
//by definition the PK cannot refer to itself so it cannot map to itself
mapToPK = false;
}
else {
@ -3550,7 +3550,7 @@ public final class AnnotationBinder {
InheritanceState state = new InheritanceState( clazz, inheritanceStatePerClass, buildingContext );
if ( superclassState != null ) {
//the classes are ordered thus preventing an NPE
//FIXME if an entity has subclasses annotated @MappedSperclass wo sub @Entity this is wrong
//FIXME if an entity has subclasses annotated @MappedSuperclass wo sub @Entity this is wrong
superclassState.setHasSiblings( true );
InheritanceState superEntityState = InheritanceState.getInheritanceStateOfSuperEntity(
clazz, inheritanceStatePerClass

View File

@ -311,7 +311,7 @@ public class BinderHelper {
columnsList.append( ") " );
if ( associatedEntity != null ) {
//overidden destination
//overridden destination
columnsList.append( "of " )
.append( associatedEntity.getEntityName() )
.append( "." )
@ -438,7 +438,7 @@ public class BinderHelper {
|| "embedded".equals( property.getPropertyAccessorName() ) ) {
return;
}
// FIXME cannot use subproperties becasue the caller needs top level properties
// FIXME cannot use subproperties because the caller needs top level properties
// if ( property.isComposite() ) {
// Iterator subProperties = ( (Component) property.getValue() ).getPropertyIterator();
// while ( subProperties.hasNext() ) {
@ -459,7 +459,7 @@ public class BinderHelper {
}
/**
* Retrieve the property by path in a recursive way, including IndetifierProperty in the loop
* Retrieve the property by path in a recursive way, including IdentifierProperty in the loop
* If propertyName is null or empty, the IdentifierProperty is returned
*/
public static Property findPropertyByName(PersistentClass associatedClass, String propertyName) {
@ -685,7 +685,7 @@ public class BinderHelper {
if ( gen == null ) {
throw new AnnotationException( "Unknown named generator (@GeneratedValue#generatorName): " + generatorName );
}
//This is quite vague in the spec but a generator could override the generate choice
//This is quite vague in the spec but a generator could override the generator choice
String identifierGeneratorStrategy = gen.getStrategy();
//yuk! this is a hack not to override 'AUTO' even if generator is set
final boolean avoidOverriding =

View File

@ -58,7 +58,7 @@ import org.hibernate.mapping.Table;
* @author Emmanuel Bernard
*/
public class ComponentPropertyHolder extends AbstractPropertyHolder {
//TODO introduce a overrideTable() method for columns held by sec table rather than the hack
//TODO introduce an overrideTable() method for columns held by sec table rather than the hack
// joinsPerRealTableName in ClassPropertyHolder
private Component component;
private boolean isOrWithinEmbeddedId;

View File

@ -632,7 +632,7 @@ public class Ejb3Column {
}
}
//must only be called after all setters are defined and before bind
//must only be called after all setters are defined and before binding
private void extractDataFromPropertyData(PropertyData inferredData) {
if ( inferredData != null ) {
XProperty property = inferredData.getProperty();

View File

@ -419,7 +419,7 @@ public class Ejb3JoinColumn extends Ejb3Column {
PersistentClass persistentClass,
Map<String, Join> joins,
Map<XClass, InheritanceState> inheritanceStatePerClass) {
// TODO shouldn't we deduce the classname from the persistentclasS?
// TODO shouldn't we deduce the classname from the persistentClass?
this.propertyHolder = PropertyHolderBuilder.buildPropertyHolder(
persistentClass,
joins,

View File

@ -302,7 +302,7 @@ public class InheritanceState {
org.hibernate.mapping.MappedSuperclass parentSuperclass = mappedSuperclass;
final Class<?> type = buildingContext.getBootstrapContext().getReflectionManager()
.toClass( classesToProcessForMappedSuperclass.get( index ) );
//add MAppedSuperclass if not already there
//add MappedSuperclass if not already there
mappedSuperclass = buildingContext.getMetadataCollector().getMappedSuperclass( type );
if ( mappedSuperclass == null ) {
mappedSuperclass = new org.hibernate.mapping.MappedSuperclass( parentSuperclass, superEntity );

View File

@ -48,7 +48,7 @@ public class OneToOneSecondPass implements SecondPass {
private String cascadeStrategy;
private Ejb3JoinColumn[] joinColumns;
//that suck, we should read that from the property mainly
//that sucks, we should read that from the property mainly
public OneToOneSecondPass(
String mappedBy,
String ownerEntity,

View File

@ -51,7 +51,7 @@ public class PropertyPreloadedData implements PropertyData {
}
public XClass getDeclaringClass() {
//Preloaded properties are artificial wrapper for colleciton element accesses
//Preloaded properties are artificial wrapper for collection element accesses
//and idClass creation, ignore.
return null;
}

View File

@ -554,11 +554,11 @@ public abstract class CollectionBinder {
collection.setInverse( isMappedBy );
//many to many may need some second pass informations
//many to many may need some second pass information
if ( !oneToMany && isMappedBy ) {
metadataCollector.addMappedBy( getCollectionType().getName(), mappedBy, propertyName );
}
//TODO reducce tableBinder != null and oneToMany
//TODO reduce tableBinder != null and oneToMany
XClass collectionType = getCollectionType();
if ( inheritanceStatePerClass == null) throw new AssertionFailure( "inheritanceStatePerClass not set" );
SecondPass sp = getSecondPass(

View File

@ -185,7 +185,7 @@ public class MapBinder extends CollectionBinder {
}
else {
//this is a true Map mapping
//TODO ugly copy/pastle from CollectionBinder.bindManyToManySecondPass
//TODO ugly copy/paste from CollectionBinder.bindManyToManySecondPass
String mapKeyType;
Class target = void.class;
/*
@ -322,7 +322,7 @@ public class MapBinder extends CollectionBinder {
column.setTable( mapValue.getCollectionTable() );
}
elementBinder.setColumns( elementColumns );
//do not call setType as it extract the type from @Type
//do not call setType as it extracts the type from @Type
//the algorithm generally does not apply for map key anyway
elementBinder.setKey(true);
elementBinder.setType(
@ -338,7 +338,7 @@ public class MapBinder extends CollectionBinder {
}
//FIXME pass the Index Entity JoinColumns
if ( !collection.isOneToMany() ) {
//index column shoud not be null
//index column should not be null
for (Ejb3JoinColumn col : mapKeyManyToManyColumns) {
col.forceNotNull();
}

View File

@ -212,8 +212,8 @@ public class PropertyBinder {
private Property bind(Property prop) {
if (isId) {
final RootClass rootClass = ( RootClass ) holder.getPersistentClass();
//if an xToMany, it as to be wrapped today.
//FIXME this pose a problem as the PK is the class instead of the associated class which is not really compliant with the spec
//if an xToMany, it has to be wrapped today.
//FIXME this poses a problem as the PK is the class instead of the associated class which is not really compliant with the spec
if ( isXToMany || entityBinder.wrapIdsInEmbeddedComponents() ) {
Component identifier = (Component) rootClass.getIdentifier();
if (identifier == null) {

View File

@ -384,7 +384,7 @@ public class SimpleValueBinder {
this.explicitType = explicitType;
}
//FIXME raise an assertion failure if setResolvedTypeMapping(String) and setResolvedTypeMapping(Type) are use at the same time
//FIXME raise an assertion failure if setResolvedTypeMapping(String) and setResolvedTypeMapping(Type) are used at the same time
public void setExplicitType(Type typeAnn) {
if ( typeAnn != null ) {
@ -500,7 +500,7 @@ public class SimpleValueBinder {
simpleValue.setTypeParameters( typeDef.getParametersAsProperties() );
}
if ( typeParameters != null && typeParameters.size() != 0 ) {
//explicit type params takes precedence over type def params
//explicit type params take precedence over type def params
simpleValue.setTypeParameters( typeParameters );
}
simpleValue.setTypeName( type );

View File

@ -1377,7 +1377,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
}
}
if ( elementsForProperty.size() == 0 && defaults.canUseJavaAnnotations() ) {
//we have nothing, so Java annotations might occurs
//we have nothing, so Java annotations might occur
Annotation annotation = getPhysicalAnnotation( Version.class );
if ( annotation != null ) {
annotationList.add( annotation );
@ -2614,7 +2614,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
}
else {
throw new AnnotationException(
"Unknown DiscrimiatorType in XML: " + value + " (" + SCHEMA_VALIDATION + ")"
"Unknown DiscriminatorType in XML: " + value + " (" + SCHEMA_VALIDATION + ")"
);
}
}
@ -2869,7 +2869,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
secondaryTables.add( AnnotationFactory.create( annotation ) );
}
/*
* You can't have both secondary table in XML and Java,
* You can't have both secondary tables in XML and Java,
* since there would be no way to "remove" a secondary table
*/
if ( secondaryTables.size() == 0 && defaults.canUseJavaAnnotations() ) {

View File

@ -146,7 +146,7 @@ public class BeanValidationIntegrator implements Integrator {
}
else {
// otherwise check the validation modes
// todo : in many ways this duplicates thew checks done on the TypeSafeActivator when a ValidatorFactory could not be obtained
// todo : in many ways this duplicates the checks done on the TypeSafeActivator when a ValidatorFactory could not be obtained
validateMissingBeanValidationApi( modes );
}
}

View File

@ -752,7 +752,7 @@ public abstract class AbstractPersistentCollection implements Serializable, Pers
// AST in ORM 5+, handling this type of condition is either extremely difficult or impossible. Forcing
// recreation isn't ideal, but not really any other option in ORM 4.
// Selecting a type used in where part of update statement
// (must match condidion in org.hibernate.persister.collection.BasicCollectionPersister.doUpdateRows).
// (must match condition in org.hibernate.persister.collection.BasicCollectionPersister#doUpdateRows).
// See HHH-9474
Type whereType;
if ( persister.hasIndex() ) {

View File

@ -108,7 +108,7 @@ public class ThreadLocalSessionContext extends AbstractCurrentSessionContext {
}
private boolean needsWrapping(Session session) {
// try to make sure we don't wrap and already wrapped session
// try to make sure we don't wrap an already wrapped session
if ( Proxy.isProxyClass( session.getClass() ) ) {
final InvocationHandler invocationHandler = Proxy.getInvocationHandler( session );
if ( invocationHandler != null && TransactionProtectionWrapper.class.isInstance( invocationHandler ) ) {
@ -182,7 +182,7 @@ public class ThreadLocalSessionContext extends AbstractCurrentSessionContext {
SESSION_PROXY_INTERFACES,
wrapper
);
// yick! need this for proper serialization/deserialization handling...
// yuck! need this for proper serialization/deserialization handling...
wrapper.setWrapped( wrapped );
return wrapped;
}
@ -315,7 +315,7 @@ public class ThreadLocalSessionContext extends AbstractCurrentSessionContext {
else if ( "getStatistics".equals( methodName )
|| "isOpen".equals( methodName )
|| "getListeners".equals( methodName ) ) {
// allow these to go through the the real session no matter what
// allow these to go through the real session no matter what
LOG.tracef( "Allowing invocation [%s] to proceed to real session", methodName );
}
else if ( !realSession.isOpen() ) {

View File

@ -428,7 +428,7 @@ public abstract class AbstractHANADialect extends Dialect {
// stream passed in via
// PreparedStatement.setCharacterStream(int,Reader,long)
// after the stream has been processed. this causes problems later if we are
// using non-contexual lob creation and HANA then closes our StringReader.
// using non-contextual lob creation and HANA then closes our StringReader.
// see test case LobLocatorTest
private static class HANAClobTypeDescriptor extends ClobTypeDescriptor {
@ -932,8 +932,7 @@ public abstract class AbstractHANADialect extends Dialect {
registerHanaKeywords();
// createBlob() and createClob() are not supported by the HANA JDBC
// driver
// createBlob() and createClob() are not supported by the HANA JDBC driver
getDefaultProperties().setProperty( AvailableSettings.NON_CONTEXTUAL_LOB_CREATION, "true" );
// getGeneratedKeys() is not supported by the HANA JDBC driver

View File

@ -143,7 +143,7 @@ abstract class AbstractTransactSQLDialect extends Dialect {
@Override
public String applyLocksToSql(String sql, LockOptions aliasedLockOptions, Map<String, String[]> keyColumnNames) {
// TODO: merge additional lockoptions support in Dialect.applyLocksToSql
// TODO: merge additional lock options support in Dialect.applyLocksToSql
final Iterator itr = aliasedLockOptions.getAliasLockIterator();
final StringBuilder buffer = new StringBuilder( sql );

View File

@ -438,7 +438,7 @@ public class DB2Dialect extends Dialect {
/**
* {@inheritDoc}
* <p/>
* NOTE : DB2 is know to support parameters in the <tt>SELECT</tt> clause, but only in casted form
* NOTE : DB2 is known to support parameters in the <tt>SELECT</tt> clause, but only in casted form
* (see {@link #requiresCastingOfParametersInSelectClause()}).
*/
@Override
@ -566,7 +566,7 @@ public class DB2Dialect extends Dialect {
// we have one of:
// * ASC + NULLS LAST
// * DESC + NULLS FIRST
// so just drop the null precedence. *NOTE: we could pass along the null precedence here,
// so just drop the null precedence. *NOTE*: we could pass along the null precedence here,
// but only DB2 9.7 or greater understand it; dropping it is more portable across DB2 versions
return super.renderOrderByElement( expression, collation, order, NullPrecedence.NONE );
}

View File

@ -163,7 +163,7 @@ public class IngresDialect extends Dialect {
getDefaultProperties().setProperty( Environment.USE_GET_GENERATED_KEYS, "false" );
// There is no support for a native boolean type that accepts values
// of true, false or unknown. Using the tinyint type requires
// substitions of true and false.
// substitutions of true and false.
getDefaultProperties().setProperty( Environment.QUERY_SUBSTITUTIONS, "true=1,false=0" );
}

View File

@ -42,7 +42,7 @@ public class InterbaseDialect extends Dialect {
};
/**
* Constructs a InterbaseDialect
* Constructs an InterbaseDialect
*/
public InterbaseDialect() {
super();

View File

@ -60,7 +60,7 @@ public class SybaseASE15Dialect extends SybaseDialect {
registerFunction( "coalesce", new VarArgsSQLFunction( "coalesce(", ",", ")" ) );
registerFunction( "col_length", new SQLFunctionTemplate( StandardBasicTypes.INTEGER, "col_length(?1, ?2)" ) );
registerFunction( "col_name", new SQLFunctionTemplate( StandardBasicTypes.STRING, "col_name(?1, ?2)" ) );
// Sybase has created current_date and current_time inplace of getdate()
// Sybase has created current_date and current_time in place of getdate()
registerFunction( "current_time", new NoArgSQLFunction( "current_time", StandardBasicTypes.TIME ) );
registerFunction( "current_date", new NoArgSQLFunction( "current_date", StandardBasicTypes.DATE ) );

View File

@ -44,7 +44,7 @@ public abstract class AbstractAnsiTrimEmulationFunction implements SQLFunction {
@Override
public final String render(Type argumentType, List args, SessionFactoryImplementor factory) throws QueryException {
// According to both the ANSI-SQL and JPA specs, trim takes a variable number of parameters between 1 and 4.
// at least one paramer (trimSource) is required. From the SQL spec:
// at least one parameter (trimSource) is required. From the SQL spec:
//
// <trim function> ::=
// TRIM <left paren> <trim operands> <right paren>

View File

@ -157,7 +157,7 @@ public class StandardAnsiSqlAggregationFunctions {
public Type getReturnType(Type firstArgumentType, Mapping mapping) {
final int jdbcType = determineJdbcTypeCode( firstArgumentType, mapping );
// First allow the actual type to control the return value; the underlying sqltype could
// First allow the actual type to control the return value; the underlying sql type could
// actually be different
if ( firstArgumentType == StandardBasicTypes.BIG_INTEGER ) {
return StandardBasicTypes.BIG_INTEGER;

View File

@ -19,7 +19,7 @@ public class Chache71IdentityColumnSupport extends IdentityColumnSupportImpl {
@Override
public boolean hasDataTypeInIdentityColumn() {
// Whether this dialect has an Identity clause added to the data type or a completely seperate identity
// Whether this dialect has an Identity clause added to the data type or a completely separate identity
// data type
return true;
}

View File

@ -18,7 +18,7 @@ public class CUBRIDLimitHandler extends AbstractLimitHandler {
public static final CUBRIDLimitHandler INSTANCE = new CUBRIDLimitHandler();
private CUBRIDLimitHandler() {
// NOP
// NOOP
}
@Override

View File

@ -17,7 +17,7 @@ public class FirstLimitHandler extends LegacyFirstLimitHandler {
public static final FirstLimitHandler INSTANCE = new FirstLimitHandler();
private FirstLimitHandler() {
// NOP
// NOOP
}
@Override

View File

@ -18,7 +18,7 @@ public class LegacyFirstLimitHandler extends AbstractLimitHandler {
public static final LegacyFirstLimitHandler INSTANCE = new LegacyFirstLimitHandler();
LegacyFirstLimitHandler() {
// NOP
// NOOP
}
@Override

View File

@ -21,7 +21,7 @@ public class NoopLimitHandler extends AbstractLimitHandler {
public static final NoopLimitHandler INSTANCE = new NoopLimitHandler();
private NoopLimitHandler() {
// NOP
// NOOP
}
@Override

View File

@ -21,7 +21,7 @@ public class SQL2008StandardLimitHandler extends AbstractLimitHandler {
* Constructs a SQL2008StandardLimitHandler
*/
private SQL2008StandardLimitHandler() {
// NOP
// NOOP
}
@Override

View File

@ -60,7 +60,7 @@ public class SQLServer2005LimitHandler extends AbstractLimitHandler {
* Constructs a SQLServer2005LimitHandler
*/
public SQLServer2005LimitHandler() {
// NOP
// NOOP
}
@Override
@ -363,7 +363,7 @@ public class SQLServer2005LimitHandler extends AbstractLimitHandler {
}
else {
// rather than taking the first match, we now iterate all matches
// until we determine a match that isn't considered "ignorable'.
// until we determine a match that isn't considered "ignorable".
while ( matcher.find() && matcher.groupCount() > 0 ) {
final int position = matcher.start();
if ( !isPositionIgnorable( ignoreRangeList, position ) ) {

View File

@ -279,7 +279,7 @@ public final class Collections {
}
if ( loadedPersister != null ) {
// we will need to remove ye olde entries
// we will need to remove the old entries
entry.setDoremove( true );
if ( entry.isDorecreate() ) {
LOG.trace( "Forcing collection initialization" );

View File

@ -258,7 +258,7 @@ public final class ForeignKeys {
return true;
}
// todo : shouldnt assumed be revered here?
// todo : shouldn't assumed be reversed here?
return !isTransient( entityName, entity, assumed, session );
}

View File

@ -81,9 +81,9 @@ public final class Nullability {
*
*
* In the previous implementation, not-null stuffs where checked
* filtering by level one only updateable
* filtering by level one only updatable
* or insertable columns. So setting a sub component as update="false"
* has no effect on not-null check if the main component had good checkeability
* has no effect on not-null check if the main component had good checkability
* In this implementation, we keep this feature.
* However, I never see any documentation mentioning that, but it's for
* sure a limitation.
@ -104,7 +104,7 @@ public final class Nullability {
GenerationTiming.NEVER == inMemoryValueGenerationStrategies[i].getGenerationTiming() ) {
final Object value = values[i];
if ( !nullability[i] && value == null ) {
//check basic level one nullablilty
//check basic level one nullability
throw new PropertyValueException(
"not-null property references a null or transient value",
persister.getEntityName(),

View File

@ -352,7 +352,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
return dbValue;
}
else {
// for a mutable natural there is a likelihood that the the information will already be
// for a mutable natural id there is a likelihood that the information will already be
// snapshot-cached.
final int[] props = persister.getNaturalIdentifierProperties();
final Object[] entitySnapshot = getDatabaseSnapshot( id, persister );
@ -530,8 +530,8 @@ public class StatefulPersistenceContext implements PersistenceContext {
When a virtual method is called via an interface the JVM needs to resolve which concrete
implementation to call. This takes CPU cycles and is a performance penalty. It also prevents method
in-ling which further degrades performance. Casting to an implementation and making a direct method call
removes the virtual call, and allows the methods to be in-lined. In this critical code path, it has a very
inlining which further degrades performance. Casting to an implementation and making a direct method call
removes the virtual call, and allows the methods to be inlined. In this critical code path, it has a very
large impact on performance to make virtual method calls.
*/
if (persister.getEntityEntryFactory() instanceof MutableEntityEntryFactory) {
@ -1957,7 +1957,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
persister = locateProperPersister( persister );
// 'justAddedLocally' is meant to handle the case where we would get double stats jounaling
// 'justAddedLocally' is meant to handle the case where we would get double stats journaling
// from a single load event. The first put journal would come from the natural id resolution;
// the second comes from the entity loading. In this condition, we want to avoid the multiple
// 'put' stats incrementing.
@ -2164,7 +2164,7 @@ public class StatefulPersistenceContext implements PersistenceContext {
// todo : couple of things wrong here:
// 1) should be using access strategy, not plain evict..
// 2) should prefer session-cached values if any (requires interaction from removeLocalNaturalIdCrossReference
// 2) should prefer session-cached values if any (requires interaction from removeLocalNaturalIdCrossReference)
persister = locateProperPersister( persister );
final NaturalIdDataAccess naturalIdCacheAccessStrategy = persister.getNaturalIdCacheAccessStrategy();

View File

@ -529,7 +529,7 @@ public class DefaultFlushEntityEventListener implements FlushEntityEventListener
int[] dirty = persister.resolveAttributeIndexes( ( (SelfDirtinessTracker) entity ).$$_hibernate_getDirtyAttributes() );
// HHH-12051 - filter non-updatable attributes
// TODO: add Updateability to EnhancementContext and skip dirty tracking of those attributes
// TODO: add Updatability to EnhancementContext and skip dirty tracking of those attributes
int count = 0;
for ( int i : dirty ) {
if ( persister.getPropertyUpdateability()[i] ) {
@ -573,7 +573,7 @@ public class DefaultFlushEntityEventListener implements FlushEntityEventListener
boolean dirtyCheckPossible = true;
if ( dirtyProperties == null ) {
// Interceptor returned null, so do the dirtycheck ourself, if possible
// Interceptor returned null, so do the dirty check ourselves, if possible
try {
session.getEventListenerManager().dirtyCalculationStart();

View File

@ -415,7 +415,7 @@ public class DefaultMergeEventListener extends AbstractSaveEventListener impleme
//
// This second condition is a special case which allows
// an entity to be merged during the same transaction
// (though during a seperate operation) in which it was
// (though during a separate operation) in which it was
// originally persisted/saved
boolean changed = !persister.getVersionType().isSame(
persister.getVersion( target ),

View File

@ -106,7 +106,7 @@ public class DefaultReplicateEventListener extends AbstractSaveEventListener imp
);
// if can replicate, will result in a SQL UPDATE
// else do nothing (don't even reassociate object!)
// else do nothing (don't even re-associate object!)
if ( canReplicate ) {
performReplication( entity, id, realOldVersion, persister, replicationMode, source );
}

View File

@ -40,12 +40,12 @@ public class DirtyCollectionSearchVisitor extends AbstractVisitor {
final PersistentCollection persistentCollection;
if ( type.isArrayType() ) {
persistentCollection = session.getPersistenceContextInternal().getCollectionHolder( collection );
// if no array holder we found an unwrappered array (this can't occur,
// if no array holder we found an unwrapped array (this can't occur,
// because we now always call wrap() before getting to here)
// return (ah==null) ? true : searchForDirtyCollections(ah, type);
}
else {
// if not wrappered yet, its dirty (this can't occur, because
// if not wrapped yet, it's dirty (this can't occur, because
// we now always call wrap() before getting to here)
// return ( ! (obj instanceof PersistentCollection) ) ?
//true : searchForDirtyCollections( (PersistentCollection) obj, type );

View File

@ -45,26 +45,26 @@ public class OnLockVisitor extends ReattachVisitor {
if ( isOwnerUnchanged( persistentCollection, persister, extractCollectionKeyFromOwner( persister ) ) ) {
// a "detached" collection that originally belonged to the same entity
if ( persistentCollection.isDirty() ) {
throw new HibernateException( "reassociated object has dirty collection" );
throw new HibernateException( "re-associated object has dirty collection" );
}
reattachCollection( persistentCollection, type );
}
else {
// a "detached" collection that belonged to a different entity
throw new HibernateException( "reassociated object has dirty collection reference" );
throw new HibernateException( "re-associated object has dirty collection reference" );
}
}
else {
// a collection loaded in the current session
// can not possibly be the collection belonging
// to the entity passed to update()
throw new HibernateException( "reassociated object has dirty collection reference" );
throw new HibernateException( "re-associated object has dirty collection reference" );
}
}
else {
// brand new collection
//TODO: or an array!! we can't lock objects with arrays now??
throw new HibernateException( "reassociated object has dirty collection reference (or an array)" );
throw new HibernateException( "re-associated object has dirty collection reference (or an array)" );
}
return null;

View File

@ -72,7 +72,7 @@ public abstract class ProxyVisitor extends AbstractVisitor {
}
else {
if ( !isCollectionSnapshotValid( collection ) ) {
throw new HibernateException( "could not reassociate uninitialized transient collection" );
throw new HibernateException( "could not re-associate uninitialized transient collection" );
}
CollectionPersister collectionPersister = session.getFactory()
.getCollectionPersister( collection.getRole() );

View File

@ -44,13 +44,13 @@ class HqlLexer extends HqlBaseLexer {
@Override
public void panic() {
//overriden to avoid System.exit
//overridden to avoid System.exit
panic( "CharScanner: panic" );
}
@Override
public void panic(String s) {
//overriden to avoid System.exit
//overridden to avoid System.exit
throw new QueryException( s );
}
}

View File

@ -795,7 +795,7 @@ public class HqlSqlWalker extends HqlSqlBaseWalker implements ErrorReporter, Par
}
// After that, process the JOINs.
// Invoke a delegate to do the work, as this is farily complex.
// Invoke a delegate to do the work, as this is fairly complex.
JoinProcessor joinProcessor = new JoinProcessor( this );
joinProcessor.processJoins( qn );
@ -952,7 +952,7 @@ public class HqlSqlWalker extends HqlSqlBaseWalker implements ErrorReporter, Par
parameterSpecs.add( 0, paramSpec );
if ( sessionFactoryHelper.getFactory().getDialect().requiresCastingOfParametersInSelectClause() ) {
// we need to wrtap the param in a cast()
// we need to wrap the param in a cast()
MethodNode versionMethodNode = (MethodNode) getASTFactory().create(
HqlSqlTokenTypes.METHOD_CALL,
"("
@ -1348,7 +1348,7 @@ public class HqlSqlWalker extends HqlSqlBaseWalker implements ErrorReporter, Par
}
public boolean isShallowQuery() {
// select clauses for insert statements should alwasy be treated as shallow
// select clauses for insert statements should always be treated as shallow
return getStatementType() == INSERT || queryTranslatorImpl.isShallowQuery();
}

View File

@ -198,7 +198,7 @@ public class QueryTranslatorImpl implements FilterTranslator {
// command executions.
//
// Possible to just move the sql generation for dml stuff, but for
// consistency-sake probably best to just move responsiblity for
// consistency-sake probably best to just move responsibility for
// the generation phase completely into the delegates
// (QueryLoader/StatementExecutor) themselves. Also, not sure why
// QueryLoader currently even has a dependency on this at all; does
@ -538,7 +538,7 @@ public class QueryTranslatorImpl implements FilterTranslator {
@Override
public void validateScrollability() throws HibernateException {
// Impl Note: allows multiple collection fetches as long as the
// entire fecthed graph still "points back" to a single
// entire fetched graph still "points back" to a single
// root entity for return
errorIfDML();

View File

@ -337,7 +337,7 @@ public class SqlGenerator extends SqlGeneratorBase implements ErrorReporter {
@Override
protected void fromFragmentSeparator(AST a) {
// check two "adjecent" nodes at the top of the from-clause tree
// check two "adjacent" nodes at the top of the from-clause tree
AST next = a.getNextSibling();
if ( next == null || !hasText( a ) ) {
return;
@ -373,7 +373,7 @@ public class SqlGenerator extends SqlGeneratorBase implements ErrorReporter {
else if ( right.getRealOrigin() == left ||
( right.getRealOrigin() != null && right.getRealOrigin() == left.getRealOrigin() ) ) {
// right represents a joins originating from left; or
// both right and left reprersent joins originating from the same FromElement
// both right and left represent joins originating from the same FromElement
if ( right.getJoinSequence() != null && right.getJoinSequence().isThetaStyle() ) {
writeCrossJoinSeparator();
}

View File

@ -25,7 +25,7 @@ public abstract class AbstractNullnessCheckNode extends UnaryLogicOperatorNode {
@Override
public void initialize() {
// TODO : this really needs to be delayed until after we definitively know the operand node type;
// where this is currently a problem is parameters for which where we cannot unequivocally
// where this is currently a problem is parameters for which we cannot unequivocally
// resolve an expected type
Type operandType = extractDataType( getOperand() );
if ( operandType == null ) {
@ -135,7 +135,7 @@ public abstract class AbstractNullnessCheckNode extends UnaryLogicOperatorNode {
return splits;
}
else {
throw new HibernateException( "dont know how to extract row value elements from node : " + operand );
throw new HibernateException( "don't know how to extract row value elements from node : " + operand );
}
}
}

View File

@ -167,7 +167,7 @@ public class BinaryArithmeticOperatorNode extends AbstractSelectExpression
boolean rhsIsDateTime = isDateTimeType( rhType );
// handle the (assumed) valid cases:
// #1 - the only valid datetime addition synatx is one or the other is a datetime (but not both)
// #1 - the only valid datetime addition syntax is one or the other is a datetime (but not both)
if ( getType() == HqlSqlTokenTypes.PLUS ) {
// one or the other needs to be a datetime for us to get into this method in the first place...
return lhsIsDateTime ? lhType : rhType;

View File

@ -67,7 +67,7 @@ public class BinaryLogicOperatorNode extends AbstractSelectExpression implements
protected final void mutateRowValueConstructorSyntaxesIfNecessary(Type lhsType, Type rhsType) {
// TODO : this really needs to be delayed until after we definitively know all node types
// where this is currently a problem is parameters for which where we cannot unequivocally
// where this is currently a problem is parameters for which we cannot unequivocally
// resolve an expected type
SessionFactoryImplementor sessionFactory = getSessionFactoryHelper().getFactory();
if ( lhsType != null && rhsType != null ) {

View File

@ -53,7 +53,7 @@ public class CollectionSizeNode extends SqlNode implements SelectExpression {
// where <owner-key-column> = alias_.<collection-key-column>
// Note that `collectionPropertyMapping.toColumns(.., COLLECTION_SIZE)` returns the complete `count(...)` SQL
// expression, hence he expectation for a single expression regardless of the number of columns in the key.
// expression, hence the expectation for a single expression regardless of the number of columns in the key.
final String collectionTableAlias = collectionOwnerFromElement.getFromClause()
.getAliasGenerator()

View File

@ -121,7 +121,7 @@ public class ComponentJoin extends FromElement {
public String renderScalarIdentifierSelect(int i) {
String[] cols = getBasePropertyMapping().toColumns( getTableAlias(), getComponentProperty() );
StringBuilder buf = new StringBuilder();
// For property references generate <tablealias>.<columnname> as <projectionalias>
// For property references generate <tableAlias>.<columnName> as <projectionAlias>
for ( int j = 0; j < cols.length; j++ ) {
final String column = cols[j];
if ( j > 0 ) {

View File

@ -162,7 +162,7 @@ public class DotNode extends FromReferenceNode implements DisplayableNode, Selec
// Set the attributes of the property reference expression.
String propName = property.getText();
propertyName = propName;
// If the uresolved property path isn't set yet, just use the property name.
// If the unresolved property path isn't set yet, just use the property name.
if ( propertyPath == null ) {
propertyPath = propName;
}
@ -559,7 +559,7 @@ public class DotNode extends FromReferenceNode implements DisplayableNode, Selec
return true;
}
// otherwise (subquery case) dont reuse the fromElement if we are processing the from-clause of the subquery
// otherwise (subquery case) don't reuse the fromElement if we are processing the from-clause of the subquery
return getWalker().getCurrentClauseType() != SqlTokenTypes.FROM;
}

View File

@ -176,7 +176,7 @@ public class EntityJoinFromElement extends FromElement {
else {
// We know there is a fromFragment and that we shouldn't render a table group
// This means the entity is polymorphic and the entity join is an inner join
// We move the with clause stuff to the where clause but still need to have a valid on condition
// We move the with clause stuff to the where clause but still need to have a valid ON condition
buffer.append( "1=1" );
buffer.append( fromFragment );

View File

@ -413,7 +413,7 @@ public class FromElement extends HqlSqlWalkerNode implements DisplayableNode, Pa
else if ( !getWalker().isInFrom() ) {
// HHH-276 : implied joins in a subselect where clause - The destination needs to be added
// to the destination's from clause.
getFromClause().addChild( this ); // Not sure if this is will fix everything, but it works.
getFromClause().addChild( this ); // Not sure if this will fix everything, but it works.
}
else {
// Otherwise, the destination node was implied by the FROM clause and the FROM clause processor
@ -612,7 +612,7 @@ public class FromElement extends HqlSqlWalkerNode implements DisplayableNode, Pa
}
public void setInProjectionList(boolean inProjectionList) {
// Do nothing, eplicit from elements are *always* in the projection list.
// Do nothing, explicit from elements are *always* in the projection list.
}
public boolean inProjectionList() {

View File

@ -364,7 +364,7 @@ class FromElementType {
}
this.queryableCollection = queryableCollection;
if ( !queryableCollection.isOneToMany() ) {
// For many-to-many joins, use the tablename from the queryable collection for the default text.
// For many-to-many joins, use the table name from the queryable collection for the default text.
fromElement.setText( queryableCollection.getTableName() + " " + getTableAlias() );
}
}
@ -553,7 +553,7 @@ class FromElementType {
// indexed, many-to-many collections must be treated specially here if the property to
// be mapped touches on the index as we must adjust the alias to use the alias from
// the association table (which i different than the one passed in)
// the association table (which is different than the one passed in)
if ( queryableCollection.isManyToMany()
&& queryableCollection.hasIndex()
&& SPECIAL_MANY2MANY_TREATMENT_FUNCTION_NAMES.contains( propertyName ) ) {

View File

@ -48,7 +48,7 @@ public class IdentNode extends FromReferenceNode implements SelectExpression {
public void resolveIndex(AST parent) throws SemanticException {
// An ident node can represent an index expression if the ident
// represents a naked property ref
// *Note: this makes the assumption (which is currently the case
// *Note*: this makes the assumption (which is currently the case
// in the hql-sql grammar) that the ident is first resolved
// itself (addrExpr -> resolve()). The other option, if that
// changes, is to call resolve from here; but it is
@ -373,7 +373,7 @@ public class IdentNode extends FromReferenceNode implements SelectExpression {
public void setScalarColumnText(int i) throws SemanticException {
if (nakedPropertyRef) {
// do *not* over-write the column text, as that has already been
// do *not* overwrite the column text, as that has already been
// "rendered" during resolve
ColumnHelper.generateSingleScalarColumn(this, i);
}

View File

@ -203,13 +203,13 @@ public class IntoClause extends HqlSqlWalkerNode implements DisplayableNode {
// really there are two situations where it should be ok to allow the insertion
// into properties defined on a superclass:
// 1) union-subclass with an abstract root entity
// 2) discrim-subclass
// 2) discriminator-subclass
//
// #1 is handled already because of the fact that
// UnionSubclassPersister already always returns 0
// for this call...
//
// we may want to disallow it for discrim-subclass just for
// we may want to disallow it for discriminator-subclass just for
// consistency-sake (currently does not work anyway)...
return persister.getSubclassPropertyTableNumber( propertyName ) != 0;
}

View File

@ -72,7 +72,7 @@ public class MapKeyEntityFromElement extends FromElement {
rhsAlias
);
// String[] joinColumns = determineJoinColuns( collectionPersister, joinTableAlias );
// String[] joinColumns = determineJoinColumns( collectionPersister, joinTableAlias );
// todo : assumes columns, no formulas
String[] joinColumns = collectionPersister.getIndexColumnNames( collectionFromElement.getCollectionTableAlias() );

View File

@ -117,7 +117,7 @@ public class SelectClause extends SelectExpressionList {
throw new IllegalStateException( "SelectClause was already prepared!" );
}
//explicit = true; // This is an explict Select.
//explicit = true; // This is an explicit Select.
//ArrayList sqlResultTypeList = new ArrayList();
ArrayList queryReturnTypeList = new ArrayList();
@ -177,7 +177,7 @@ public class SelectClause extends SelectExpressionList {
}
}
//init the aliases, after initing the constructornode
//init the aliases, after initializing the constructorNode
initAliases( selectExpressions );
if ( !getWalker().isShallowQuery() ) {
@ -196,7 +196,7 @@ public class SelectClause extends SelectExpressionList {
FromElement origin = null;
if ( fromElement.getRealOrigin() == null ) {
// work around that crazy issue where the tree contains
// "empty" FromElements (no text); afaict, this is caused
// "empty" FromElements (no text); AFAICT, this is caused
// by FromElementFactory.createCollectionJoin()
if ( fromElement.getOrigin() == null ) {
throw new QueryException( "Unable to determine origin of join fetch [" + fromElement.getDisplayText() + "]" );

View File

@ -43,7 +43,7 @@ public final class ColumnHelper {
ASTFactory factory = node.getASTFactory();
AST n = node;
n.setText( sqlColumns[0] ); // Use the DOT node to emit the first column name.
// Create the column names, folled by the column aliases.
// Create the column names, followed by the column aliases.
for ( int j = 0; j < sqlColumns.length; j++ ) {
if ( j > 0 ) {
n = ASTUtil.createSibling( factory, SqlTokenTypes.SQL_TOKEN, sqlColumns[j], n );

View File

@ -228,7 +228,7 @@ public class JoinProcessor implements SqlTokenTypes {
while ( liter.hasNext() ) {
FromElement fromElement = liter.next();
// We found an implied from element that is used in the WITH clause of another from element, so it need to become part of it's join sequence
// We found an implied from element that is used in the WITH clause of another from element, so it needs to become part of its join sequence
if ( fromElement instanceof ImpliedFromElement
&& fromElement.getOrigin().getWithClauseFragment() != null
&& fromElement.getOrigin().getWithClauseFragment().contains( fromElement.getTableAlias() ) ) {

View File

@ -75,7 +75,7 @@ public class SessionFactoryHelper {
if ( persister.getDiscriminatorType() != null ) {
String discrimColumnName = persister.getDiscriminatorColumnName();
// Needed the "clazz_" check to work around union-subclasses
// TODO : is there a way to tell whether a persister is truly discrim-column based inheritence?
// TODO : is there a way to tell whether a persister is truly discriminator-column based inheritance?
if ( discrimColumnName != null && !"clazz_".equals( discrimColumnName ) ) {
return true;
}

View File

@ -165,7 +165,7 @@ public class FromParser implements Parser {
// (AS is always optional, for consistency with SQL/OQL)
// process the "new" HQL style where aliases are assigned
// _after_ the class name or path expression ie. using
// _after_ the class name or path expression, ie. using
// the AS construction
if ( entityName != null ) {
@ -315,8 +315,8 @@ public class FromParser implements Parser {
public void end(QueryTranslatorImpl q) {
if ( afterMemberDeclarations ) {
//The exception throwned by the AST query translator contains the error token location, represented by line and column,
//but it hard to get that info here.
//The exception thrown by the AST query translator contains the error token location, represented by line and column,
//but it is hard to get that info here.
throw new QueryException( "alias not specified for IN" );
}
}

View File

@ -30,13 +30,13 @@ import org.hibernate.type.Type;
public class PathExpressionParser implements Parser {
//TODO: this class does too many things! we need a different
//kind of path expression parser for each of the diffferent
//kind of path expression parser for each of the different
//ways in which path expressions can occur
//We should actually rework this class to not implement Parser
//and just process path expressions in the most convenient way.
//The class is now way to complex!
//The class is now way too complex!
private int dotcount;
private String currentName;

View File

@ -834,7 +834,7 @@ public class QueryTranslatorImpl extends BasicLoader implements FilterTranslator
//there _was_ a select clause
Iterator iter = scalarSelectTokens.iterator();
int c = 0;
boolean nolast = false; //real hacky...
boolean nolast = false; //really hacky...
int parenCount = 0; // used to count the nesting of parentheses
while ( iter.hasNext() ) {
Object next = iter.next();

View File

@ -40,7 +40,7 @@ public abstract class AbstractUUIDGenerator implements IdentifierGenerator {
/**
* Unique across JVMs on this machine (unless they load this class
* in the same quater second - very unlikely)
* in the same quarter second - very unlikely)
*/
protected int getJVM() {
return JVM;

View File

@ -318,7 +318,7 @@ public class MultipleHiLoPerTableGenerator implements PersistentIdentifierGenera
if ( table == null ) {
table = namespace.createTable( qualifiedTableName.getObjectName(), false );
// todo : note sure the best solution here. do we add the columns if missing? other?
// todo : not sure the best solution here. do we add the columns if missing? other?
table.setPrimaryKey( new PrimaryKey( table ) );
final Column pkColumn = new ExportableColumn(

View File

@ -89,7 +89,7 @@ public class HiLoOptimizer extends AbstractOptimizer {
}
// upperLimit defines the upper end of the bucket values
generationState.upperLimit = generationState.lastSourceValue.copy().multiplyBy( incrementSize ).increment();
// initialize value to the low end of the bucket
// initialize value to the lower end of the bucket
generationState.value = generationState.upperLimit.copy().subtract( incrementSize );
}
else if ( ! generationState.upperLimit.gt( generationState.value ) ) {

View File

@ -34,7 +34,7 @@ public final class NoopOptimizer extends AbstractOptimizer {
// IMPL NOTE : this method is called concurrently and is
// not synchronized. It is very important to work on the
// local variable: the field lastSourceValue is not
// reliable as it might be mutated by multipled threads.
// reliable as it might be mutated by multiple threads.
// The lastSourceValue field is only accessed by tests,
// so this is not a concern.
IntegralDataTypeHolder value = callback.getNextValue();

View File

@ -72,7 +72,7 @@ public class PooledOptimizer extends AbstractOptimizer implements InitialValueAw
if ( generationState.hiValue == null ) {
generationState.value = callback.getNextValue();
// unfortunately not really safe to normalize this
// to 1 as an initial value like we do the others
// to 1 as an initial value like we do for the others
// because we would not be able to control this if
// we are using a sequence...
if ( generationState.value.lt( 1 ) ) {

View File

@ -737,7 +737,7 @@ public class TableGenerator implements PersistentIdentifierGenerator, Configurab
if ( table == null ) {
table = namespace.createTable( qualifiedTableName.getObjectName(), false );
// todo : note sure the best solution here. do we add the columns if missing? other?
// todo : not sure the best solution here. do we add the columns if missing? other?
final Column segmentColumn = new ExportableColumn(
database,
table,

View File

@ -197,7 +197,7 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont
this.transactionCoordinator = sharedOptions.getTransactionCoordinator();
this.jdbcCoordinator = sharedOptions.getJdbcCoordinator();
// todo : "wrap" the transaction to no-op comit/rollback attempts?
// todo : "wrap" the transaction to no-op commit/rollback attempts?
this.currentHibernateTransaction = sharedOptions.getTransaction();
if ( sharedOptions.shouldAutoJoinTransactions() ) {
@ -243,7 +243,7 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont
private StatementInspector interpret(StatementInspector statementInspector) {
if ( statementInspector == null ) {
// If there is no StatementInspector specified, map to the call
// to the (deprecated) Interceptor #onPrepareStatement method
// to the (deprecated) Interceptor#onPrepareStatement method
return (StatementInspector) interceptor::onPrepareStatement;
}
return statementInspector;
@ -286,7 +286,7 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont
@Override
public UUID getSessionIdentifier() {
if ( this.sessionIdentifier == null ) {
//Lazily initialized: otherwise all the UUID generations will cause of significant amount of contention.
//Lazily initialized: otherwise all the UUID generations will cause a significant amount of contention.
this.sessionIdentifier = StandardRandomStrategy.INSTANCE.generateUUID( null );
}
return sessionIdentifier;
@ -853,7 +853,7 @@ public abstract class AbstractSharedSessionContract implements SharedSessionCont
}
}
else if ( queryPlan.getTranslators()[0].getReturnTypes().length == 1 ) {
// if we have only a single return expression, its java type should match with the requested type
// if we have only a single return expression, its java type should match the requested type
final Type queryResultType = queryPlan.getTranslators()[0].getReturnTypes()[0];
if ( !resultClass.isAssignableFrom( queryResultType.getReturnedClass() ) ) {
throw new IllegalArgumentException(

View File

@ -447,7 +447,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
public Session openSession() throws HibernateException {
final CurrentTenantIdentifierResolver currentTenantIdentifierResolver = getCurrentTenantIdentifierResolver();
//We can only use reuse the defaultSessionOpenOptions as a constant when there is no TenantIdentifierResolver
//We can only reuse the defaultSessionOpenOptions as a constant when there is no TenantIdentifierResolver
if ( currentTenantIdentifierResolver != null ) {
return this.withOptions().openSession();
}
@ -458,7 +458,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
public Session openTemporarySession() throws HibernateException {
final CurrentTenantIdentifierResolver currentTenantIdentifierResolver = getCurrentTenantIdentifierResolver();
//We can only use reuse the defaultSessionOpenOptions as a constant when there is no TenantIdentifierResolver
//We can only reuse the defaultSessionOpenOptions as a constant when there is no TenantIdentifierResolver
if ( currentTenantIdentifierResolver != null ) {
return buildTemporarySessionOpenOptions()
.openSession();
@ -1088,7 +1088,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
return interceptor;
}
// prefer the SF-scoped interceptor, prefer that to any Session-scoped interceptor prototype
// prefer the SessionFactory-scoped interceptor, prefer that to any Session-scoped interceptor prototype
final Interceptor optionsInterceptor = options.getInterceptor();
if ( optionsInterceptor != null && optionsInterceptor != EmptyInterceptor.INSTANCE ) {
return optionsInterceptor;
@ -1299,7 +1299,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
@SuppressWarnings("unchecked")
public T connectionReleaseMode(ConnectionReleaseMode connectionReleaseMode) {
// NOTE : Legacy behavior (when only ConnectionReleaseMode was exposed) was to always acquire a
// Connection using ConnectionAcquisitionMode.AS_NEEDED..
// Connection using ConnectionAcquisitionMode.AS_NEEDED.
final PhysicalConnectionHandlingMode handlingMode = PhysicalConnectionHandlingMode.interpret(
ConnectionAcquisitionMode.AS_NEEDED,
@ -1367,7 +1367,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
@SuppressWarnings("unchecked")
public T clearEventListeners() {
if ( listeners == null ) {
//Needs to initialize explicitly to an empty list as otherwise "null" immplies the default listeners will be applied
//Needs to initialize explicitly to an empty list as otherwise "null" implies the default listeners will be applied
this.listeners = new ArrayList<SessionEventListener>( 3 );
}
else {

View File

@ -1915,7 +1915,7 @@ public class SessionImpl
}
// Since isLookupByNaturalKey is true there can be only one CriterionEntry and getCriterion() will
// return an instanceof NaturalIdentifier
// return an instance of NaturalIdentifier
final CriterionEntry criterionEntry = criteria.iterateExpressionEntries().next();
final NaturalIdentifier naturalIdentifier = (NaturalIdentifier) criterionEntry.getCriterion();
@ -1936,7 +1936,7 @@ public class SessionImpl
final Object naturalIdValue = naturalIdValues.get( naturalIdProperty );
if ( naturalIdValue == null ) {
// A NaturalId property is missing from the critera query, can't use NaturalIdLoadAccess
// A NaturalId property is missing from the criteria query, can't use NaturalIdLoadAccess
return null;
}
@ -3323,7 +3323,7 @@ public class SessionImpl
return loadAccess.load( (Serializable) primaryKey );
}
catch ( EntityNotFoundException ignored ) {
// DefaultLoadEventListener.returnNarrowedProxy may throw ENFE (see HHH-7861 for details),
// DefaultLoadEventListener#returnNarrowedProxy() may throw ENFE (see HHH-7861 for details),
// which find() should not throw. Find() should return null if the entity was not found.
if ( log.isDebugEnabled() ) {
String entityName = entityClass != null ? entityClass.getName(): null;
@ -3345,7 +3345,7 @@ public class SessionImpl
}
catch ( JDBCException e ) {
if ( accessTransaction().isActive() && accessTransaction().getRollbackOnly() ) {
// Assume this is the similar to the WildFly / IronJacamar "feature" described under HHH-12472.
// Assume this is similar to the WildFly / IronJacamar "feature" described under HHH-12472.
// Just log the exception and return null.
if ( log.isDebugEnabled() ) {
log.debug( "JDBCException was thrown for a transaction marked for rollback; " +
@ -3788,9 +3788,9 @@ public class SessionImpl
loadQueryInfluencers = (LoadQueryInfluencers) ois.readObject();
// LoadQueryInfluencers.getEnabledFilters() tries to validate each enabled
// filter, which will fail when called before FilterImpl.afterDeserialize( factory );
// Instead lookup the filter by name and then call FilterImpl.afterDeserialize( factory ).
// LoadQueryInfluencers#getEnabledFilters() tries to validate each enabled
// filter, which will fail when called before FilterImpl#afterDeserialize( factory );
// Instead lookup the filter by name and then call FilterImpl#afterDeserialize( factory ).
for ( String filterName : loadQueryInfluencers.getEnabledFilterNames() ) {
( (FilterImpl) loadQueryInfluencers.getEnabledFilter( filterName ) ).afterDeserialize( getFactory() );
}

View File

@ -156,7 +156,7 @@ public final class StringHelper {
// enclosed in parentheses (HHH-10383)
// Examples:
// 1) "... IN (?1", we assume that "?1" does not need to be enclosed because there
// there is already a right-parenthesis; we assume there will be a matching right-parenthesis.
// is already a right-parenthesis; we assume there will be a matching right-parenthesis.
// 2) "... IN ?1", we assume that "?1" needs to be enclosed in parentheses, because there
// is no left-parenthesis.
@ -441,8 +441,8 @@ public final class StringHelper {
if ( string == null ) {
return 0;
}
// Impl note: takes advantage of the fact that an escpaed single quote
// embedded within a quote-block can really be handled as two seperate
// Impl note: takes advantage of the fact that an escaped single quote
// embedded within a quote-block can really be handled as two separate
// quote-blocks for the purposes of this method...
int count = 0;
int stringLength = string.length();
@ -601,7 +601,7 @@ public final class StringHelper {
*/
private static String cleanAlias(String alias) {
char[] chars = alias.toCharArray();
// short cut check...
// shortcut check...
if ( !Character.isLetter( chars[0] ) ) {
for ( int i = 1; i < chars.length; i++ ) {
// as soon as we encounter our first letter, return the substring

View File

@ -242,7 +242,7 @@ public final class PrimitiveWrapperHelper {
return (PrimitiveWrapperDescriptor<X>) DoubleDescriptor.INSTANCE;
}
// most likely void.class, which we can't really handle here
// most likely Void.class, which we can't really handle here
throw new IllegalArgumentException( "Unrecognized wrapper type class : " + wrapperClass.getName() );
}

View File

@ -31,7 +31,7 @@ public class ErrorLogger implements ErrorHandler, Serializable {
ErrorLogger.class.getName()
);
// lazily initalized
// lazily initialized
private List<SAXParseException> errors;
private String file;

View File

@ -199,7 +199,7 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
Map mergedIntegrationSettings = null;
Properties properties = persistenceUnit.getProperties();
if ( properties != null ) {
// original integratin setting entries take precedence
// original integration setting entries take precedence
mergedIntegrationSettings = new HashMap( properties );
mergedIntegrationSettings.putAll( integrationSettings );
}
@ -884,7 +884,7 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
// `IS_JTA_TXN_COORD` is a value set during `#normalizeTransactionCoordinator` to indicate whether
// the execution environment "is JTA" as best as it can tell..
//
// we use this value when JTA was not explicitly specified in regards the DataSource
// we use this value when JTA was not explicitly specified in regards to the DataSource
final boolean isJtaTransactionCoordinator = (boolean) mergedSettings.configurationValues.remove( IS_JTA_TXN_COORD );
final boolean isJta = useJtaDataSource == null ? isJtaTransactionCoordinator : useJtaDataSource;

View File

@ -36,7 +36,7 @@ public class EnhancingClassTransformerImpl implements ClassTransformer {
byte[] classfileBuffer) throws IllegalClassFormatException {
// The first design had the enhancer as a class variable. That approach had some goods and bads.
// We don't had to create an enhancer for each class, but on the other end it would stay in memory forever.
		// We don't have to create an enhancer for each class, but on the other hand it would stay in memory forever.
// It also assumed that all calls come from the same class loader, which is fair, but this makes it more robust.
try {

View File

@ -38,7 +38,7 @@ public final class XmlHelper {
return null;
}
// getElementsByTagName gives the corresponding elements in the whole
// descendance. We want only children
		// set of descendants. We want only children
NodeList children = element.getChildNodes();
ArrayList goodChildren = new ArrayList();

View File

@ -1975,7 +1975,7 @@ public abstract class Loader {
final String result = persister.getSubclassForDiscriminatorValue( discriminatorValue );
if ( result == null ) {
//woops we got an instance of another class hierarchy branch
//whoops we got an instance of another class hierarchy branch
throw new WrongClassException(
"Discriminator: " + discriminatorValue,
id,

View File

@ -49,7 +49,7 @@ public class CriteriaLoader extends OuterJoinLoader {
// interface
//NOTE: unlike all other Loaders, this one is NOT
// multithreaded, or cacheable!!
// multi-threaded, or cacheable!!
private final CriteriaQueryTranslator translator;
private final Set<Serializable> querySpaces;

View File

@ -213,7 +213,7 @@ public class CriteriaQueryTranslator implements CriteriaQuery {
}
if ( parent.equals( rootCriteria ) ) {
// if its the root criteria, we are done
// if it's the root criteria, we are done
return path;
}
else {
@ -255,7 +255,7 @@ public class CriteriaQueryTranslator implements CriteriaQuery {
final AssociationType atype = (AssociationType) type;
final CollectionType ctype = type.isCollectionType() ? (CollectionType)type : null;
final Type elementType = (ctype != null) ? ctype.getElementType( sessionFactory ) : null;
// is the association a collection of components or value-types? (i.e a colloction of valued types?)
		// is the association a collection of components or value-types? (i.e. a collection of valued types?)
if ( ctype != null && elementType.isComponentType() ) {
provider = new ComponentCollectionCriteriaInfoProvider( helper.getCollectionPersister(ctype.getRole()) );
}

View File

@ -52,7 +52,7 @@ import org.hibernate.type.Type;
*/
public class CustomLoader extends Loader {
// Currently *not* cachable if autodiscover types is in effect (e.g. "select * ...")
// Currently *not* cacheable if auto-discover types are in effect (e.g. "select * ...")
private final String sql;
private final Set<Serializable> querySpaces = new HashSet<>();

View File

@ -74,8 +74,8 @@ public class SQLQueryParser {
return processedSql;
}
// TODO: should "record" how many properties we have reffered to - and if we
// don't get'em'all we throw an exception! Way better than trial and error ;)
// TODO: should "record" how many properties we have referred to - and if we
// don't get them all we throw an exception! Way better than trial and error ;)
protected String substituteBrackets(String sqlQuery) throws QueryException {
if ( PREPARED_STATEMENT_PATTERN.matcher( sqlQuery.trim() ).matches() ) {
@ -94,7 +94,7 @@ public class SQLQueryParser {
break;
}
// apend everything up until the next encountered open brace
// append everything up until the next encountered open brace
result.append( sqlQuery.substring( curr, left ) );
if ( ( right = sqlQuery.indexOf( '}', left + 1 ) ) < 0 ) {
@ -206,7 +206,7 @@ public class SQLQueryParser {
else {
String[] columnAliases;
// Let return-propertys override whatever the persister has for aliases.
// Let return-properties override whatever the persister has for aliases.
columnAliases = ( String[] ) fieldResults.get(propertyName);
if ( columnAliases==null ) {
columnAliases = collectionPersister.getCollectionPropertyColumnAliases( propertyName, collectionSuffix );
@ -247,7 +247,7 @@ public class SQLQueryParser {
String[] columnAliases;
// Let return-propertys override whatever the persister has for aliases.
			// Let return-properties override whatever the persister has for aliases.
columnAliases = (String[]) fieldResults.get( propertyName );
if ( columnAliases == null ) {
columnAliases = persister.getSubclassPropertyColumnAliases( propertyName, suffix );

View File

@ -565,7 +565,7 @@ public class SQLQueryReturnProcessor {
throw new HibernateException( "Owner alias [" + ownerAlias + "] is unknown for alias [" + alias + "]" );
}
// If this return's alias has not been processed yet, do so b4 further processing of this return
// If this return's alias has not been processed yet, do so before further processing of this return
if ( !alias2Persister.containsKey( ownerAlias ) ) {
NativeSQLQueryNonScalarReturn ownerReturn = ( NativeSQLQueryNonScalarReturn ) alias2Return.get(ownerAlias);
processReturn( ownerReturn );

Some files were not shown because too many files have changed in this diff Show More