HHH-8741 - More checkstyle cleanups

This commit is contained in:
Steve Ebersole 2013-11-25 01:07:35 -06:00
parent 8fe5460ec0
commit 783831f113
51 changed files with 1108 additions and 900 deletions

View File

@@ -22,7 +22,6 @@ package org.hibernate.engine.query.spi;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.persistence.AttributeNode;
@@ -44,28 +43,26 @@ import org.hibernate.type.Type;
* Encapsulates a JPA EntityGraph provided through a JPQL query hint. Converts the fetches into a list of AST
* FromElements. The logic is kept here as much as possible in order to make it easy to remove this in the future,
* once our AST is improved and this "hack" is no longer needed.
*
*
* @author Brett Meyer
*/
public class EntityGraphQueryHint {
private final EntityGraph<?> originEntityGraph;
public EntityGraphQueryHint( EntityGraph<?> originEntityGraph ) {
public EntityGraphQueryHint(EntityGraph<?> originEntityGraph) {
this.originEntityGraph = originEntityGraph;
}
public List<FromElement> toFromElements(FromClause fromClause, HqlSqlWalker walker) {
// If a role already has an explicit fetch in the query, skip it in the graph.
Map<String, FromElement> explicitFetches = new HashMap<String, FromElement>();
Iterator iter = fromClause.getFromElements().iterator();
while ( iter.hasNext() ) {
final FromElement fromElement = ( FromElement ) iter.next();
if (fromElement.getRole() != null) {
for ( Object o : fromClause.getFromElements() ) {
final FromElement fromElement = (FromElement) o;
if ( fromElement.getRole() != null ) {
explicitFetches.put( fromElement.getRole(), fromElement );
}
}
return getFromElements(
originEntityGraph.getAttributeNodes(),
fromClause.getFromElement(),
@@ -74,7 +71,7 @@ public class EntityGraphQueryHint {
explicitFetches
);
}
private List<FromElement> getFromElements(
List attributeNodes,
FromElement origin,
@@ -82,34 +79,35 @@ public class EntityGraphQueryHint {
HqlSqlWalker walker,
Map<String, FromElement> explicitFetches) {
final List<FromElement> fromElements = new ArrayList<FromElement>();
for (Object obj : attributeNodes) {
for ( Object obj : attributeNodes ) {
final AttributeNode<?> attributeNode = (AttributeNode<?>) obj;
final String attributeName = attributeNode.getAttributeName();
final String className = origin.getClassName();
final String role = className + "." + attributeName;
final String classAlias = origin.getClassAlias();
final String originTableAlias = origin.getTableAlias();
Type propertyType = origin.getPropertyType( attributeName, attributeName );
final Type propertyType = origin.getPropertyType( attributeName, attributeName );
try {
FromElement fromElement = null;
if (!explicitFetches.containsKey( role )) {
if ( !explicitFetches.containsKey( role ) ) {
if ( propertyType.isEntityType() ) {
final EntityType entityType = (EntityType) propertyType;
final String[] columns = origin.toColumns( originTableAlias, attributeName, false );
final String tableAlias = walker.getAliasGenerator().createName(
entityType.getAssociatedEntityName() );
final FromElementFactory fromElementFactory = new FromElementFactory( fromClause, origin,
attributeName, classAlias, columns, false);
entityType.getAssociatedEntityName()
);
final FromElementFactory fromElementFactory = new FromElementFactory(
fromClause, origin,
attributeName, classAlias, columns, false
);
final JoinSequence joinSequence = walker.getSessionFactoryHelper().createJoinSequence(
false, entityType, tableAlias, JoinType.LEFT_OUTER_JOIN, columns );
false, entityType, tableAlias, JoinType.LEFT_OUTER_JOIN, columns
);
fromElement = fromElementFactory.createEntityJoin(
entityType.getAssociatedEntityName(),
tableAlias,
@@ -121,25 +119,32 @@ public class EntityGraphQueryHint {
null
);
}
else if ( propertyType.isCollectionType() ) {
final String[] columns = origin.toColumns( originTableAlias, attributeName, false );
final FromElementFactory fromElementFactory = new FromElementFactory( fromClause, origin,
attributeName, classAlias, columns, false);
else if ( propertyType.isCollectionType() ) {
final String[] columns = origin.toColumns( originTableAlias, attributeName, false );
final FromElementFactory fromElementFactory = new FromElementFactory(
fromClause, origin,
attributeName, classAlias, columns, false
);
final QueryableCollection queryableCollection = walker.getSessionFactoryHelper()
.requireQueryableCollection( role );
fromElement = fromElementFactory.createCollection(
queryableCollection, role, JoinType.LEFT_OUTER_JOIN, true, false ) ;
queryableCollection, role, JoinType.LEFT_OUTER_JOIN, true, false
);
}
}
if (fromElement != null) {
if ( fromElement != null ) {
fromElements.add( fromElement );
// recurse into subgraphs
for (Subgraph<?> subgraph : attributeNode.getSubgraphs().values()) {
fromElements.addAll( getFromElements( subgraph.getAttributeNodes(), fromElement,
fromClause, walker, explicitFetches ) );
for ( Subgraph<?> subgraph : attributeNode.getSubgraphs().values() ) {
fromElements.addAll(
getFromElements(
subgraph.getAttributeNodes(), fromElement,
fromClause, walker, explicitFetches
)
);
}
}
}
@@ -147,7 +152,7 @@ public class EntityGraphQueryHint {
throw new QueryException( "Could not apply the EntityGraph to the Query!", e );
}
}
return fromElements;
}
}

View File

@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,16 +20,16 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.engine.query.spi.sql;
import java.util.Map;
import org.hibernate.LockMode;
/**
* Represents a return defined as part of a native sql query which
* names a collection role in the form {classname}.{collectionrole}; it
* names a collection role in the form {className}.{collectionRole}; it
* is used in defining a custom sql query for loading an entity's
* collection in non-fetching scenarios (i.e., loading the collection
* itself as the "root" of the result).
@@ -52,6 +52,7 @@ public class NativeSQLQueryCollectionReturn extends NativeSQLQueryNonScalarRetur
* @param propertyResults Any user-supplied column->property mappings
* @param lockMode The lock mode to apply to the collection.
*/
@SuppressWarnings("unchecked")
public NativeSQLQueryCollectionReturn(
String alias,
String ownerEntityName,
@@ -64,6 +65,13 @@ public class NativeSQLQueryCollectionReturn extends NativeSQLQueryNonScalarRetur
this.hashCode = determineHashCode();
}
private int determineHashCode() {
int result = super.hashCode();
result = 31 * result + ( ownerEntityName != null ? ownerEntityName.hashCode() : 0 );
result = 31 * result + ( ownerProperty != null ? ownerProperty.hashCode() : 0 );
return result;
}
/**
* Returns the class owning the collection.
*
@@ -82,6 +90,8 @@ public class NativeSQLQueryCollectionReturn extends NativeSQLQueryNonScalarRetur
return ownerProperty;
}
@Override
@SuppressWarnings("RedundantIfStatement")
public boolean equals(Object o) {
if ( this == o ) {
return true;
@@ -93,7 +103,7 @@ public class NativeSQLQueryCollectionReturn extends NativeSQLQueryNonScalarRetur
return false;
}
NativeSQLQueryCollectionReturn that = ( NativeSQLQueryCollectionReturn ) o;
final NativeSQLQueryCollectionReturn that = (NativeSQLQueryCollectionReturn) o;
if ( ownerEntityName != null ? !ownerEntityName.equals( that.ownerEntityName ) : that.ownerEntityName != null ) {
return false;
@@ -105,14 +115,8 @@ public class NativeSQLQueryCollectionReturn extends NativeSQLQueryNonScalarRetur
return true;
}
@Override
public int hashCode() {
return hashCode;
}
private int determineHashCode() {
int result = super.hashCode();
result = 31 * result + ( ownerEntityName != null ? ownerEntityName.hashCode() : 0 );
result = 31 * result + ( ownerProperty != null ? ownerProperty.hashCode() : 0 );
return result;
}
}

View File

@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,9 +20,9 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.engine.query.spi.sql;
import java.util.Map;
import org.hibernate.LockMode;
@@ -46,6 +46,7 @@ public class NativeSQLQueryJoinReturn extends NativeSQLQueryNonScalarReturn {
* @param propertyResults Any user-supplied column->property mappings
* @param lockMode The lock mode to apply
*/
@SuppressWarnings("unchecked")
public NativeSQLQueryJoinReturn(
String alias,
String ownerAlias,
@@ -58,6 +59,13 @@ public class NativeSQLQueryJoinReturn extends NativeSQLQueryNonScalarReturn {
this.hashCode = determineHashCode();
}
private int determineHashCode() {
int result = super.hashCode();
result = 31 * result + ( ownerAlias != null ? ownerAlias.hashCode() : 0 );
result = 31 * result + ( ownerProperty != null ? ownerProperty.hashCode() : 0 );
return result;
}
/**
* Retrieve the alias of the owner of this fetched association.
*
@@ -77,6 +85,8 @@ public class NativeSQLQueryJoinReturn extends NativeSQLQueryNonScalarReturn {
return ownerProperty;
}
@Override
@SuppressWarnings("RedundantIfStatement")
public boolean equals(Object o) {
if ( this == o ) {
return true;
@@ -88,7 +98,7 @@ public class NativeSQLQueryJoinReturn extends NativeSQLQueryNonScalarReturn {
return false;
}
NativeSQLQueryJoinReturn that = ( NativeSQLQueryJoinReturn ) o;
final NativeSQLQueryJoinReturn that = (NativeSQLQueryJoinReturn) o;
if ( ownerAlias != null ? !ownerAlias.equals( that.ownerAlias ) : that.ownerAlias != null ) {
return false;
@@ -100,14 +110,8 @@ public class NativeSQLQueryJoinReturn extends NativeSQLQueryNonScalarReturn {
return true;
}
@Override
public int hashCode() {
return hashCode;
}
private int determineHashCode() {
int result = super.hashCode();
result = 31 * result + ( ownerAlias != null ? ownerAlias.hashCode() : 0 );
result = 31 * result + ( ownerProperty != null ? ownerProperty.hashCode() : 0 );
return result;
}
}

View File

@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,9 +20,9 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.engine.query.spi.sql;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
@@ -62,6 +62,14 @@ public abstract class NativeSQLQueryNonScalarReturn implements NativeSQLQueryRet
this.hashCode = determineHashCode();
}
private int determineHashCode() {
int result = alias != null ? alias.hashCode() : 0;
result = 31 * result + ( getClass().getName().hashCode() );
result = 31 * result + ( lockMode != null ? lockMode.hashCode() : 0 );
result = 31 * result + propertyResults.hashCode();
return result;
}
/**
* Retrieve the defined result alias
*
@@ -89,18 +97,13 @@ public abstract class NativeSQLQueryNonScalarReturn implements NativeSQLQueryRet
return Collections.unmodifiableMap( propertyResults );
}
@Override
public int hashCode() {
return hashCode;
}
private int determineHashCode() {
int result = alias != null ? alias.hashCode() : 0;
result = 31 * result + ( getClass().getName().hashCode() );
result = 31 * result + ( lockMode != null ? lockMode.hashCode() : 0 );
result = 31 * result + ( propertyResults != null ? propertyResults.hashCode() : 0 );
return result;
}
@Override
@SuppressWarnings("RedundantIfStatement")
public boolean equals(Object o) {
if ( this == o ) {
return true;
@@ -109,7 +112,7 @@ public abstract class NativeSQLQueryNonScalarReturn implements NativeSQLQueryRet
return false;
}
NativeSQLQueryNonScalarReturn that = ( NativeSQLQueryNonScalarReturn ) o;
final NativeSQLQueryNonScalarReturn that = (NativeSQLQueryNonScalarReturn) o;
if ( alias != null ? !alias.equals( that.alias ) : that.alias != null ) {
return false;
@@ -117,7 +120,7 @@ public abstract class NativeSQLQueryNonScalarReturn implements NativeSQLQueryRet
if ( lockMode != null ? !lockMode.equals( that.lockMode ) : that.lockMode != null ) {
return false;
}
if ( propertyResults != null ? !propertyResults.equals( that.propertyResults ) : that.propertyResults != null ) {
if ( !propertyResults.equals( that.propertyResults ) ) {
return false;
}

View File

@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,9 +20,9 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.engine.query.spi.sql;
import java.util.Map;
import org.hibernate.LockMode;
@@ -47,7 +47,7 @@ public class NativeSQLQueryRootReturn extends NativeSQLQueryNonScalarReturn {
* @param lockMode The lock mode to apply
*/
public NativeSQLQueryRootReturn(String alias, String entityName, LockMode lockMode) {
this(alias, entityName, null, lockMode);
this( alias, entityName, null, lockMode );
}
/**
@@ -63,6 +63,12 @@ public class NativeSQLQueryRootReturn extends NativeSQLQueryNonScalarReturn {
this.hashCode = determineHashCode();
}
private int determineHashCode() {
int result = super.hashCode();
result = 31 * result + ( returnEntityName != null ? returnEntityName.hashCode() : 0 );
return result;
}
/**
* The name of the entity to be returned.
*
@@ -72,6 +78,8 @@ public class NativeSQLQueryRootReturn extends NativeSQLQueryNonScalarReturn {
return returnEntityName;
}
@Override
@SuppressWarnings("RedundantIfStatement")
public boolean equals(Object o) {
if ( this == o ) {
return true;
@@ -83,7 +91,7 @@ public class NativeSQLQueryRootReturn extends NativeSQLQueryNonScalarReturn {
return false;
}
NativeSQLQueryRootReturn that = ( NativeSQLQueryRootReturn ) o;
final NativeSQLQueryRootReturn that = (NativeSQLQueryRootReturn) o;
if ( returnEntityName != null ? !returnEntityName.equals( that.returnEntityName ) : that.returnEntityName != null ) {
return false;
@@ -92,13 +100,8 @@ public class NativeSQLQueryRootReturn extends NativeSQLQueryNonScalarReturn {
return true;
}
@Override
public int hashCode() {
return hashCode;
}
private int determineHashCode() {
int result = super.hashCode();
result = 31 * result + ( returnEntityName != null ? returnEntityName.hashCode() : 0 );
return result;
}
}

View File

@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,9 +20,9 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.engine.query.spi.sql;
import org.hibernate.type.Type;
/**
@@ -41,6 +41,13 @@ public class NativeSQLQueryScalarReturn implements NativeSQLQueryReturn {
this.hashCode = determineHashCode();
}
private int determineHashCode() {
int result = type != null ? type.hashCode() : 0;
result = 31 * result + ( getClass().getName().hashCode() );
result = 31 * result + ( columnAlias != null ? columnAlias.hashCode() : 0 );
return result;
}
public String getColumnAlias() {
return columnAlias;
}
@@ -49,6 +56,8 @@ public class NativeSQLQueryScalarReturn implements NativeSQLQueryReturn {
return type;
}
@Override
@SuppressWarnings("RedundantIfStatement")
public boolean equals(Object o) {
if ( this == o ) {
return true;
@@ -57,8 +66,7 @@ public class NativeSQLQueryScalarReturn implements NativeSQLQueryReturn {
return false;
}
NativeSQLQueryScalarReturn that = ( NativeSQLQueryScalarReturn ) o;
final NativeSQLQueryScalarReturn that = (NativeSQLQueryScalarReturn) o;
if ( columnAlias != null ? !columnAlias.equals( that.columnAlias ) : that.columnAlias != null ) {
return false;
}
@@ -69,17 +77,11 @@ public class NativeSQLQueryScalarReturn implements NativeSQLQueryReturn {
return true;
}
@Override
public int hashCode() {
return hashCode;
}
private int determineHashCode() {
int result = type != null ? type.hashCode() : 0;
result = 31 * result + ( getClass().getName().hashCode() );
result = 31 * result + ( columnAlias != null ? columnAlias.hashCode() : 0 );
return result;
}
@Override
public void traceLog(TraceLogger logger) {
logger.writeLine( "Scalar[" );

View File

@@ -52,13 +52,15 @@ public final class EntityEntry implements Serializable {
private Object[] deletedState;
private boolean existsInDatabase;
private Object version;
private transient EntityPersister persister; // for convenience to save some lookups
private transient EntityPersister persister;
private final EntityMode entityMode;
private final String tenantId;
private final String entityName;
private transient EntityKey cachedEntityKey; // cached EntityKey (lazy-initialized)
// cached EntityKey (lazy-initialized)
private transient EntityKey cachedEntityKey;
private boolean isBeingReplicated;
private boolean loadedWithLazyPropertiesUnfetched; //NOTE: this is not updated when properties are fetched lazily!
//NOTE: this is not updated when properties are fetched lazily!
private boolean loadedWithLazyPropertiesUnfetched;
private final transient Object rowId;
private final transient PersistenceContext persistenceContext;
@@ -130,7 +132,8 @@ public final class EntityEntry implements Serializable {
this.existsInDatabase = existsInDatabase;
this.isBeingReplicated = isBeingReplicated;
this.loadedWithLazyPropertiesUnfetched = loadedWithLazyPropertiesUnfetched;
this.rowId = null; // this is equivalent to the old behavior...
// this is equivalent to the old behavior...
this.rowId = null;
this.persistenceContext = persistenceContext;
}
@@ -147,8 +150,9 @@ public final class EntityEntry implements Serializable {
}
public void setStatus(Status status) {
if (status==Status.READ_ONLY) {
loadedState = null; //memory optimization
if ( status == Status.READ_ONLY ) {
//memory optimization
loadedState = null;
}
if ( this.status != status ) {
this.previousStatus = this.status;
@@ -236,8 +240,10 @@ public final class EntityEntry implements Serializable {
interceptor.clearDirty();
}
}
if( entity instanceof SelfDirtinessTracker)
if( entity instanceof SelfDirtinessTracker) {
((SelfDirtinessTracker) entity).$$_hibernate_clearDirtyAttributes();
}
persistenceContext.getSession()
.getFactory()
@@ -264,12 +270,15 @@ public final class EntityEntry implements Serializable {
}
public boolean isNullifiable(boolean earlyInsert, SessionImplementor session) {
return getStatus() == Status.SAVING || (
earlyInsert ?
!isExistsInDatabase() :
session.getPersistenceContext().getNullifiableEntityKeys()
.contains( getEntityKey() )
);
if ( getStatus() == Status.SAVING ) {
return true;
}
else if ( earlyInsert ) {
return !isExistsInDatabase();
}
else {
return session.getPersistenceContext().getNullifiableEntityKeys().contains( getEntityKey() );
}
}
public Object getLoadedValue(String propertyName) {
@@ -277,8 +286,7 @@ public final class EntityEntry implements Serializable {
return null;
}
else {
int propertyIndex = ( (UniqueKeyLoadable) persister )
.getPropertyIndex( propertyName );
int propertyIndex = ( (UniqueKeyLoadable) persister ).getPropertyIndex( propertyName );
return loadedState[propertyIndex];
}
}
@@ -297,7 +305,7 @@ public final class EntityEntry implements Serializable {
*/
public boolean requiresDirtyCheck(Object entity) {
return isModifiableEntity()
&& ( ! isUnequivocallyNonDirty( entity ) );
&& ( !isUnequivocallyNonDirty( entity ) );
}
@SuppressWarnings( {"SimplifiableIfStatement"})
@@ -344,14 +352,15 @@ public final class EntityEntry implements Serializable {
public void forceLocked(Object entity, Object nextVersion) {
version = nextVersion;
loadedState[ persister.getVersionProperty() ] = version;
// TODO: use LockMode.PESSIMISTIC_FORCE_INCREMENT
//noinspection deprecation
setLockMode( LockMode.FORCE ); // TODO: use LockMode.PESSIMISTIC_FORCE_INCREMENT
setLockMode( LockMode.FORCE );
persister.setPropertyValue( entity, getPersister().getVersionProperty(), nextVersion );
}
public boolean isReadOnly() {
if (status != Status.MANAGED && status != Status.READ_ONLY) {
throw new HibernateException("instance was not in a valid state");
if ( status != Status.MANAGED && status != Status.READ_ONLY ) {
throw new HibernateException( "instance was not in a valid state" );
}
return status == Status.READ_ONLY;
}
@@ -381,10 +390,9 @@ public final class EntityEntry implements Serializable {
}
}
@Override
public String toString() {
return "EntityEntry" +
MessageHelper.infoString(entityName, id) +
'(' + status + ')';
return "EntityEntry" + MessageHelper.infoString( entityName, id ) + '(' + status + ')';
}
public boolean isLoadedWithLazyPropertiesUnfetched() {
@@ -434,19 +442,17 @@ public final class EntityEntry implements Serializable {
PersistenceContext persistenceContext) throws IOException, ClassNotFoundException {
String previousStatusString;
return new EntityEntry(
// this complexity comes from non-flushed changes, should really look at how that reattaches entries
( persistenceContext.getSession() == null ? null : persistenceContext.getSession().getFactory() ),
persistenceContext.getSession().getFactory(),
(String) ois.readObject(),
( Serializable ) ois.readObject(),
(Serializable) ois.readObject(),
EntityMode.parse( (String) ois.readObject() ),
(String) ois.readObject(),
Status.valueOf( (String) ois.readObject() ),
( ( previousStatusString = ( String ) ois.readObject() ).length() == 0 ?
null :
Status.valueOf( previousStatusString )
),
( Object[] ) ois.readObject(),
( Object[] ) ois.readObject(),
( previousStatusString = (String) ois.readObject() ).length() == 0
? null
: Status.valueOf( previousStatusString ),
(Object[]) ois.readObject(),
(Object[]) ois.readObject(),
ois.readObject(),
LockMode.valueOf( (String) ois.readObject() ),
ois.readBoolean(),

View File

@@ -133,8 +133,8 @@ public class LoadQueryInfluencers implements Serializable {
}
public Object getFilterParameterValue(String filterParameterName) {
String[] parsed = parseFilterParameterName( filterParameterName );
FilterImpl filter = ( FilterImpl ) enabledFilters.get( parsed[0] );
final String[] parsed = parseFilterParameterName( filterParameterName );
final FilterImpl filter = (FilterImpl) enabledFilters.get( parsed[0] );
if ( filter == null ) {
throw new IllegalArgumentException( "Filter [" + parsed[0] + "] currently not enabled" );
}
@@ -142,12 +142,12 @@ public class LoadQueryInfluencers implements Serializable {
}
public Type getFilterParameterType(String filterParameterName) {
String[] parsed = parseFilterParameterName( filterParameterName );
FilterDefinition filterDef = sessionFactory.getFilterDefinition( parsed[0] );
final String[] parsed = parseFilterParameterName( filterParameterName );
final FilterDefinition filterDef = sessionFactory.getFilterDefinition( parsed[0] );
if ( filterDef == null ) {
throw new IllegalArgumentException( "Filter [" + parsed[0] + "] not defined" );
}
Type type = filterDef.getParameterType( parsed[1] );
final Type type = filterDef.getParameterType( parsed[1] );
if ( type == null ) {
// this is an internal error of some sort...
throw new InternalError( "Unable to locate type for filter parameter" );
@@ -160,8 +160,8 @@ public class LoadQueryInfluencers implements Serializable {
if ( dot <= 0 ) {
throw new IllegalArgumentException( "Invalid filter-parameter name format" );
}
String filterName = filterParameterName.substring( 0, dot );
String parameterName = filterParameterName.substring( dot + 1 );
final String filterName = filterParameterName.substring( 0, dot );
final String parameterName = filterParameterName.substring( dot + 1 );
return new String[] { filterName, parameterName };
}

View File

@@ -316,8 +316,8 @@ public final class QueryParameters {
}
public void validateParameters() throws QueryException {
int types = positionalParameterTypes == null ? 0 : positionalParameterTypes.length;
int values = positionalParameterValues == null ? 0 : positionalParameterValues.length;
final int types = positionalParameterTypes == null ? 0 : positionalParameterTypes.length;
final int values = positionalParameterValues == null ? 0 : positionalParameterValues.length;
if ( types != values ) {
throw new QueryException(
"Number of positional parameter types:" + types +
@@ -413,7 +413,7 @@ public final class QueryParameters {
* initialized (i.e., isReadOnlyInitialized() == false).
*/
public boolean isReadOnly() {
if ( ! isReadOnlyInitialized() ) {
if ( !isReadOnlyInitialized() ) {
throw new IllegalStateException( "cannot call isReadOnly() when isReadOnlyInitialized() returns false" );
}
return readOnly;
@@ -491,13 +491,10 @@ public final class QueryParameters {
}
else {
final Dialect dialect = factory.getDialect();
String symbols = new StringBuilder().append( ParserHelper.HQL_SEPARATORS )
.append( dialect.openQuote() )
.append( dialect.closeQuote() )
.toString();
StringTokenizer tokens = new StringTokenizer( sql, symbols, true );
StringBuilder result = new StringBuilder();
final String symbols = ParserHelper.HQL_SEPARATORS + dialect.openQuote() + dialect.closeQuote();
final StringTokenizer tokens = new StringTokenizer( sql, symbols, true );
StringBuilder result = new StringBuilder();
List parameters = new ArrayList();
List parameterTypes = new ArrayList();
@@ -507,13 +504,13 @@ public final class QueryParameters {
if ( token.startsWith( ParserHelper.HQL_VARIABLE_PREFIX ) ) {
final String filterParameterName = token.substring( 1 );
final String[] parts = LoadQueryInfluencers.parseFilterParameterName( filterParameterName );
final FilterImpl filter = ( FilterImpl ) filters.get( parts[0] );
final FilterImpl filter = (FilterImpl) filters.get( parts[0] );
final Object value = filter.getParameter( parts[1] );
final Type type = filter.getFilterDefinition().getParameterType( parts[1] );
if ( value != null && Collection.class.isAssignableFrom( value.getClass() ) ) {
Iterator itr = ( ( Collection ) value ).iterator();
Iterator itr = ( (Collection) value ).iterator();
while ( itr.hasNext() ) {
Object elementValue = itr.next();
final Object elementValue = itr.next();
result.append( '?' );
parameters.add( elementValue );
parameterTypes.add( type );

View File

@@ -77,9 +77,9 @@ public class WebSphereJtaPlatform extends AbstractJtaPlatform {
protected TransactionManager locateTransactionManager() {
try {
final Method method = transactionManagerAccessClass.getMethod( "getTransactionManager" );
return ( TransactionManager ) method.invoke( null );
return (TransactionManager) method.invoke( null );
}
catch ( Exception e ) {
catch (Exception e) {
throw new JtaPlatformException( "Could not obtain WebSphere TransactionManager", e );
}

View File

@@ -43,7 +43,7 @@ import org.hibernate.persister.entity.EntityPersister;
* @author Steve Ebersole
*/
public class DefaultPostLoadEventListener implements PostLoadEventListener {
@Override
public void onPostLoad(PostLoadEvent event) {
final Object entity = event.getEntity();
final EntityEntry entry = event.getSession().getPersistenceContext().getEntry( entity );
@@ -54,23 +54,25 @@ public class DefaultPostLoadEventListener implements PostLoadEventListener {
final LockMode lockMode = entry.getLockMode();
if ( LockMode.PESSIMISTIC_FORCE_INCREMENT.equals( lockMode ) ) {
final EntityPersister persister = entry.getPersister();
Object nextVersion = persister.forceVersionIncrement(
entry.getId(), entry.getVersion(), event.getSession()
final Object nextVersion = persister.forceVersionIncrement(
entry.getId(),
entry.getVersion(),
event.getSession()
);
entry.forceLocked( entity, nextVersion );
}
else if ( LockMode.OPTIMISTIC_FORCE_INCREMENT.equals( lockMode ) ) {
EntityIncrementVersionProcess incrementVersion = new EntityIncrementVersionProcess( entity, entry );
final EntityIncrementVersionProcess incrementVersion = new EntityIncrementVersionProcess( entity, entry );
event.getSession().getActionQueue().registerProcess( incrementVersion );
}
else if ( LockMode.OPTIMISTIC.equals( lockMode ) ) {
EntityVerifyVersionProcess verifyVersion = new EntityVerifyVersionProcess( entity, entry );
final EntityVerifyVersionProcess verifyVersion = new EntityVerifyVersionProcess( entity, entry );
event.getSession().getActionQueue().registerProcess( verifyVersion );
}
if ( event.getPersister().implementsLifecycle() ) {
//log.debug( "calling onLoad()" );
( ( Lifecycle ) event.getEntity() ).onLoad( event.getSession(), event.getId() );
( (Lifecycle) event.getEntity() ).onLoad( event.getSession(), event.getId() );
}
}

View File

@@ -47,16 +47,17 @@ public class OnLockVisitor extends ReattachVisitor {
super( session, key, owner );
}
Object processCollection(Object collection, CollectionType type) throws HibernateException {
SessionImplementor session = getSession();
CollectionPersister persister = session.getFactory().getCollectionPersister( type.getRole() );
@Override
public Object processCollection(Object collection, CollectionType type) throws HibernateException {
if ( collection == null ) {
//do nothing
return null;
}
else if ( collection instanceof PersistentCollection ) {
PersistentCollection persistentCollection = ( PersistentCollection ) collection;
final SessionImplementor session = getSession();
final CollectionPersister persister = session.getFactory().getCollectionPersister( type.getRole() );
if ( collection instanceof PersistentCollection ) {
final PersistentCollection persistentCollection = (PersistentCollection) collection;
if ( persistentCollection.setCurrentSession( session ) ) {
if ( isOwnerUnchanged( persistentCollection, persister, extractCollectionKeyFromOwner( persister ) ) ) {
// a "detached" collection that originally belonged to the same entity
@@ -84,7 +85,6 @@ public class OnLockVisitor extends ReattachVisitor {
}
return null;
}
}

View File

@ -51,21 +51,20 @@ public class OnReplicateVisitor extends ReattachVisitor {
this.isUpdate = isUpdate;
}
Object processCollection(Object collection, CollectionType type)
throws HibernateException {
@Override
public Object processCollection(Object collection, CollectionType type) throws HibernateException {
if ( collection == CollectionType.UNFETCHED_COLLECTION ) {
return null;
}
EventSource session = getSession();
CollectionPersister persister = session.getFactory().getCollectionPersister( type.getRole() );
final EventSource session = getSession();
final CollectionPersister persister = session.getFactory().getCollectionPersister( type.getRole() );
if ( isUpdate ) {
removeCollection( persister, extractCollectionKeyFromOwner( persister ), session );
}
if ( collection != null && ( collection instanceof PersistentCollection ) ) {
PersistentCollection wrapper = ( PersistentCollection ) collection;
if ( collection != null && collection instanceof PersistentCollection ) {
final PersistentCollection wrapper = (PersistentCollection) collection;
wrapper.setCurrentSession( session );
if ( wrapper.wasInitialized() ) {
session.getPersistenceContext().addNewCollection( persister, wrapper );

View File

@ -44,7 +44,6 @@ import org.jboss.logging.Logger;
* @author Steve Ebersole
*/
public class SQLExceptionConverterFactory {
private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, SQLExceptionConverterFactory.class.getName() );
private SQLExceptionConverterFactory() {
@ -67,7 +66,7 @@ public class SQLExceptionConverterFactory {
public static SQLExceptionConverter buildSQLExceptionConverter(Dialect dialect, Properties properties) throws HibernateException {
SQLExceptionConverter converter = null;
String converterClassName = ( String ) properties.get( Environment.SQL_EXCEPTION_CONVERTER );
String converterClassName = (String) properties.get( Environment.SQL_EXCEPTION_CONVERTER );
if ( StringHelper.isNotEmpty( converterClassName ) ) {
converter = constructConverter( converterClassName, dialect.getViolatedConstraintNameExtracter() );
}
@ -81,7 +80,7 @@ public class SQLExceptionConverterFactory {
try {
( (Configurable) converter ).configure( properties );
}
catch ( HibernateException e ) {
catch (HibernateException e) {
LOG.unableToConfigureSqlExceptionConverter( e );
throw e;
}
@ -107,18 +106,17 @@ public class SQLExceptionConverterFactory {
private static SQLExceptionConverter constructConverter(String converterClassName, ViolatedConstraintNameExtracter violatedConstraintNameExtracter) {
try {
LOG.tracev( "Attempting to construct instance of specified SQLExceptionConverter [{0}]", converterClassName );
Class converterClass = ReflectHelper.classForName( converterClassName );
final Class converterClass = ReflectHelper.classForName( converterClassName );
// First, try to find a matching constructor accepting a ViolatedConstraintNameExtracter param...
Constructor[] ctors = converterClass.getDeclaredConstructors();
for ( int i = 0; i < ctors.length; i++ ) {
if ( ctors[i].getParameterTypes() != null && ctors[i].getParameterTypes().length == 1 ) {
if ( ViolatedConstraintNameExtracter.class.isAssignableFrom( ctors[i].getParameterTypes()[0] ) ) {
final Constructor[] ctors = converterClass.getDeclaredConstructors();
for ( Constructor ctor : ctors ) {
if ( ctor.getParameterTypes() != null && ctor.getParameterTypes().length == 1 ) {
if ( ViolatedConstraintNameExtracter.class.isAssignableFrom( ctor.getParameterTypes()[0] ) ) {
try {
return ( SQLExceptionConverter )
ctors[i].newInstance( new Object[]{violatedConstraintNameExtracter} );
return (SQLExceptionConverter) ctor.newInstance( violatedConstraintNameExtracter );
}
catch ( Throwable t ) {
catch (Throwable ignore) {
// eat it and try next
}
}
@ -126,10 +124,10 @@ public class SQLExceptionConverterFactory {
}
// Otherwise, try to use the no-arg constructor
return ( SQLExceptionConverter ) converterClass.newInstance();
return (SQLExceptionConverter) converterClass.newInstance();
}
catch ( Throwable t ) {
catch (Throwable t) {
LOG.unableToConstructSqlExceptionConverter( t );
}

View File

@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,9 +20,9 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.hql.internal;
import java.util.HashMap;
import java.util.Map;
@ -53,8 +53,9 @@ public final class CollectionProperties {
private CollectionProperties() {
}
@SuppressWarnings("SimplifiableIfStatement")
public static boolean isCollectionProperty(String name) {
String key = name.toLowerCase();
final String key = name.toLowerCase();
// CollectionPropertyMapping processes everything except 'index'.
if ( COLLECTION_INDEX_LOWER.equals( key ) ) {
return false;
@ -65,11 +66,11 @@ public final class CollectionProperties {
}
public static String getNormalizedPropertyName(String name) {
return ( String ) HQL_COLLECTION_PROPERTIES.get( name );
return (String) HQL_COLLECTION_PROPERTIES.get( name );
}
public static boolean isAnyCollectionProperty(String name) {
String key = name.toLowerCase();
final String key = name.toLowerCase();
return HQL_COLLECTION_PROPERTIES.containsKey( key );
}
}

View File

@ -142,7 +142,7 @@ public class HqlSqlWalker extends HqlSqlBaseWalker implements ErrorReporter, Par
private int parameterCount;
private Map namedParameters = new HashMap();
private ArrayList parameters = new ArrayList();
private ArrayList<ParameterSpecification> parameters = new ArrayList<ParameterSpecification>();
private int numberOfParametersInSetClause;
private int positionalParameterCount;
@ -1182,7 +1182,7 @@ public class HqlSqlWalker extends HqlSqlBaseWalker implements ErrorReporter, Par
return printer;
}
public ArrayList getParameters() {
public ArrayList<ParameterSpecification> getParameters() {
return parameters;
}

View File

@ -27,7 +27,6 @@ package org.hibernate.hql.internal.ast;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -46,14 +45,15 @@ import org.hibernate.type.Type;
* @author Steve Ebersole
*/
public class ParameterTranslationsImpl implements ParameterTranslations {
private final Map namedParameters;
private final Map<String,ParameterInfo> namedParameters;
private final ParameterInfo[] ordinalParameters;
@Override
public boolean supportsOrdinalParameterMetadata() {
return true;
}
@Override
public int getOrdinalParameterCount() {
return ordinalParameters.length;
}
@ -63,26 +63,31 @@ public class ParameterTranslationsImpl implements ParameterTranslations {
return ordinalParameters[ordinalPosition - 1];
}
@Override
public int getOrdinalParameterSqlLocation(int ordinalPosition) {
return getOrdinalParameterInfo( ordinalPosition ).getSqlLocations()[0];
}
@Override
public Type getOrdinalParameterExpectedType(int ordinalPosition) {
return getOrdinalParameterInfo( ordinalPosition ).getExpectedType();
}
@Override
public Set getNamedParameterNames() {
return namedParameters.keySet();
}
public ParameterInfo getNamedParameterInfo(String name) {
return ( ParameterInfo ) namedParameters.get( name );
return namedParameters.get( name );
}
@Override
public int[] getNamedParameterSqlLocations(String name) {
return getNamedParameterInfo( name ).getSqlLocations();
}
@Override
public Type getNamedParameterExpectedType(String name) {
return getNamedParameterInfo( name ).getExpectedType();
}
@ -94,28 +99,27 @@ public class ParameterTranslationsImpl implements ParameterTranslations {
* Note: the order in the incoming list denotes the parameter's
* psudeo-position within the resulting sql statement.
*
* @param parameterSpecifications
* @param parameterSpecifications The parameter specifications
*/
public ParameterTranslationsImpl(List parameterSpecifications) {
public ParameterTranslationsImpl(List<ParameterSpecification> parameterSpecifications) {
class NamedParamTempHolder {
String name;
Type type;
List positions = new ArrayList();
List<Integer> positions = new ArrayList<Integer>();
}
int size = parameterSpecifications.size();
List ordinalParameterList = new ArrayList();
Map namedParameterMap = new HashMap();
final int size = parameterSpecifications.size();
final List<ParameterInfo> ordinalParameterList = new ArrayList<ParameterInfo>();
final Map<String,NamedParamTempHolder> namedParameterMap = new HashMap<String,NamedParamTempHolder>();
for ( int i = 0; i < size; i++ ) {
final ParameterSpecification spec = ( ParameterSpecification ) parameterSpecifications.get( i );
if ( PositionalParameterSpecification.class.isAssignableFrom( spec.getClass() ) ) {
PositionalParameterSpecification ordinalSpec = ( PositionalParameterSpecification ) spec;
final ParameterSpecification spec = parameterSpecifications.get( i );
if ( PositionalParameterSpecification.class.isInstance( spec ) ) {
final PositionalParameterSpecification ordinalSpec = (PositionalParameterSpecification) spec;
ordinalParameterList.add( new ParameterInfo( i, ordinalSpec.getExpectedType() ) );
}
else if ( NamedParameterSpecification.class.isAssignableFrom( spec.getClass() ) ) {
NamedParameterSpecification namedSpec = ( NamedParameterSpecification ) spec;
NamedParamTempHolder paramHolder = ( NamedParamTempHolder ) namedParameterMap.get( namedSpec.getName() );
else if ( NamedParameterSpecification.class.isInstance( spec ) ) {
final NamedParameterSpecification namedSpec = (NamedParameterSpecification) spec;
NamedParamTempHolder paramHolder = namedParameterMap.get( namedSpec.getName() );
if ( paramHolder == null ) {
paramHolder = new NamedParamTempHolder();
paramHolder.name = namedSpec.getName();
@ -124,24 +128,20 @@ public class ParameterTranslationsImpl implements ParameterTranslations {
}
paramHolder.positions.add( i );
}
else {
// don't care about other param types here, just those explicitly user-defined...
}
// don't care about other param types here, just those explicitly user-defined...
}
ordinalParameters = ( ParameterInfo[] ) ordinalParameterList.toArray( new ParameterInfo[ordinalParameterList.size()] );
ordinalParameters = ordinalParameterList.toArray( new ParameterInfo[ordinalParameterList.size()] );
if ( namedParameterMap.isEmpty() ) {
namedParameters = java.util.Collections.EMPTY_MAP;
namedParameters = java.util.Collections.emptyMap();
}
else {
Map namedParametersBacking = new HashMap( namedParameterMap.size() );
Iterator itr = namedParameterMap.values().iterator();
while( itr.hasNext() ) {
final NamedParamTempHolder holder = ( NamedParamTempHolder ) itr.next();
final Map<String,ParameterInfo> namedParametersBacking = new HashMap<String,ParameterInfo>( namedParameterMap.size() );
for ( NamedParamTempHolder holder : namedParameterMap.values() ) {
namedParametersBacking.put(
holder.name,
new ParameterInfo( ArrayHelper.toIntArray( holder.positions ), holder.type )
new ParameterInfo( ArrayHelper.toIntArray( holder.positions ), holder.type )
);
}
namedParameters = java.util.Collections.unmodifiableMap( namedParametersBacking );

View File

@ -367,9 +367,10 @@ public class QueryTranslatorImpl implements FilterTranslator {
throws HibernateException {
// Delegate to the QueryLoader...
errorIfDML();
QueryNode query = ( QueryNode ) sqlAst;
boolean hasLimit = queryParameters.getRowSelection() != null && queryParameters.getRowSelection().definesLimits();
boolean needsDistincting = ( query.getSelectClause().isDistinct() || hasLimit ) && containsCollectionFetches();
final QueryNode query = (QueryNode) sqlAst;
final boolean hasLimit = queryParameters.getRowSelection() != null && queryParameters.getRowSelection().definesLimits();
final boolean needsDistincting = ( query.getSelectClause().isDistinct() || hasLimit ) && containsCollectionFetches();
QueryParameters queryParametersToUse;
if ( hasLimit && containsCollectionFetches() ) {
@ -484,7 +485,7 @@ public class QueryTranslatorImpl implements FilterTranslator {
@Override
public boolean containsCollectionFetches() {
errorIfDML();
List collectionFetches = ( ( QueryNode ) sqlAst ).getFromClause().getCollectionFetches();
List collectionFetches = ( (QueryNode) sqlAst ).getFromClause().getCollectionFetches();
return collectionFetches != null && collectionFetches.size() > 0;
}
@Override
@ -499,7 +500,7 @@ public class QueryTranslatorImpl implements FilterTranslator {
errorIfDML();
QueryNode query = ( QueryNode ) sqlAst;
final QueryNode query = (QueryNode) sqlAst;
// If there are no collection fetches, then no further checks are needed
List collectionFetches = query.getFromClause().getCollectionFetches();
@ -551,10 +552,10 @@ public class QueryTranslatorImpl implements FilterTranslator {
}
private StatementExecutor buildAppropriateStatementExecutor(HqlSqlWalker walker) {
Statement statement = ( Statement ) walker.getAST();
final Statement statement = (Statement) walker.getAST();
if ( walker.getStatementType() == HqlSqlTokenTypes.DELETE ) {
FromElement fromElement = walker.getFinalFromClause().getFromElement();
Queryable persister = fromElement.getQueryable();
final FromElement fromElement = walker.getFinalFromClause().getFromElement();
final Queryable persister = fromElement.getQueryable();
if ( persister.isMultiTable() ) {
return new MultiTableDeleteExecutor( walker );
}
@ -563,8 +564,8 @@ public class QueryTranslatorImpl implements FilterTranslator {
}
}
else if ( walker.getStatementType() == HqlSqlTokenTypes.UPDATE ) {
FromElement fromElement = walker.getFinalFromClause().getFromElement();
Queryable persister = fromElement.getQueryable();
final FromElement fromElement = walker.getFinalFromClause().getFromElement();
final Queryable persister = fromElement.getQueryable();
if ( persister.isMultiTable() ) {
// even here, if only properties mapped to the "base table" are referenced
// in the set and where clauses, this could be handled by the BasicDelegate.
@ -576,7 +577,7 @@ public class QueryTranslatorImpl implements FilterTranslator {
}
}
else if ( walker.getStatementType() == HqlSqlTokenTypes.INSERT ) {
return new BasicExecutor( walker, ( ( InsertStatement ) statement ).getIntoClause().getQueryable() );
return new BasicExecutor( walker, ( (InsertStatement) statement ).getIntoClause().getQueryable() );
}
else {
throw new QueryException( "Unexpected statement type" );
@ -619,8 +620,8 @@ public class QueryTranslatorImpl implements FilterTranslator {
}
}
private void handleDotStructure(AST dotStructureRoot) {
String expression = ASTUtil.getPathText( dotStructureRoot );
Object constant = ReflectHelper.getConstantValue( expression );
final String expression = ASTUtil.getPathText( dotStructureRoot );
final Object constant = ReflectHelper.getConstantValue( expression );
if ( constant != null ) {
dotStructureRoot.setFirstChild( null );
dotStructureRoot.setType( HqlTokenTypes.JAVA_CONSTANT );

View File

@ -97,6 +97,7 @@ public class SqlASTFactory extends ASTFactory implements HqlSqlTokenTypes {
* Returns the class for a given token type (a.k.a. AST node type).
*
* @param tokenType The token type.
*
* @return Class - The AST node class to instantiate.
*/
public Class getASTNodeType(int tokenType) {
@ -122,7 +123,7 @@ public class SqlASTFactory extends ASTFactory implements HqlSqlTokenTypes {
return DotNode.class;
case INDEX_OP:
return IndexNode.class;
// Alias references and identifiers use the same node class.
// Alias references and identifiers use the same node class.
case ALIAS_REF:
case IDENT:
return IdentNode.class;
@ -208,18 +209,19 @@ public class SqlASTFactory extends ASTFactory implements HqlSqlTokenTypes {
}
default:
return SqlNode.class;
} // switch
}
}
@SuppressWarnings("unchecked")
protected AST createUsingCtor(Token token, String className) {
Class c;
AST t;
try {
c = Class.forName( className );
Class[] tokenArgType = new Class[]{antlr.Token.class};
Class[] tokenArgType = new Class[] {antlr.Token.class};
Constructor ctor = c.getConstructor( tokenArgType );
if ( ctor != null ) {
t = ( AST ) ctor.newInstance( new Object[]{token} ); // make a new one
t = (AST) ctor.newInstance( token );
initializeSqlNode( t );
}
else {
@ -228,7 +230,7 @@ public class SqlASTFactory extends ASTFactory implements HqlSqlTokenTypes {
t = create( c );
}
}
catch ( Exception e ) {
catch (Exception e) {
throw new IllegalArgumentException( "Invalid class or can't make instance, " + className );
}
return t;
@ -237,11 +239,11 @@ public class SqlASTFactory extends ASTFactory implements HqlSqlTokenTypes {
private void initializeSqlNode(AST t) {
// Initialize SQL nodes here.
if ( t instanceof InitializeableNode ) {
InitializeableNode initializeableNode = ( InitializeableNode ) t;
InitializeableNode initializeableNode = (InitializeableNode) t;
initializeableNode.initialize( walker );
}
if ( t instanceof SessionFactoryAwareNode ) {
( ( SessionFactoryAwareNode ) t ).setSessionFactory( walker.getSessionFactoryHelper().getFactory() );
( (SessionFactoryAwareNode) t ).setSessionFactory( walker.getSessionFactoryHelper().getFactory() );
}
}
@ -249,15 +251,16 @@ public class SqlASTFactory extends ASTFactory implements HqlSqlTokenTypes {
* Actually instantiate the AST node.
*
* @param c The class to instantiate.
*
* @return The instantiated and initialized node.
*/
protected AST create(Class c) {
AST t;
try {
t = ( AST ) c.newInstance(); // make a new one
t = (AST) c.newInstance();
initializeSqlNode( t );
}
catch ( Exception e ) {
catch (Exception e) {
error( "Can't create AST Node " + c.getName() );
return null;
}

View File

@ -52,11 +52,9 @@ import antlr.collections.AST;
* @author Brett Meyer
*/
public class DeleteExecutor extends BasicExecutor {
private static final Logger LOG = Logger.getLogger( DeleteExecutor.class );
private final List<String> deletes = new ArrayList<String>();
private List<ParameterSpecification> parameterSpecifications;
public DeleteExecutor(HqlSqlWalker walker, Queryable persister) {
@ -66,10 +64,10 @@ public class DeleteExecutor extends BasicExecutor {
final Dialect dialect = factory.getDialect();
try {
final DeleteStatement deleteStatement = ( DeleteStatement ) walker.getAST();
final DeleteStatement deleteStatement = (DeleteStatement) walker.getAST();
final String idSubselectWhere;
if (deleteStatement.hasWhereClause()) {
if ( deleteStatement.hasWhereClause() ) {
final AST whereClause = deleteStatement.getWhereClause();
final SqlGenerator gen = new SqlGenerator( factory );
gen.whereClause( whereClause );
@ -88,11 +86,13 @@ public class DeleteExecutor extends BasicExecutor {
final AbstractCollectionPersister cPersister = (AbstractCollectionPersister) factory
.getCollectionPersister( cType.getRole() );
if ( cPersister.isManyToMany() ) {
if (persister.getIdentifierColumnNames().length > 1
&& !dialect.supportsTuplesInSubqueries()) {
LOG.warn( "This dialect is unable to cascade the delete into the many-to-many join table" +
if ( persister.getIdentifierColumnNames().length > 1
&& !dialect.supportsTuplesInSubqueries() ) {
LOG.warn(
"This dialect is unable to cascade the delete into the many-to-many join table" +
" when the entity has multiple primary keys. Either properly setup cascading on" +
" the constraints or manually clear the associations prior to deleting the entities." );
" the constraints or manually clear the associations prior to deleting the entities."
);
}
else {
final String idSubselect = "(select "

View File

@ -44,7 +44,7 @@ public abstract class AbstractMapComponentNode extends FromReferenceNode impleme
private String[] columns;
public FromReferenceNode getMapReference() {
return ( FromReferenceNode ) getFirstChild();
return (FromReferenceNode) getFirstChild();
}
public String[] getColumns() {
@ -66,19 +66,19 @@ public abstract class AbstractMapComponentNode extends FromReferenceNode impleme
throw attemptedDereference();
}
FromReferenceNode mapReference = getMapReference();
final FromReferenceNode mapReference = getMapReference();
mapReference.resolve( true, true );
FromElement sourceFromElement = null;
if ( isAliasRef( mapReference ) ) {
QueryableCollection collectionPersister = mapReference.getFromElement().getQueryableCollection();
final QueryableCollection collectionPersister = mapReference.getFromElement().getQueryableCollection();
if ( Map.class.isAssignableFrom( collectionPersister.getCollectionType().getReturnedClass() ) ) {
sourceFromElement = mapReference.getFromElement();
}
}
else {
if ( mapReference.getDataType().isCollectionType() ) {
CollectionType collectionType = (CollectionType) mapReference.getDataType();
final CollectionType collectionType = (CollectionType) mapReference.getDataType();
if ( Map.class.isAssignableFrom( collectionType.getReturnedClass() ) ) {
sourceFromElement = mapReference.getFromElement();
}

View File

@ -109,14 +109,15 @@ public abstract class AbstractNullnessCheckNode extends UnaryLogicOperatorNode {
}
private static Type extractDataType(Node operand) {
Type type = null;
if ( operand instanceof SqlNode ) {
type = ( ( SqlNode ) operand ).getDataType();
return ( (SqlNode) operand ).getDataType();
}
if ( type == null && operand instanceof ExpectedTypeAwareNode ) {
type = ( ( ExpectedTypeAwareNode ) operand ).getExpectedType();
if ( operand instanceof ExpectedTypeAwareNode ) {
return ( (ExpectedTypeAwareNode) operand ).getExpectedType();
}
return type;
return null;
}
private static String[] extractMutationTexts(Node operand, int count) {
@ -128,9 +129,9 @@ public abstract class AbstractNullnessCheckNode extends UnaryLogicOperatorNode {
return rtn;
}
else if ( operand.getType() == HqlSqlTokenTypes.VECTOR_EXPR ) {
String[] rtn = new String[ operand.getNumberOfChildren() ];
int x = 0;
final String[] rtn = new String[ operand.getNumberOfChildren() ];
AST node = operand.getFirstChild();
int x = 0;
while ( node != null ) {
rtn[ x++ ] = node.getText();
node = node.getNextSibling();

View File

@ -44,34 +44,28 @@ public abstract class AbstractRestrictableStatement extends AbstractStatement im
protected abstract CoreMessageLogger getLog();
/**
* @see org.hibernate.hql.internal.ast.tree.RestrictableStatement#getFromClause
*/
@Override
public final FromClause getFromClause() {
if ( fromClause == null ) {
fromClause = ( FromClause ) ASTUtil.findTypeInChildren( this, HqlSqlTokenTypes.FROM );
fromClause = (FromClause) ASTUtil.findTypeInChildren( this, HqlSqlTokenTypes.FROM );
}
return fromClause;
}
/**
* @see RestrictableStatement#hasWhereClause
*/
@Override
public final boolean hasWhereClause() {
AST whereClause = locateWhereClause();
return whereClause != null && whereClause.getNumberOfChildren() > 0;
}
/**
* @see org.hibernate.hql.internal.ast.tree.RestrictableStatement#getWhereClause
*/
@Override
public final AST getWhereClause() {
if ( whereClause == null ) {
whereClause = locateWhereClause();
// If there is no WHERE node, make one.
if ( whereClause == null ) {
getLog().debug( "getWhereClause() : Creating a new WHERE clause..." );
whereClause = ASTUtil.create( getWalker().getASTFactory(), HqlSqlTokenTypes.WHERE, "WHERE" );
whereClause = getWalker().getASTFactory().create( HqlSqlTokenTypes.WHERE, "WHERE" );
// inject the WHERE after the parent
AST parent = ASTUtil.findTypeInChildren( this, getWhereClauseParentTokenType() );
whereClause.setNextSibling( parent.getNextSibling() );

View File

@ -68,21 +68,17 @@ public class AssignmentSpecification {
// knows about the property-ref path in the correct format; it is either this, or
// recurse over the DotNodes constructing the property path just like DotNode does
// internally
DotNode lhs = ( DotNode ) eq.getFirstChild();
SqlNode rhs = ( SqlNode ) lhs.getNextSibling();
final DotNode lhs = (DotNode) eq.getFirstChild();
final SqlNode rhs = (SqlNode) lhs.getNextSibling();
validateLhs( lhs );
final String propertyPath = lhs.getPropertyPath();
Set temp = new HashSet();
Set<String> temp = new HashSet<String>();
// yuck!
if ( persister instanceof UnionSubclassEntityPersister ) {
UnionSubclassEntityPersister usep = ( UnionSubclassEntityPersister ) persister;
String[] tables = persister.getConstraintOrderedTableNameClosure();
int size = tables.length;
for ( int i = 0; i < size; i ++ ) {
temp.add( tables[i] );
}
final String[] tables = persister.getConstraintOrderedTableNameClosure();
Collections.addAll( temp, tables );
}
else {
temp.add(
@ -91,11 +87,11 @@ public class AssignmentSpecification {
}
this.tableNames = Collections.unmodifiableSet( temp );
if (rhs==null) {
if ( rhs == null ) {
hqlParameters = new ParameterSpecification[0];
}
else if ( isParam( rhs ) ) {
hqlParameters = new ParameterSpecification[] { ( (ParameterNode) rhs ).getHqlParameterSpecification() };
hqlParameters = new ParameterSpecification[] {( (ParameterNode) rhs ).getHqlParameterSpecification()};
}
else {
List parameterList = ASTUtil.collectChildren(
@ -106,10 +102,10 @@ public class AssignmentSpecification {
}
}
);
hqlParameters = new ParameterSpecification[ parameterList.size() ];
hqlParameters = new ParameterSpecification[parameterList.size()];
Iterator itr = parameterList.iterator();
int i = 0;
while( itr.hasNext() ) {
while ( itr.hasNext() ) {
hqlParameters[i++] = ( (ParameterNode) itr.next() ).getHqlParameterSpecification();
}
}
@ -127,10 +123,13 @@ public class AssignmentSpecification {
if ( sqlAssignmentString == null ) {
try {
SqlGenerator sqlGenerator = new SqlGenerator( factory );
sqlGenerator.comparisonExpr( eq, false ); // false indicates to not generate parens around the assignment
sqlGenerator.comparisonExpr(
eq,
false
); // false indicates to not generate parens around the assignment
sqlAssignmentString = sqlGenerator.getSQL();
}
catch( Throwable t ) {
catch (Throwable t) {
throw new QueryException( "cannot interpret set-clause assignment" );
}
}

View File

@ -36,50 +36,54 @@ import antlr.SemanticException;
public class BetweenOperatorNode extends SqlNode implements OperatorNode {
public void initialize() throws SemanticException {
Node fixture = getFixtureOperand();
final Node fixture = getFixtureOperand();
if ( fixture == null ) {
throw new SemanticException( "fixture operand of a between operator was null" );
}
Node low = getLowOperand();
final Node low = getLowOperand();
if ( low == null ) {
throw new SemanticException( "low operand of a between operator was null" );
}
Node high = getHighOperand();
final Node high = getHighOperand();
if ( high == null ) {
throw new SemanticException( "high operand of a between operator was null" );
}
check( fixture, low, high );
check( low, high, fixture );
check( high, fixture, low );
}
@Override
public Type getDataType() {
// logic operators by definition resolve to boolean.
return StandardBasicTypes.BOOLEAN;
}
public Node getFixtureOperand() {
return ( Node ) getFirstChild();
return (Node) getFirstChild();
}
public Node getLowOperand() {
return ( Node ) getFirstChild().getNextSibling();
return (Node) getFirstChild().getNextSibling();
}
public Node getHighOperand() {
return ( Node ) getFirstChild().getNextSibling().getNextSibling();
return (Node) getFirstChild().getNextSibling().getNextSibling();
}
private void check(Node check, Node first, Node second) {
if ( ExpectedTypeAwareNode.class.isAssignableFrom( check.getClass() ) ) {
Type expectedType = null;
if ( SqlNode.class.isAssignableFrom( first.getClass() ) ) {
expectedType = ( ( SqlNode ) first ).getDataType();
expectedType = ( (SqlNode) first ).getDataType();
}
if ( expectedType == null && SqlNode.class.isAssignableFrom( second.getClass() ) ) {
expectedType = ( ( SqlNode ) second ).getDataType();
expectedType = ( (SqlNode) second ).getDataType();
}
( ( ExpectedTypeAwareNode ) check ).setExpectedType( expectedType );
( (ExpectedTypeAwareNode) check ).setExpectedType( expectedType );
}
}
}

View File

@ -35,15 +35,17 @@ import antlr.SemanticException;
*
* @author Gavin King
*/
public class BinaryArithmeticOperatorNode extends AbstractSelectExpression implements BinaryOperatorNode, DisplayableNode {
public class BinaryArithmeticOperatorNode extends AbstractSelectExpression
implements BinaryOperatorNode, DisplayableNode {
@Override
public void initialize() throws SemanticException {
final Node lhs = getLeftHandOperand();
final Node rhs = getRightHandOperand();
if ( lhs == null ) {
throw new SemanticException( "left-hand operand of a binary operator was null" );
}
final Node rhs = getRightHandOperand();
if ( rhs == null ) {
throw new SemanticException( "right-hand operand of a binary operator was null" );
}
@ -65,7 +67,7 @@ public class BinaryArithmeticOperatorNode extends AbstractSelectExpression imple
else {
expectedType = rhType;
}
( ( ExpectedTypeAwareNode ) lhs ).setExpectedType( expectedType );
( (ExpectedTypeAwareNode) lhs ).setExpectedType( expectedType );
}
else if ( ParameterNode.class.isAssignableFrom( rhs.getClass() ) && lhType != null ) {
Type expectedType = null;
@ -105,10 +107,12 @@ public class BinaryArithmeticOperatorNode extends AbstractSelectExpression imple
// TODO : we may also want to check that the types here map to exactly one column/JDBC-type
// can't think of a situation where arithmetic expression between multi-column mappings
// makes any sense.
Node lhs = getLeftHandOperand();
Node rhs = getRightHandOperand();
Type lhType = ( lhs instanceof SqlNode ) ? ( (SqlNode) lhs ).getDataType() : null;
Type rhType = ( rhs instanceof SqlNode ) ? ( (SqlNode) rhs ).getDataType() : null;
final Node lhs = getLeftHandOperand();
final Node rhs = getRightHandOperand();
final Type lhType = ( lhs instanceof SqlNode ) ? ( (SqlNode) lhs ).getDataType() : null;
final Type rhType = ( rhs instanceof SqlNode ) ? ( (SqlNode) rhs ).getDataType() : null;
if ( isDateTimeType( lhType ) || isDateTimeType( rhType ) ) {
return resolveDateTimeArithmeticResultType( lhType, rhType );
}
@ -116,7 +120,8 @@ public class BinaryArithmeticOperatorNode extends AbstractSelectExpression imple
if ( lhType == null ) {
if ( rhType == null ) {
// we do not know either type
return StandardBasicTypes.DOUBLE; //BLIND GUESS!
// BLIND GUESS!
return StandardBasicTypes.DOUBLE;
}
else {
// we know only the rhs-hand type, so use that
@ -129,22 +134,22 @@ public class BinaryArithmeticOperatorNode extends AbstractSelectExpression imple
return lhType;
}
else {
if ( lhType== StandardBasicTypes.DOUBLE || rhType==StandardBasicTypes.DOUBLE ) {
if ( lhType == StandardBasicTypes.DOUBLE || rhType == StandardBasicTypes.DOUBLE ) {
return StandardBasicTypes.DOUBLE;
}
if ( lhType==StandardBasicTypes.FLOAT || rhType==StandardBasicTypes.FLOAT ) {
if ( lhType == StandardBasicTypes.FLOAT || rhType == StandardBasicTypes.FLOAT ) {
return StandardBasicTypes.FLOAT;
}
if ( lhType==StandardBasicTypes.BIG_DECIMAL || rhType==StandardBasicTypes.BIG_DECIMAL ) {
if ( lhType == StandardBasicTypes.BIG_DECIMAL || rhType == StandardBasicTypes.BIG_DECIMAL ) {
return StandardBasicTypes.BIG_DECIMAL;
}
if ( lhType==StandardBasicTypes.BIG_INTEGER || rhType==StandardBasicTypes.BIG_INTEGER ) {
if ( lhType == StandardBasicTypes.BIG_INTEGER || rhType == StandardBasicTypes.BIG_INTEGER ) {
return StandardBasicTypes.BIG_INTEGER;
}
if ( lhType==StandardBasicTypes.LONG || rhType==StandardBasicTypes.LONG ) {
if ( lhType == StandardBasicTypes.LONG || rhType == StandardBasicTypes.LONG ) {
return StandardBasicTypes.LONG;
}
if ( lhType==StandardBasicTypes.INTEGER || rhType==StandardBasicTypes.INTEGER ) {
if ( lhType == StandardBasicTypes.INTEGER || rhType == StandardBasicTypes.INTEGER ) {
return StandardBasicTypes.INTEGER;
}
return lhType;

View File

@ -50,11 +50,12 @@ public class BinaryLogicOperatorNode extends HqlSqlWalkerNode implements BinaryO
*/
@Override
public void initialize() throws SemanticException {
Node lhs = getLeftHandOperand();
final Node lhs = getLeftHandOperand();
if ( lhs == null ) {
throw new SemanticException( "left-hand operand of a binary operator was null" );
}
Node rhs = getRightHandOperand();
final Node rhs = getRightHandOperand();
if ( rhs == null ) {
throw new SemanticException( "right-hand operand of a binary operator was null" );
}
@ -70,10 +71,10 @@ public class BinaryLogicOperatorNode extends HqlSqlWalkerNode implements BinaryO
}
if ( ExpectedTypeAwareNode.class.isAssignableFrom( lhs.getClass() ) ) {
( ( ExpectedTypeAwareNode ) lhs ).setExpectedType( rhsType );
( (ExpectedTypeAwareNode) lhs ).setExpectedType( rhsType );
}
if ( ExpectedTypeAwareNode.class.isAssignableFrom( rhs.getClass() ) ) {
( ( ExpectedTypeAwareNode ) rhs ).setExpectedType( lhsType );
( (ExpectedTypeAwareNode) rhs ).setExpectedType( lhsType );
}
mutateRowValueConstructorSyntaxesIfNecessary( lhsType, rhsType );
@ -89,7 +90,7 @@ public class BinaryLogicOperatorNode extends HqlSqlWalkerNode implements BinaryO
if ( lhsColumnSpan != getColumnSpan( rhsType, sessionFactory ) ) {
throw new TypeMismatchException(
"left and right hand sides of a binary logic operator were incompatibile [" +
lhsType.getName() + " : "+ rhsType.getName() + "]"
lhsType.getName() + " : " + rhsType.getName() + "]"
);
}
if ( lhsColumnSpan > 1 ) {
@ -105,7 +106,7 @@ public class BinaryLogicOperatorNode extends HqlSqlWalkerNode implements BinaryO
private int getColumnSpan(Type type, SessionFactoryImplementor sfi) {
int columnSpan = type.getColumnSpan( sfi );
if ( columnSpan == 0 && type instanceof OneToOneType ) {
columnSpan = ( ( OneToOneType ) type ).getIdentifierOrUniqueKeyType( sfi ).getColumnSpan( sfi );
columnSpan = ( (OneToOneType) type ).getIdentifierOrUniqueKeyType( sfi ).getColumnSpan( sfi );
}
return columnSpan;
}
@ -205,11 +206,11 @@ public class BinaryLogicOperatorNode extends HqlSqlWalkerNode implements BinaryO
return rtn;
}
else if ( operand.getType() == HqlSqlTokenTypes.VECTOR_EXPR ) {
String[] rtn = new String[ operand.getNumberOfChildren() ];
String[] rtn = new String[operand.getNumberOfChildren()];
int x = 0;
AST node = operand.getFirstChild();
while ( node != null ) {
rtn[ x++ ] = node.getText();
rtn[x++] = node.getText();
node = node.getNextSibling();
}
return rtn;
@ -245,7 +246,7 @@ public class BinaryLogicOperatorNode extends HqlSqlWalkerNode implements BinaryO
}
@Override
public Type getDataType() {
public Type getDataType() {
// logic operators by definition resolve to booleans
return StandardBasicTypes.BOOLEAN;
}
@ -256,7 +257,7 @@ public class BinaryLogicOperatorNode extends HqlSqlWalkerNode implements BinaryO
* @return The left-hand operand
*/
public Node getLeftHandOperand() {
return ( Node ) getFirstChild();
return (Node) getFirstChild();
}
/**
@ -265,6 +266,6 @@ public class BinaryLogicOperatorNode extends HqlSqlWalkerNode implements BinaryO
* @return The right-hand operand
*/
public Node getRightHandOperand() {
return ( Node ) getFirstChild().getNextSibling();
return (Node) getFirstChild().getNextSibling();
}
}

View File

@ -47,15 +47,15 @@ import antlr.collections.AST;
* @author josh
*/
public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, DisplayableNode {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( FromClause.class );
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( FromClause.class );
public static final int ROOT_LEVEL = 1;
private int level = ROOT_LEVEL;
private Set fromElements = new HashSet();
private Map fromElementByClassAlias = new HashMap();
private Map fromElementByTableAlias = new HashMap();
private Map fromElementsByPath = new HashMap();
private Set<FromElement> fromElements = new HashSet<FromElement>();
private Map<String,FromElement> fromElementByClassAlias = new HashMap<String,FromElement>();
private Map<String,FromElement> fromElementByTableAlias = new HashMap<String,FromElement>();
private Map<String,FromElement> fromElementsByPath = new HashMap<String,FromElement>();
/**
* All of the implicit FROM xxx JOIN yyy elements that are the destination of a collection. These are created from
@ -69,7 +69,7 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
/**
* Collection of FROM clauses of which this is the parent.
*/
private Set childFromClauses;
private Set<FromClause> childFromClauses;
/**
* Counts the from elements as they are added.
*/
@ -82,8 +82,9 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
/**
* Adds a new from element to the from node.
*
* @param path The reference to the class.
* @param path The reference to the class.
* @param alias The alias AST.
*
* @return FromElement - The new FROM element.
*/
public FromElement addFromElement(String path, AST alias) throws SemanticException {
@ -124,10 +125,11 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
* Retreives the from-element represented by the given alias.
*
* @param aliasOrClassName The alias by which to locate the from-element.
*
* @return The from-element assigned the given alias, or null if none.
*/
public FromElement getFromElement(String aliasOrClassName) {
FromElement fromElement = ( FromElement ) fromElementByClassAlias.get( aliasOrClassName );
FromElement fromElement = fromElementByClassAlias.get( aliasOrClassName );
if ( fromElement == null && getSessionFactoryHelper().isStrictJPAQLComplianceEnabled() ) {
fromElement = findIntendedAliasedFromElementBasedOnCrazyJPARequirements( aliasOrClassName );
}
@ -138,7 +140,7 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
}
public FromElement findFromElementBySqlAlias(String sqlAlias) {
FromElement fromElement = ( FromElement ) fromElementByTableAlias.get( sqlAlias );
FromElement fromElement = fromElementByTableAlias.get( sqlAlias );
if ( fromElement == null && parentFromClause != null ) {
fromElement = parentFromClause.getFromElement( sqlAlias );
}
@ -159,12 +161,10 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
}
private FromElement findIntendedAliasedFromElementBasedOnCrazyJPARequirements(String specifiedAlias) {
Iterator itr = fromElementByClassAlias.entrySet().iterator();
while ( itr.hasNext() ) {
Map.Entry entry = ( Map.Entry ) itr.next();
String alias = ( String ) entry.getKey();
for ( Map.Entry<String, FromElement> entry : fromElementByClassAlias.entrySet() ) {
final String alias = entry.getKey();
if ( alias.equalsIgnoreCase( specifiedAlias ) ) {
return ( FromElement ) entry.getValue();
return entry.getValue();
}
}
return null;
@ -174,6 +174,7 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
* Convenience method to check whether a given token represents a from-element alias.
*
* @param possibleAlias The potential from-element alias to check.
*
* @return True if the possibleAlias is an alias to a from-element visible
* from this point in the query graph.
*/
@ -201,7 +202,7 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
// if ( fromElements == null || fromElements.isEmpty() ) {
// throw new QueryException( "Unable to locate from element" );
// }
return (FromElement) getFromElements().get(0);
return (FromElement) getFromElements().get( 0 );
}
/**
@ -227,38 +228,38 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
private static ASTUtil.FilterPredicate fromElementPredicate = new ASTUtil.IncludePredicate() {
@Override
public boolean include(AST node) {
FromElement fromElement = ( FromElement ) node;
public boolean include(AST node) {
FromElement fromElement = (FromElement) node;
return fromElement.isFromOrJoinFragment();
}
};
private static ASTUtil.FilterPredicate projectionListPredicate = new ASTUtil.IncludePredicate() {
@Override
public boolean include(AST node) {
FromElement fromElement = ( FromElement ) node;
public boolean include(AST node) {
FromElement fromElement = (FromElement) node;
return fromElement.inProjectionList();
}
};
private static ASTUtil.FilterPredicate collectionFetchPredicate = new ASTUtil.IncludePredicate() {
@Override
public boolean include(AST node) {
FromElement fromElement = ( FromElement ) node;
public boolean include(AST node) {
FromElement fromElement = (FromElement) node;
return fromElement.isFetch() && fromElement.getQueryableCollection() != null;
}
};
private static ASTUtil.FilterPredicate explicitFromPredicate = new ASTUtil.IncludePredicate() {
@Override
public boolean include(AST node) {
final FromElement fromElement = ( FromElement ) node;
public boolean include(AST node) {
final FromElement fromElement = (FromElement) node;
return !fromElement.isImplied();
}
};
FromElement findCollectionJoin(String path) {
return ( FromElement ) collectionJoinFromElementsByPath.get( path );
return (FromElement) collectionJoinFromElementsByPath.get( path );
}
/**
@ -275,7 +276,7 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
FromElement findJoinByPathLocal(String path) {
Map joinsByPath = fromElementsByPath;
return ( FromElement ) joinsByPath.get( path );
return (FromElement) joinsByPath.get( path );
}
void addJoinByPathMap(String path, FromElement destination) {
@ -289,6 +290,7 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
* Returns true if the from node contains the class alias name.
*
* @param alias The HQL class alias name.
*
* @return true if the from node contains the class alias name.
*/
public boolean containsClassAlias(String alias) {
@ -303,6 +305,7 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
* Returns true if the from node contains the table alias name.
*
* @param alias The SQL table alias name.
*
* @return true if the from node contains the table alias name.
*/
public boolean containsTableAlias(String alias) {
@ -332,16 +335,14 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
private void addChild(FromClause fromClause) {
if ( childFromClauses == null ) {
childFromClauses = new HashSet();
childFromClauses = new HashSet<FromClause>();
}
childFromClauses.add( fromClause );
}
public FromClause locateChildFromClauseWithJoinByPath(String path) {
if ( childFromClauses != null && !childFromClauses.isEmpty() ) {
Iterator children = childFromClauses.iterator();
while ( children.hasNext() ) {
FromClause child = ( FromClause ) children.next();
for ( FromClause child : childFromClauses ) {
if ( child.findJoinByPathLocal( path ) != null ) {
return child;
}
@ -368,7 +369,10 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
void addCollectionJoinFromElementByPath(String path, FromElement destination) {
LOG.debugf( "addCollectionJoinFromElementByPath() : %s -> %s", path, destination );
collectionJoinFromElementsByPath.put( path, destination ); // Add the new node to the map so that we don't create it twice.
collectionJoinFromElementsByPath.put(
path,
destination
); // Add the new node to the map so that we don't create it twice.
}
public FromClause getParentFromClause() {
@ -390,8 +394,7 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
while ( iter.hasNext() ) {
childrenInTree.add( iter.next() );
}
for ( Iterator iterator = fromElements.iterator(); iterator.hasNext(); ) {
FromElement fromElement = ( FromElement ) iterator.next();
for ( FromElement fromElement : fromElements ) {
if ( !childrenInTree.contains( fromElement ) ) {
throw new IllegalStateException( "Element not in AST: " + fromElement );
}
@ -403,9 +406,7 @@ public class FromClause extends HqlSqlWalkerNode implements HqlSqlTokenTypes, Di
}
@Override
public String toString() {
return "FromClause{" +
"level=" + level +
"}";
public String toString() {
return "FromClause{level=" + level + "}";
}
}

View File

@ -30,6 +30,7 @@ import org.hibernate.hql.internal.ast.util.ASTUtil;
import org.hibernate.hql.internal.ast.util.AliasGenerator;
import org.hibernate.hql.internal.ast.util.PathHelper;
import org.hibernate.hql.internal.ast.util.SessionFactoryHelper;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.persister.collection.QueryableCollection;
@ -43,8 +44,6 @@ import org.hibernate.type.ComponentType;
import org.hibernate.type.EntityType;
import org.hibernate.type.Type;
import org.jboss.logging.Logger;
import antlr.ASTFactory;
import antlr.SemanticException;
import antlr.collections.AST;
@ -55,8 +54,7 @@ import antlr.collections.AST;
* @author josh
*/
public class FromElementFactory implements SqlTokenTypes {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, FromElementFactory.class.getName());
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( FromElementFactory.class );
private FromClause fromClause;
private FromElement origin;
@ -84,12 +82,12 @@ public class FromElementFactory implements SqlTokenTypes {
* Creates collection from elements.
*/
public FromElementFactory(
FromClause fromClause,
FromElement origin,
String path,
String classAlias,
String[] columns,
boolean implied) {
FromClause fromClause,
FromElement origin,
String path,
String classAlias,
String[] columns,
boolean implied) {
this( fromClause, origin, path );
this.classAlias = classAlias;
this.columns = columns;
@ -98,23 +96,25 @@ public class FromElementFactory implements SqlTokenTypes {
}
FromElement addFromElement() throws SemanticException {
FromClause parentFromClause = fromClause.getParentFromClause();
final FromClause parentFromClause = fromClause.getParentFromClause();
if ( parentFromClause != null ) {
// Look up class name using the first identifier in the path.
String pathAlias = PathHelper.getAlias( path );
FromElement parentFromElement = parentFromClause.getFromElement( pathAlias );
final String pathAlias = PathHelper.getAlias( path );
final FromElement parentFromElement = parentFromClause.getFromElement( pathAlias );
if ( parentFromElement != null ) {
return createFromElementInSubselect( path, pathAlias, parentFromElement, classAlias );
}
}
EntityPersister entityPersister = fromClause.getSessionFactoryHelper().requireClassPersister( path );
final EntityPersister entityPersister = fromClause.getSessionFactoryHelper().requireClassPersister( path );
FromElement elem = createAndAddFromElement( path,
final FromElement elem = createAndAddFromElement(
path,
classAlias,
entityPersister,
( EntityType ) ( ( Queryable ) entityPersister ).getType(),
null );
(EntityType) ( (Queryable) entityPersister ).getType(),
null
);
// Add to the query spaces.
fromClause.getWalker().addQuerySpaces( entityPersister.getQuerySpaces() );
@ -123,11 +123,12 @@ public class FromElementFactory implements SqlTokenTypes {
}
private FromElement createFromElementInSubselect(
String path,
String pathAlias,
FromElement parentFromElement,
String classAlias) throws SemanticException {
String path,
String pathAlias,
FromElement parentFromElement,
String classAlias) throws SemanticException {
LOG.debugf( "createFromElementInSubselect() : path = %s", path );
// Create an DotNode AST for the path and resolve it.
FromElement fromElement = evaluateFromElementPath( path, classAlias );
EntityPersister entityPersister = fromElement.getEntityPersister();
@ -148,11 +149,12 @@ public class FromElementFactory implements SqlTokenTypes {
if ( fromElement.getFromClause() != fromClause ) {
LOG.debug( "createFromElementInSubselect() : creating a new FROM element..." );
fromElement = createFromElement( entityPersister );
initializeAndAddFromElement( fromElement,
initializeAndAddFromElement(
fromElement,
path,
classAlias,
entityPersister,
( EntityType ) ( ( Queryable ) entityPersister ).getType(),
(EntityType) ( (Queryable) entityPersister ).getType(),
tableAlias
);
}
@ -162,31 +164,36 @@ public class FromElementFactory implements SqlTokenTypes {
private FromElement evaluateFromElementPath(String path, String classAlias) throws SemanticException {
ASTFactory factory = fromClause.getASTFactory();
FromReferenceNode pathNode = ( FromReferenceNode ) PathHelper.parsePath( path, factory );
pathNode.recursiveResolve( FromReferenceNode.ROOT_LEVEL, // This is the root level node.
false, // Generate an explicit from clause at the root.
FromReferenceNode pathNode = (FromReferenceNode) PathHelper.parsePath( path, factory );
pathNode.recursiveResolve(
// This is the root level node.
FromReferenceNode.ROOT_LEVEL,
// Generate an explicit from clause at the root.
false,
classAlias,
null
null
);
if (pathNode.getImpliedJoin() != null) return pathNode.getImpliedJoin();
return pathNode.getFromElement();
if ( pathNode.getImpliedJoin() != null ) {
return pathNode.getImpliedJoin();
}
return pathNode.getFromElement();
}
FromElement createCollectionElementsJoin(
QueryableCollection queryableCollection,
String collectionName) throws SemanticException {
QueryableCollection queryableCollection,
String collectionName) throws SemanticException {
JoinSequence collectionJoinSequence = fromClause.getSessionFactoryHelper()
.createCollectionJoinSequence( queryableCollection, collectionName );
.createCollectionJoinSequence( queryableCollection, collectionName );
this.queryableCollection = queryableCollection;
return createCollectionJoin( collectionJoinSequence, null );
}
public FromElement createCollection(
QueryableCollection queryableCollection,
String role,
JoinType joinType,
boolean fetchFlag,
boolean indexed)
QueryableCollection queryableCollection,
String role,
JoinType joinType,
boolean fetchFlag,
boolean indexed)
throws SemanticException {
if ( !collection ) {
throw new IllegalStateException( "FromElementFactory not initialized for collections!" );
@ -214,14 +221,17 @@ public class FromElementFactory implements SqlTokenTypes {
}
Type elementType = queryableCollection.getElementType();
if ( elementType.isEntityType() ) { // A collection of entities...
if ( elementType.isEntityType() ) {
// A collection of entities...
elem = createEntityAssociation( role, roleAlias, joinType );
}
else if ( elementType.isComponentType() ) { // A collection of components...
else if ( elementType.isComponentType() ) {
// A collection of components...
JoinSequence joinSequence = createJoinSequence( roleAlias, joinType );
elem = createCollectionJoin( joinSequence, roleAlias );
}
else { // A collection of scalar elements...
else {
// A collection of scalar elements...
JoinSequence joinSequence = createJoinSequence( roleAlias, joinType );
elem = createCollectionJoin( joinSequence, roleAlias );
}
@ -234,7 +244,8 @@ public class FromElementFactory implements SqlTokenTypes {
}
if ( explicitSubqueryFromElement ) {
elem.setInProjectionList( true ); // Treat explict from elements in sub-queries properly.
// Treat explict from elements in sub-queries properly.
elem.setInProjectionList( true );
}
if ( fetchFlag ) {
@ -244,13 +255,13 @@ public class FromElementFactory implements SqlTokenTypes {
}
public FromElement createEntityJoin(
String entityClass,
String tableAlias,
JoinSequence joinSequence,
boolean fetchFlag,
boolean inFrom,
EntityType type,
String role,
String entityClass,
String tableAlias,
JoinSequence joinSequence,
boolean fetchFlag,
boolean inFrom,
EntityType type,
String role,
String joinPath) throws SemanticException {
FromElement elem = createJoin( entityClass, tableAlias, joinSequence, type, false );
elem.setFetch( fetchFlag );
@ -280,14 +291,14 @@ public class FromElementFactory implements SqlTokenTypes {
// and 'elem' represents an implicit join
if ( elem.getFromClause() != elem.getOrigin().getFromClause() ||
// ( implied && DotNode.useThetaStyleImplicitJoins ) ) {
DotNode.useThetaStyleImplicitJoins ) {
DotNode.useThetaStyleImplicitJoins ) {
// the "root from-element" in correlated subqueries do need this piece
elem.setType( FROM_FRAGMENT );
joinSequence.setUseThetaStyle( true );
elem.setUseFromFragment( false );
}
}
elem.setRole( role );
return elem;
@ -322,9 +333,9 @@ public class FromElementFactory implements SqlTokenTypes {
associatedEntityName,
classAlias,
targetEntityPersister,
( EntityType ) queryableCollection.getElementType(),
(EntityType) queryableCollection.getElementType(),
tableAlias
);
);
// If the join is implied, then don't include sub-classes on the element.
if ( implied ) {
destination.setIncludeSubclasses( false );
@ -343,59 +354,81 @@ public class FromElementFactory implements SqlTokenTypes {
// Create the join element under the from element.
JoinType joinType = JoinType.INNER_JOIN;
JoinSequence joinSequence = sfh.createJoinSequence( implied, elementAssociationType, tableAlias, joinType, targetColumns );
JoinSequence joinSequence = sfh.createJoinSequence(
implied,
elementAssociationType,
tableAlias,
joinType,
targetColumns
);
elem = initializeJoin( path, destination, joinSequence, targetColumns, origin, false );
elem.setUseFromFragment( true ); // The associated entity is implied, but it must be included in the FROM.
elem.setCollectionTableAlias( roleAlias ); // The collection alias is the role.
elem.setUseFromFragment( true ); // The associated entity is implied, but it must be included in the FROM.
elem.setCollectionTableAlias( roleAlias ); // The collection alias is the role.
return elem;
}
private FromElement createCollectionJoin(JoinSequence collectionJoinSequence, String tableAlias) throws SemanticException {
private FromElement createCollectionJoin(JoinSequence collectionJoinSequence, String tableAlias)
throws SemanticException {
String text = queryableCollection.getTableName();
AST ast = createFromElement( text );
FromElement destination = ( FromElement ) ast;
FromElement destination = (FromElement) ast;
Type elementType = queryableCollection.getElementType();
if ( elementType.isCollectionType() ) {
throw new SemanticException( "Collections of collections are not supported!" );
}
destination.initializeCollection( fromClause, classAlias, tableAlias );
destination.setType( JOIN_FRAGMENT ); // Tag this node as a JOIN.
destination.setIncludeSubclasses( false ); // Don't include subclasses in the join.
destination.setCollectionJoin( true ); // This is a clollection join.
destination.setType( JOIN_FRAGMENT ); // Tag this node as a JOIN.
destination.setIncludeSubclasses( false ); // Don't include subclasses in the join.
destination.setCollectionJoin( true ); // This is a clollection join.
destination.setJoinSequence( collectionJoinSequence );
destination.setOrigin( origin, false );
destination.setCollectionTableAlias(tableAlias);
destination.setCollectionTableAlias( tableAlias );
// origin.addDestination( destination );
// This was the cause of HHH-242
// origin.setType( FROM_FRAGMENT ); // Set the parent node type so that the AST is properly formed.
origin.setText( "" ); // The destination node will have all the FROM text.
origin.setCollectionJoin( true ); // The parent node is a collection join too (voodoo - see JoinProcessor)
origin.setText( "" ); // The destination node will have all the FROM text.
origin.setCollectionJoin( true ); // The parent node is a collection join too (voodoo - see JoinProcessor)
fromClause.addCollectionJoinFromElementByPath( path, destination );
fromClause.getWalker().addQuerySpaces( queryableCollection.getCollectionSpaces() );
return destination;
}
private FromElement createEntityAssociation(
String role,
String roleAlias,
JoinType joinType) throws SemanticException {
String role,
String roleAlias,
JoinType joinType) throws SemanticException {
FromElement elem;
Queryable entityPersister = ( Queryable ) queryableCollection.getElementPersister();
Queryable entityPersister = (Queryable) queryableCollection.getElementPersister();
String associatedEntityName = entityPersister.getEntityName();
// Get the class name of the associated entity.
if ( queryableCollection.isOneToMany() ) {
LOG.debugf( "createEntityAssociation() : One to many - path = %s role = %s associatedEntityName = %s",
LOG.debugf(
"createEntityAssociation() : One to many - path = %s role = %s associatedEntityName = %s",
path,
role,
associatedEntityName );
associatedEntityName
);
JoinSequence joinSequence = createJoinSequence( roleAlias, joinType );
elem = createJoin( associatedEntityName, roleAlias, joinSequence, ( EntityType ) queryableCollection.getElementType(), false );
elem = createJoin(
associatedEntityName,
roleAlias,
joinSequence,
(EntityType) queryableCollection.getElementType(),
false
);
}
else {
LOG.debugf( "createManyToMany() : path = %s role = %s associatedEntityName = %s", path, role, associatedEntityName );
elem = createManyToMany( role, associatedEntityName,
roleAlias, entityPersister, ( EntityType ) queryableCollection.getElementType(), joinType );
LOG.debugf(
"createManyToMany() : path = %s role = %s associatedEntityName = %s",
path,
role,
associatedEntityName
);
elem = createManyToMany(
role, associatedEntityName,
roleAlias, entityPersister, (EntityType) queryableCollection.getElementType(), joinType
);
fromClause.getWalker().addQuerySpaces( queryableCollection.getCollectionSpaces() );
}
elem.setCollectionTableAlias( roleAlias );
@ -403,28 +436,30 @@ public class FromElementFactory implements SqlTokenTypes {
}
private FromElement createJoin(
String entityClass,
String tableAlias,
JoinSequence joinSequence,
EntityType type,
boolean manyToMany) throws SemanticException {
String entityClass,
String tableAlias,
JoinSequence joinSequence,
EntityType type,
boolean manyToMany) throws SemanticException {
// origin, path, implied, columns, classAlias,
EntityPersister entityPersister = fromClause.getSessionFactoryHelper().requireClassPersister( entityClass );
FromElement destination = createAndAddFromElement( entityClass,
FromElement destination = createAndAddFromElement(
entityClass,
classAlias,
entityPersister,
type,
tableAlias );
tableAlias
);
return initializeJoin( path, destination, joinSequence, getColumns(), origin, manyToMany );
}
private FromElement createManyToMany(
String role,
String associatedEntityName,
String roleAlias,
Queryable entityPersister,
EntityType type,
JoinType joinType) throws SemanticException {
String role,
String associatedEntityName,
String roleAlias,
Queryable entityPersister,
EntityType type,
JoinType joinType) throws SemanticException {
FromElement elem;
SessionFactoryHelper sfh = fromClause.getSessionFactoryHelper();
if ( inElementsFunction /*implied*/ ) {
@ -440,7 +475,12 @@ public class FromElementFactory implements SqlTokenTypes {
String[] secondJoinColumns = sfh.getCollectionElementColumns( role, roleAlias );
// Add the second join, the one that ends in the destination table.
JoinSequence joinSequence = createJoinSequence( roleAlias, joinType );
joinSequence.addJoin( sfh.getElementAssociationType( collectionType ), tableAlias, joinType, secondJoinColumns );
joinSequence.addJoin(
sfh.getElementAssociationType( collectionType ),
tableAlias,
joinType,
secondJoinColumns
);
elem = createJoin( associatedEntityName, tableAlias, joinSequence, type, false );
elem.setUseFromFragment( true );
}
@ -457,11 +497,11 @@ public class FromElementFactory implements SqlTokenTypes {
}
private FromElement createAndAddFromElement(
String className,
String classAlias,
EntityPersister entityPersister,
EntityType type,
String tableAlias) {
String className,
String classAlias,
EntityPersister entityPersister,
EntityType type,
String tableAlias) {
if ( !( entityPersister instanceof Joinable ) ) {
throw new IllegalArgumentException( "EntityPersister " + entityPersister + " does not implement Joinable!" );
}
@ -471,12 +511,12 @@ public class FromElementFactory implements SqlTokenTypes {
}
private void initializeAndAddFromElement(
FromElement element,
String className,
String classAlias,
EntityPersister entityPersister,
EntityType type,
String tableAlias) {
FromElement element,
String className,
String classAlias,
EntityPersister entityPersister,
EntityType type,
String tableAlias) {
if ( tableAlias == null ) {
AliasGenerator aliasGenerator = fromClause.getAliasGenerator();
tableAlias = aliasGenerator.createName( entityPersister.getEntityName() );
@ -485,17 +525,19 @@ public class FromElementFactory implements SqlTokenTypes {
}
private FromElement createFromElement(EntityPersister entityPersister) {
Joinable joinable = ( Joinable ) entityPersister;
Joinable joinable = (Joinable) entityPersister;
String text = joinable.getTableName();
AST ast = createFromElement( text );
FromElement element = ( FromElement ) ast;
FromElement element = (FromElement) ast;
return element;
}
private AST createFromElement(String text) {
AST ast = ASTUtil.create( fromClause.getASTFactory(),
AST ast = ASTUtil.create(
fromClause.getASTFactory(),
implied ? IMPLIED_FROM : FROM_FRAGMENT, // This causes the factory to instantiate the desired class.
text );
text
);
// Reset the node type, because the rest of the system is expecting FROM_FRAGMENT, all we wanted was
// for the factory to create the right sub-class. This might get reset again later on anyway to make the
// SQL generation simpler.
@ -504,12 +546,12 @@ public class FromElementFactory implements SqlTokenTypes {
}
private FromElement initializeJoin(
String path,
FromElement destination,
JoinSequence joinSequence,
String[] columns,
FromElement origin,
boolean manyToMany) {
String path,
FromElement destination,
JoinSequence joinSequence,
String[] columns,
FromElement origin,
boolean manyToMany) {
destination.setType( JOIN_FRAGMENT );
destination.setJoinSequence( joinSequence );
destination.setColumns( columns );

View File

@ -36,6 +36,7 @@ import org.hibernate.hql.internal.CollectionProperties;
import org.hibernate.hql.internal.CollectionSubqueryFactory;
import org.hibernate.hql.internal.NameGenerator;
import org.hibernate.hql.internal.antlr.HqlSqlTokenTypes;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.param.ParameterSpecification;
@ -49,8 +50,6 @@ import org.hibernate.persister.entity.Queryable;
import org.hibernate.type.EntityType;
import org.hibernate.type.Type;
import org.jboss.logging.Logger;
import antlr.SemanticException;
/**
@ -59,8 +58,7 @@ import antlr.SemanticException;
* @author josh
*/
class FromElementType {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, FromElementType.class.getName());
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( FromElementType.class );
private FromElement fromElement;
private EntityType entityType;
@ -76,7 +74,7 @@ class FromElementType {
this.persister = persister;
this.entityType = entityType;
if ( persister != null ) {
fromElement.setText( ( ( Queryable ) persister ).getTableName() + " " + getTableAlias() );
fromElement.setText( ( (Queryable) persister ).getTableName() + " " + getTableAlias() );
}
}
@ -117,7 +115,9 @@ class FromElementType {
}
public Type getSelectType() {
if (entityType==null) return null;
if ( entityType == null ) {
return null;
}
boolean shallow = fromElement.getFromClause().getWalker().isShallowQuery();
return fromElement.getSessionFactoryHelper()
.getFactory()
@ -131,18 +131,22 @@ class FromElementType {
* @return the Hibernate queryable implementation for the HQL class.
*/
public Queryable getQueryable() {
return ( persister instanceof Queryable ) ? ( Queryable ) persister : null;
return ( persister instanceof Queryable ) ? (Queryable) persister : null;
}
/**
* Render the identifier select, but in a 'scalar' context (i.e. generate the column alias).
*
* @param i the sequence of the returned type
*
* @return the identifier select with the column alias.
*/
String renderScalarIdentifierSelect(int i) {
checkInitialized();
String[] cols = getPropertyMapping( EntityPersister.ENTITY_ID ).toColumns( getTableAlias(), EntityPersister.ENTITY_ID );
String[] cols = getPropertyMapping( EntityPersister.ENTITY_ID ).toColumns(
getTableAlias(),
EntityPersister.ENTITY_ID
);
StringBuilder buf = new StringBuilder();
// For property references generate <tablealias>.<columnname> as <projectionalias>
for ( int j = 0; j < cols.length; j++ ) {
@ -159,7 +163,8 @@ class FromElementType {
* Returns the identifier select SQL fragment.
*
* @param size The total number of returned types.
* @param k The sequence of the current returned type.
* @param k The sequence of the current returned type.
*
* @return the identifier select SQL fragment.
*/
String renderIdentifierSelect(int size, int k) {
@ -168,19 +173,26 @@ class FromElementType {
if ( fromElement.getFromClause().isSubQuery() ) {
// TODO: Replace this with a more elegant solution.
String[] idColumnNames = ( persister != null ) ?
( ( Queryable ) persister ).getIdentifierColumnNames() : new String[0];
( (Queryable) persister ).getIdentifierColumnNames() : new String[0];
StringBuilder buf = new StringBuilder();
for ( int i = 0; i < idColumnNames.length; i++ ) {
buf.append( fromElement.getTableAlias() ).append( '.' ).append( idColumnNames[i] );
if ( i != idColumnNames.length - 1 ) buf.append( ", " );
if ( i != idColumnNames.length - 1 ) {
buf.append( ", " );
}
}
return buf.toString();
}
else {
if (persister==null) {
if ( persister == null ) {
throw new QueryException( "not an entity" );
}
String fragment = ( ( Queryable ) persister ).identifierSelectFragment( getTableAlias(), getSuffix( size, k ) );
String fragment = ( (Queryable) persister ).identifierSelectFragment(
getTableAlias(), getSuffix(
size,
k
)
);
return trimLeadingCommaAndSpaces( fragment );
}
}
@ -190,8 +202,7 @@ class FromElementType {
}
private static String generateSuffix(int size, int k) {
String suffix = size == 1 ? "" : Integer.toString( k ) + '_';
return suffix;
return size == 1 ? "" : Integer.toString( k ) + '_';
}
private void checkInitialized() {
@ -200,8 +211,10 @@ class FromElementType {
/**
* Returns the property select SQL fragment.
*
* @param size The total number of returned types.
* @param k The sequence of the current returned type.
* @param k The sequence of the current returned type.
*
* @return the property select SQL fragment.
*/
String renderPropertySelect(int size, int k, boolean allProperties) {
@ -210,7 +223,7 @@ class FromElementType {
return "";
}
else {
String fragment = ( ( Queryable ) persister ).propertySelectFragment(
String fragment = ( (Queryable) persister ).propertySelectFragment(
getTableAlias(),
getSuffix( size, k ),
allProperties
@ -240,7 +253,7 @@ class FromElementType {
if ( collectionSuffix == null ) {
collectionSuffix = generateSuffix( size, k );
}
String fragment = queryableCollection.selectFragment( getTableAlias(), collectionSuffix );
String fragment = queryableCollection.selectFragment( getTableAlias(), collectionSuffix );
return trimLeadingCommaAndSpaces( fragment );
}
}
@ -250,6 +263,7 @@ class FromElementType {
* SQL fragment. :-P
*
* @param fragment An SQL fragment.
*
* @return The fragment, without the leading comma and spaces.
*/
private static String trimLeadingCommaAndSpaces(String fragment) {
@ -273,8 +287,11 @@ class FromElementType {
// Class names in the FROM clause result in a JoinSequence (the old FromParser does this).
if ( persister instanceof Joinable ) {
Joinable joinable = ( Joinable ) persister;
final JoinSequence joinSequence = fromElement.getSessionFactoryHelper().createJoinSequence().setRoot( joinable, getTableAlias() );
Joinable joinable = (Joinable) persister;
final JoinSequence joinSequence = fromElement.getSessionFactoryHelper().createJoinSequence().setRoot(
joinable,
getTableAlias()
);
joinSequence.applyTreatAsDeclarations( treatAsDeclarations );
return joinSequence;
}
@ -294,7 +311,9 @@ class FromElementType {
for ( String treatAsSubclassName : treatAsDeclarations ) {
try {
EntityPersister subclassPersister = fromElement.getSessionFactoryHelper().requireClassPersister( treatAsSubclassName );
EntityPersister subclassPersister = fromElement.getSessionFactoryHelper().requireClassPersister(
treatAsSubclassName
);
this.treatAsDeclarations.add( subclassPersister.getEntityName() );
}
catch (SemanticException e) {
@ -327,7 +346,9 @@ class FromElementType {
* Returns the type of a property, given it's name (the last part) and the full path.
*
* @param propertyName The last part of the full path to the property.
*
* @return The type.
*
	 * @param propertyPath The full property path.
*/
public Type getPropertyType(String propertyName, String propertyPath) {
@ -341,14 +362,18 @@ class FromElementType {
if ( persister != null && propertyName.equals( propertyPath ) && propertyName.equals( persister.getIdentifierPropertyName() ) ) {
type = persister.getIdentifierType();
}
else { // Otherwise, use the property mapping.
else { // Otherwise, use the property mapping.
PropertyMapping mapping = getPropertyMapping( propertyName );
type = mapping.toType( propertyPath );
}
if ( type == null ) {
throw new MappingException( "Property " + propertyName + " does not exist in " +
( ( queryableCollection == null ) ? "class" : "collection" ) + " "
+ ( ( queryableCollection == null ) ? fromElement.getClassName() : queryableCollection.getRole() ) );
throw new MappingException(
"Property " + propertyName + " does not exist in " +
( ( queryableCollection == null ) ? "class" : "collection" ) + " "
+ ( ( queryableCollection == null ) ?
fromElement.getClassName() :
queryableCollection.getRole() )
);
}
return type;
}
@ -378,43 +403,43 @@ class FromElementType {
propertyMapping.toColumns( tableAlias, path )
);
LOG.debugf( "toColumns(%s,%s) : subquery = %s", tableAlias, path, subquery );
return new String[]{"(" + subquery + ")"};
return new String[] {"(" + subquery + ")"};
}
if (forceAlias) {
return propertyMapping.toColumns(tableAlias, path);
}
if ( forceAlias ) {
return propertyMapping.toColumns( tableAlias, path );
}
if (fromElement.getWalker().getStatementType() == HqlSqlTokenTypes.SELECT) {
return propertyMapping.toColumns(tableAlias, path);
}
if ( fromElement.getWalker().getStatementType() == HqlSqlTokenTypes.SELECT ) {
return propertyMapping.toColumns( tableAlias, path );
}
if (fromElement.getWalker().isSubQuery()) {
// for a subquery, the alias to use depends on a few things (we
// already know this is not an overall SELECT):
// 1) if this FROM_ELEMENT represents a correlation to the
// outer-most query
// A) if the outer query represents a multi-table
// persister, we need to use the given alias
// in anticipation of one of the multi-table
// executors being used (as this subquery will
// actually be used in the "id select" phase
// of that multi-table executor)
// B) otherwise, we need to use the persister's
// table name as the column qualification
// 2) otherwise (not correlated), use the given alias
if (isCorrelation()) {
if (isMultiTable()) {
return propertyMapping.toColumns(tableAlias, path);
if ( fromElement.getWalker().isSubQuery() ) {
// for a subquery, the alias to use depends on a few things (we
// already know this is not an overall SELECT):
// 1) if this FROM_ELEMENT represents a correlation to the
// outer-most query
// A) if the outer query represents a multi-table
// persister, we need to use the given alias
// in anticipation of one of the multi-table
// executors being used (as this subquery will
// actually be used in the "id select" phase
// of that multi-table executor)
// B) otherwise, we need to use the persister's
// table name as the column qualification
// 2) otherwise (not correlated), use the given alias
if ( isCorrelation() ) {
if ( isMultiTable() ) {
return propertyMapping.toColumns( tableAlias, path );
}
return propertyMapping.toColumns(extractTableName(), path);
return propertyMapping.toColumns( extractTableName(), path );
}
return propertyMapping.toColumns(tableAlias, path);
}
return propertyMapping.toColumns( tableAlias, path );
}
if (fromElement.getWalker().getCurrentTopLevelClauseType() == HqlSqlTokenTypes.SELECT) {
return propertyMapping.toColumns(tableAlias, path);
}
if ( fromElement.getWalker().getCurrentTopLevelClauseType() == HqlSqlTokenTypes.SELECT ) {
return propertyMapping.toColumns( tableAlias, path );
}
if ( isManipulationQuery() && isMultiTable() && inWhereClause() ) {
// the actual where-clause will end up being ripped out the update/delete and used in
@ -431,13 +456,13 @@ class FromElementType {
private boolean isCorrelation() {
FromClause top = fromElement.getWalker().getFinalFromClause();
return fromElement.getFromClause() != fromElement.getWalker().getCurrentFromClause() &&
fromElement.getFromClause() == top;
fromElement.getFromClause() == top;
}
private boolean isMultiTable() {
// should be safe to only ever expect EntityPersister references here
return fromElement.getQueryable() != null &&
fromElement.getQueryable().isMultiTable();
fromElement.getQueryable().isMultiTable();
}
private String extractTableName() {
@ -462,8 +487,8 @@ class FromElementType {
PropertyMapping getPropertyMapping(String propertyName) {
checkInitialized();
if ( queryableCollection == null ) { // Not a collection?
return ( PropertyMapping ) persister; // Return the entity property mapping.
if ( queryableCollection == null ) { // Not a collection?
return (PropertyMapping) persister; // Return the entity property mapping.
}
// indexed, many-to-many collections must be treated specially here if the property to
@ -493,7 +518,7 @@ class FromElementType {
if ( queryableCollection.getElementType().isComponentType() ) {
// Collection of components.
if ( propertyName.equals( EntityPersister.ENTITY_ID ) ) {
return ( PropertyMapping ) queryableCollection.getOwnerEntityPersister();
return (PropertyMapping) queryableCollection.getOwnerEntityPersister();
}
}
return queryableCollection;
@ -524,7 +549,7 @@ class FromElementType {
}
private void validate(String propertyName) {
if ( ! ( CollectionPropertyNames.COLLECTION_INDEX.equals( propertyName )
if ( !( CollectionPropertyNames.COLLECTION_INDEX.equals( propertyName )
|| CollectionPropertyNames.COLLECTION_MAX_INDEX.equals( propertyName )
|| CollectionPropertyNames.COLLECTION_MIN_INDEX.equals( propertyName ) ) ) {
throw new IllegalArgumentException( "Expecting index-related function call" );
@ -550,13 +575,13 @@ class FromElementType {
if ( cols.length != 1 ) {
throw new QueryException( "composite collection index in minIndex()" );
}
return new String[] { "min(" + cols[0] + ')' };
return new String[] {"min(" + cols[0] + ')'};
}
else {
if ( cols.length != 1 ) {
throw new QueryException( "composite collection index in maxIndex()" );
}
return new String[] { "max(" + cols[0] + ')' };
return new String[] {"max(" + cols[0] + ')'};
}
}

View File

@ -36,7 +36,7 @@ import antlr.collections.AST;
* @author josh
*/
public abstract class FromReferenceNode extends AbstractSelectExpression
implements ResolvableNode, DisplayableNode, InitializeableNode, PathNode {
implements ResolvableNode, DisplayableNode, InitializeableNode, PathNode {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( FromReferenceNode.class );
@ -46,7 +46,7 @@ public abstract class FromReferenceNode extends AbstractSelectExpression
public static final int ROOT_LEVEL = 0;
@Override
public FromElement getFromElement() {
public FromElement getFromElement() {
return fromElement;
}
@ -62,6 +62,7 @@ public abstract class FromReferenceNode extends AbstractSelectExpression
public void resolveFirstChild() throws SemanticException {
}
@Override
public String getPath() {
return getOriginalText();
}
@ -77,6 +78,7 @@ public abstract class FromReferenceNode extends AbstractSelectExpression
}
}
@Override
public String getDisplayText() {
StringBuilder buf = new StringBuilder();
buf.append( "{" ).append( ( fromElement == null ) ? "no fromElement" : fromElement.getDisplayText() );
@ -88,11 +90,12 @@ public abstract class FromReferenceNode extends AbstractSelectExpression
recursiveResolve( level, impliedAtRoot, classAlias, this );
}
public void recursiveResolve(int level, boolean impliedAtRoot, String classAlias, AST parent) throws SemanticException {
public void recursiveResolve(int level, boolean impliedAtRoot, String classAlias, AST parent)
throws SemanticException {
AST lhs = getFirstChild();
int nextLevel = level + 1;
if ( lhs != null ) {
FromReferenceNode n = ( FromReferenceNode ) lhs;
FromReferenceNode n = (FromReferenceNode) lhs;
n.recursiveResolve( nextLevel, impliedAtRoot, null, this );
}
resolveFirstChild();
@ -104,18 +107,21 @@ public abstract class FromReferenceNode extends AbstractSelectExpression
}
@Override
public boolean isReturnableEntity() throws SemanticException {
public boolean isReturnableEntity() throws SemanticException {
return !isScalar() && fromElement.isEntity();
}
@Override
public void resolveInFunctionCall(boolean generateJoin, boolean implicitJoin) throws SemanticException {
resolve( generateJoin, implicitJoin );
}
@Override
public void resolve(boolean generateJoin, boolean implicitJoin) throws SemanticException {
resolve( generateJoin, implicitJoin, null );
}
@Override
public void resolve(boolean generateJoin, boolean implicitJoin, String classAlias) throws SemanticException {
resolve( generateJoin, implicitJoin, classAlias, null );
}

View File

@ -42,7 +42,7 @@ public class HqlSqlWalkerNode extends SqlNode implements InitializeableNode {
private HqlSqlWalker walker;
public void initialize(Object param) {
walker = ( HqlSqlWalker ) param;
walker = (HqlSqlWalker) param;
}
public HqlSqlWalker getWalker() {

View File

@ -40,17 +40,18 @@ import antlr.collections.AST;
* @author Steve Ebersole
*/
public class InLogicOperatorNode extends BinaryLogicOperatorNode implements BinaryOperatorNode {
public Node getInList() {
return getRightHandOperand();
}
@Override
public void initialize() throws SemanticException {
Node lhs = getLeftHandOperand();
final Node lhs = getLeftHandOperand();
if ( lhs == null ) {
throw new SemanticException( "left-hand operand of in operator was null" );
}
Node inList = getInList();
final Node inList = getInList();
if ( inList == null ) {
throw new SemanticException( "right-hand operand of in operator was null" );
}
@ -59,140 +60,153 @@ public class InLogicOperatorNode extends BinaryLogicOperatorNode implements Bina
// some form of property ref and that the children of the in-list represent
// one-or-more params.
if ( SqlNode.class.isAssignableFrom( lhs.getClass() ) ) {
Type lhsType = ( ( SqlNode ) lhs ).getDataType();
Type lhsType = ( (SqlNode) lhs ).getDataType();
AST inListChild = inList.getFirstChild();
while ( inListChild != null ) {
if ( ExpectedTypeAwareNode.class.isAssignableFrom( inListChild.getClass() ) ) {
( ( ExpectedTypeAwareNode ) inListChild ).setExpectedType( lhsType );
( (ExpectedTypeAwareNode) inListChild ).setExpectedType( lhsType );
}
inListChild = inListChild.getNextSibling();
}
}
SessionFactoryImplementor sessionFactory = getSessionFactoryHelper().getFactory();
if ( sessionFactory.getDialect().supportsRowValueConstructorSyntaxInInList() )
final SessionFactoryImplementor sessionFactory = getSessionFactoryHelper().getFactory();
if ( sessionFactory.getDialect().supportsRowValueConstructorSyntaxInInList() ) {
return;
Type lhsType = extractDataType( lhs );
if ( lhsType == null )
return;
int lhsColumnSpan = lhsType.getColumnSpan( sessionFactory );
Node rhsNode = (Node) inList.getFirstChild();
if ( !isNodeAcceptable( rhsNode ) )
return;
int rhsColumnSpan = 0;
if ( rhsNode == null ) {
return; // early exit for empty IN list
} else if ( rhsNode.getType() == HqlTokenTypes.VECTOR_EXPR ) {
rhsColumnSpan = rhsNode.getNumberOfChildren();
} else {
Type rhsType = extractDataType( rhsNode );
if ( rhsType == null )
return;
rhsColumnSpan = rhsType.getColumnSpan( sessionFactory );
}
}
final Type lhsType = extractDataType( lhs );
if ( lhsType == null ) {
return;
}
final int lhsColumnSpan = lhsType.getColumnSpan( sessionFactory );
final Node rhsNode = (Node) inList.getFirstChild();
if ( !isNodeAcceptable( rhsNode ) ) {
return;
}
int rhsColumnSpan;
if ( rhsNode == null ) {
// early exit for empty IN list
return;
}
else if ( rhsNode.getType() == HqlTokenTypes.VECTOR_EXPR ) {
rhsColumnSpan = rhsNode.getNumberOfChildren();
}
else {
final Type rhsType = extractDataType( rhsNode );
if ( rhsType == null ) {
return;
}
rhsColumnSpan = rhsType.getColumnSpan( sessionFactory );
}
if ( lhsColumnSpan > 1 && rhsColumnSpan > 1 ) {
mutateRowValueConstructorSyntaxInInListSyntax( lhsColumnSpan, rhsColumnSpan );
}
}
/**
* this is possible for parameter lists and explicit lists. It is completely unreasonable for sub-queries.
*/
private boolean isNodeAcceptable( Node rhsNode ) {
return rhsNode == null /* empty IN list */ || rhsNode instanceof LiteralNode
|| rhsNode instanceof ParameterNode
|| rhsNode.getType() == HqlTokenTypes.VECTOR_EXPR;
}
/**
* Mutate the subtree relating to a row-value-constructor in "in" list to instead use
* a series of ORen and ANDed predicates. This allows multi-column type comparisons
* and explicit row-value-constructor in "in" list syntax even on databases which do
* not support row-value-constructor in "in" list.
* <p/>
* For example, here we'd mutate "... where (col1, col2) in ( ('val1', 'val2'), ('val3', 'val4') ) ..." to
* "... where (col1 = 'val1' and col2 = 'val2') or (col1 = 'val3' and val2 = 'val4') ..."
*
* @param lhsColumnSpan The number of elements in the row value constructor list.
*/
private void mutateRowValueConstructorSyntaxInInListSyntax(
int lhsColumnSpan, int rhsColumnSpan ) {
String[] lhsElementTexts = extractMutationTexts( getLeftHandOperand(),
lhsColumnSpan );
Node rhsNode = (Node) getInList().getFirstChild();
private boolean isNodeAcceptable(Node rhsNode) {
return rhsNode == null /* empty IN list */ || rhsNode instanceof LiteralNode
|| rhsNode instanceof ParameterNode
|| rhsNode.getType() == HqlTokenTypes.VECTOR_EXPR;
}
ParameterSpecification lhsEmbeddedCompositeParameterSpecification = getLeftHandOperand() == null
|| ( !ParameterNode.class.isInstance( getLeftHandOperand() ) ) ? null
: ( (ParameterNode) getLeftHandOperand() )
.getHqlParameterSpecification();
/**
* Mutate the subtree relating to a row-value-constructor in "in" list to instead use
* a series of ORen and ANDed predicates. This allows multi-column type comparisons
* and explicit row-value-constructor in "in" list syntax even on databases which do
* not support row-value-constructor in "in" list.
* <p/>
* For example, here we'd mutate "... where (col1, col2) in ( ('val1', 'val2'), ('val3', 'val4') ) ..." to
	 * "... where (col1 = 'val1' and col2 = 'val2') or (col1 = 'val3' and col2 = 'val4') ..."
*
* @param lhsColumnSpan The number of elements in the row value constructor list.
*/
private void mutateRowValueConstructorSyntaxInInListSyntax(
int lhsColumnSpan, int rhsColumnSpan) {
String[] lhsElementTexts = extractMutationTexts(
getLeftHandOperand(),
lhsColumnSpan
);
Node rhsNode = (Node) getInList().getFirstChild();
ParameterSpecification lhsEmbeddedCompositeParameterSpecification = getLeftHandOperand() == null
|| ( !ParameterNode.class.isInstance( getLeftHandOperand() ) ) ? null
: ( (ParameterNode) getLeftHandOperand() )
.getHqlParameterSpecification();
final boolean negated = getType() == HqlSqlTokenTypes.NOT_IN;
if ( rhsNode != null && rhsNode.getNextSibling() == null ) {
if ( rhsNode != null && rhsNode.getNextSibling() == null ) {
/**
* only one element in the vector grouping.
* <code> where (a,b) in ( (1,2) ) </code> this will be mutated to
* <code>where a=1 and b=2 </code>
*/
String[] rhsElementTexts = extractMutationTexts( rhsNode, rhsColumnSpan );
setType( negated ? HqlTokenTypes.OR : HqlSqlTokenTypes.AND );
setText( negated ? "or" : "and" );
ParameterSpecification rhsEmbeddedCompositeParameterSpecification =
setType( negated ? HqlTokenTypes.OR : HqlSqlTokenTypes.AND );
setText( negated ? "or" : "and" );
ParameterSpecification rhsEmbeddedCompositeParameterSpecification =
rhsNode == null || ( !ParameterNode.class.isInstance( rhsNode ) )
? null
: ( (ParameterNode) rhsNode ).getHqlParameterSpecification();
translate(
translate(
lhsColumnSpan,
negated ? HqlSqlTokenTypes.NE : HqlSqlTokenTypes.EQ,
negated ? "<>" : "=",
lhsElementTexts,
rhsElementTexts,
lhsEmbeddedCompositeParameterSpecification,
rhsEmbeddedCompositeParameterSpecification,
rhsElementTexts,
lhsEmbeddedCompositeParameterSpecification,
rhsEmbeddedCompositeParameterSpecification,
this
);
}
}
else {
List andElementsNodeList = new ArrayList();
while ( rhsNode != null ) {
String[] rhsElementTexts = extractMutationTexts( rhsNode, rhsColumnSpan );
AST group = getASTFactory().create(
List andElementsNodeList = new ArrayList();
while ( rhsNode != null ) {
String[] rhsElementTexts = extractMutationTexts( rhsNode, rhsColumnSpan );
AST group = getASTFactory().create(
negated ? HqlSqlTokenTypes.OR : HqlSqlTokenTypes.AND,
negated ? "or" : "and"
);
ParameterSpecification rhsEmbeddedCompositeParameterSpecification =
ParameterSpecification rhsEmbeddedCompositeParameterSpecification =
rhsNode == null || ( !ParameterNode.class.isInstance( rhsNode ) )
? null
: ( (ParameterNode) rhsNode ).getHqlParameterSpecification();
translate(
translate(
lhsColumnSpan,
negated ? HqlSqlTokenTypes.NE : HqlSqlTokenTypes.EQ,
negated ? "<>" : "=",
lhsElementTexts,
lhsElementTexts,
rhsElementTexts,
lhsEmbeddedCompositeParameterSpecification,
rhsEmbeddedCompositeParameterSpecification,
lhsEmbeddedCompositeParameterSpecification,
rhsEmbeddedCompositeParameterSpecification,
group
);
andElementsNodeList.add( group );
rhsNode = (Node) rhsNode.getNextSibling();
}
setType( negated ? HqlSqlTokenTypes.AND : HqlSqlTokenTypes.OR );
setText( negated ? "and" : "or" );
AST curNode = this;
for ( int i = andElementsNodeList.size() - 1; i > 1; i-- ) {
AST group = getASTFactory().create(
andElementsNodeList.add( group );
rhsNode = (Node) rhsNode.getNextSibling();
}
setType( negated ? HqlSqlTokenTypes.AND : HqlSqlTokenTypes.OR );
setText( negated ? "and" : "or" );
AST curNode = this;
for ( int i = andElementsNodeList.size() - 1; i > 1; i-- ) {
AST group = getASTFactory().create(
negated ? HqlSqlTokenTypes.AND : HqlSqlTokenTypes.OR,
negated ? "and" : "or"
);
curNode.setFirstChild( group );
curNode = group;
AST and = (AST) andElementsNodeList.get( i );
group.setNextSibling( and );
}
AST node0 = (AST) andElementsNodeList.get( 0 );
AST node1 = (AST) andElementsNodeList.get( 1 );
node0.setNextSibling( node1 );
curNode.setFirstChild( node0 );
}
}
curNode.setFirstChild( group );
curNode = group;
AST and = (AST) andElementsNodeList.get( i );
group.setNextSibling( and );
}
AST node0 = (AST) andElementsNodeList.get( 0 );
AST node1 = (AST) andElementsNodeList.get( 1 );
node0.setNextSibling( node1 );
curNode.setFirstChild( node0 );
}
}
}

View File

@ -53,47 +53,50 @@ import antlr.collections.AST;
public class IndexNode extends FromReferenceNode {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( IndexNode.class );
@Override
public void setScalarColumnText(int i) throws SemanticException {
throw new UnsupportedOperationException( "An IndexNode cannot generate column text!" );
}
@Override
public void prepareForDot(String propertyName) throws SemanticException {
public void prepareForDot(String propertyName) throws SemanticException {
FromElement fromElement = getFromElement();
if ( fromElement == null ) {
throw new IllegalStateException( "No FROM element for index operator!" );
}
QueryableCollection queryableCollection = fromElement.getQueryableCollection();
if ( queryableCollection != null && !queryableCollection.isOneToMany() ) {
FromReferenceNode collectionNode = ( FromReferenceNode ) getFirstChild();
String path = collectionNode.getPath() + "[]." + propertyName;
final QueryableCollection queryableCollection = fromElement.getQueryableCollection();
if ( queryableCollection != null && !queryableCollection.isOneToMany() ) {
final FromReferenceNode collectionNode = (FromReferenceNode) getFirstChild();
final String path = collectionNode.getPath() + "[]." + propertyName;
LOG.debugf( "Creating join for many-to-many elements for %s", path );
FromElementFactory factory = new FromElementFactory( fromElement.getFromClause(), fromElement, path );
final FromElementFactory factory = new FromElementFactory( fromElement.getFromClause(), fromElement, path );
// This will add the new from element to the origin.
FromElement elementJoin = factory.createElementJoin( queryableCollection );
final FromElement elementJoin = factory.createElementJoin( queryableCollection );
setFromElement( elementJoin );
}
}
@Override
public void resolveIndex(AST parent) throws SemanticException {
throw new UnsupportedOperationException();
}
@Override
public void resolve(boolean generateJoin, boolean implicitJoin, String classAlias, AST parent)
throws SemanticException {
throws SemanticException {
if ( isResolved() ) {
return;
}
FromReferenceNode collectionNode = ( FromReferenceNode ) getFirstChild();
FromReferenceNode collectionNode = (FromReferenceNode) getFirstChild();
SessionFactoryHelper sessionFactoryHelper = getSessionFactoryHelper();
collectionNode.resolveIndex( this ); // Fully resolve the map reference, create implicit joins.
collectionNode.resolveIndex( this ); // Fully resolve the map reference, create implicit joins.
Type type = collectionNode.getDataType();
if ( !type.isCollectionType() ) {
throw new SemanticException( "The [] operator cannot be applied to type " + type.toString() );
}
String collectionRole = ( ( CollectionType ) type ).getRole();
String collectionRole = ( (CollectionType) type ).getRole();
QueryableCollection queryableCollection = sessionFactoryHelper.requireQueryableCollection( collectionRole );
if ( !queryableCollection.hasIndex() ) {
throw new QueryException( "unindexed fromElement before []: " + collectionNode.getPath() );
@ -141,7 +144,7 @@ public class IndexNode extends FromReferenceNode {
try {
gen.simpleExpr( selector ); //TODO: used to be exprNoParens! was this needed?
}
catch ( RecognitionException e ) {
catch (RecognitionException e) {
throw new QueryException( e.getMessage(), e );
}
String selectorExpression = gen.getSQL();
@ -149,10 +152,10 @@ public class IndexNode extends FromReferenceNode {
List<ParameterSpecification> paramSpecs = gen.getCollectedParameters();
if ( paramSpecs != null ) {
switch ( paramSpecs.size() ) {
case 0 :
case 0:
// nothing to do
break;
case 1 :
case 1:
ParameterSpecification paramSpec = paramSpecs.get( 0 );
paramSpec.setExpectedType( queryableCollection.getIndexType() );
fromElement.setIndexCollectionSelectorParamSpec( paramSpec );
@ -183,7 +186,7 @@ public class IndexNode extends FromReferenceNode {
@Override
public int bind(PreparedStatement statement, QueryParameters qp, SessionImplementor session, int position)
throws SQLException {
throws SQLException {
int bindCount = 0;
for ( ParameterSpecification paramSpec : paramSpecs ) {
bindCount += paramSpec.bind( statement, qp, session, position + bindCount );
@ -202,7 +205,7 @@ public class IndexNode extends FromReferenceNode {
@Override
public String renderDisplayInfo() {
return "index-selector [" + collectDisplayInfo() + "]" ;
return "index-selector [" + collectDisplayInfo() + "]";
}
private String collectDisplayInfo() {

View File

@ -23,6 +23,7 @@
*
*/
package org.hibernate.hql.internal.ast.tree;
import org.hibernate.QueryException;
import org.hibernate.hql.internal.antlr.HqlSqlTokenTypes;
@ -33,16 +34,12 @@ import org.hibernate.hql.internal.antlr.HqlSqlTokenTypes;
*/
public class InsertStatement extends AbstractStatement {
/**
* @see Statement#getStatementType()
*/
@Override
public int getStatementType() {
return HqlSqlTokenTypes.INSERT;
}
/**
* @see Statement#needsExecutor()
*/
@Override
public boolean needsExecutor() {
return true;
}
@ -57,12 +54,12 @@ public class InsertStatement extends AbstractStatement {
}
/**
* Retreive this insert statement's into-clause.
* Retrieve this insert statement's into-clause.
*
* @return The into-clause
*/
public IntoClause getIntoClause() {
return ( IntoClause ) getFirstChild();
return (IntoClause) getFirstChild();
}
/**
@ -71,7 +68,7 @@ public class InsertStatement extends AbstractStatement {
* @return The select-clause.
*/
public SelectClause getSelectClause() {
return ( ( QueryNode ) getIntoClause().getNextSibling() ).getSelectClause();
return ( (QueryNode) getIntoClause().getNextSibling() ).getSelectClause();
}
}

View File

@ -39,7 +39,6 @@ import org.hibernate.type.Type;
* @author Steve Ebersole
*/
public class JavaConstantNode extends Node implements ExpectedTypeAwareNode, SessionFactoryAwareNode {
private SessionFactoryImplementor factory;
private String constantExpression;
@ -49,7 +48,7 @@ public class JavaConstantNode extends Node implements ExpectedTypeAwareNode, Ses
private Type expectedType;
@Override
public void setText(String s) {
public void setText(String s) {
// for some reason the antlr.CommonAST initialization routines force
// this method to get called twice. The first time with an empty string
if ( StringHelper.isNotEmpty( s ) ) {
@ -60,31 +59,35 @@ public class JavaConstantNode extends Node implements ExpectedTypeAwareNode, Ses
}
}
@Override
public void setExpectedType(Type expectedType) {
this.expectedType = expectedType;
}
@Override
public Type getExpectedType() {
return expectedType;
}
@Override
public void setSessionFactory(SessionFactoryImplementor factory) {
this.factory = factory;
}
@Override
public String getRenderText(SessionFactoryImplementor sessionFactory) {
Type type = expectedType == null
@SuppressWarnings("unchecked")
public String getRenderText(SessionFactoryImplementor sessionFactory) {
final Type type = expectedType == null
? heuristicType
: Number.class.isAssignableFrom( heuristicType.getReturnedClass() )
? heuristicType
: expectedType;
? heuristicType
: expectedType;
try {
LiteralType literalType = ( LiteralType ) type;
Dialect dialect = factory.getDialect();
final LiteralType literalType = (LiteralType) type;
final Dialect dialect = factory.getDialect();
return literalType.objectToSQLString( constantValue, dialect );
}
catch ( Exception t ) {
catch (Exception t) {
throw new QueryException( QueryTranslator.ERROR_CANNOT_FORMAT_LITERAL + constantExpression, t );
}
}

View File

@ -57,6 +57,7 @@ public class MapEntryNode extends AbstractMapComponentNode implements Aggregated
this.base = base;
}
@Override
public String generateAlias(String sqlExpression) {
return NameGenerator.scalarName( base, counter++ );
}
@ -77,8 +78,8 @@ public class MapEntryNode extends AbstractMapComponentNode implements Aggregated
@Override
@SuppressWarnings("unchecked")
protected Type resolveType(QueryableCollection collectionPersister) {
Type keyType = collectionPersister.getIndexType();
Type valueType = collectionPersister.getElementType();
final Type keyType = collectionPersister.getIndexType();
final Type valueType = collectionPersister.getElementType();
types.add( keyType );
types.add( valueType );
mapEntryBuilder = new MapEntryBuilder();
@ -96,7 +97,7 @@ public class MapEntryNode extends AbstractMapComponentNode implements Aggregated
String text = "";
String[] columns = new String[selections.size()];
for ( int i = 0; i < selections.size(); i++ ) {
SelectExpression selectExpression = (SelectExpression) selections.get(i);
SelectExpression selectExpression = (SelectExpression) selections.get( i );
text += ( ", " + selectExpression.getExpression() + " as " + selectExpression.getAlias() );
columns[i] = selectExpression.getExpression();
}
@ -113,7 +114,7 @@ public class MapEntryNode extends AbstractMapComponentNode implements Aggregated
Type keyType = collectionPersister.getIndexType();
if ( keyType.isAssociationType() ) {
EntityType entityType = (EntityType) keyType;
Queryable keyEntityPersister = ( Queryable ) sfi().getEntityPersister(
Queryable keyEntityPersister = (Queryable) sfi().getEntityPersister(
entityType.getAssociatedEntityName( sfi() )
);
SelectFragment fragment = keyEntityPersister.propertySelectFragmentFragment(
@ -127,7 +128,7 @@ public class MapEntryNode extends AbstractMapComponentNode implements Aggregated
@SuppressWarnings({"unchecked", "ForLoopReplaceableByForEach"})
private void appendSelectExpressions(String[] columnNames, List selections, AliasGenerator aliasGenerator) {
for ( int i = 0; i < columnNames.length; i++ ) {
for ( int i = 0; i < columnNames.length; i++ ) {
selections.add(
new BasicSelectExpression(
collectionTableAlias() + '.' + columnNames[i],
@ -154,7 +155,7 @@ public class MapEntryNode extends AbstractMapComponentNode implements Aggregated
Type valueType = collectionPersister.getElementType();
if ( valueType.isAssociationType() ) {
EntityType valueEntityType = (EntityType) valueType;
Queryable valueEntityPersister = ( Queryable ) sfi().getEntityPersister(
Queryable valueEntityPersister = (Queryable) sfi().getEntityPersister(
valueEntityType.getAssociatedEntityName( sfi() )
);
SelectFragment fragment = valueEntityPersister.propertySelectFragmentFragment(
@ -228,14 +229,14 @@ public class MapEntryNode extends AbstractMapComponentNode implements Aggregated
return true;
}
private List types = new ArrayList(4); // size=4 to prevent resizing
private List types = new ArrayList( 4 ); // size=4 to prevent resizing
@Override
public List getAggregatedSelectionTypeList() {
return types;
}
private static final String[] ALIASES = { null, null };
private static final String[] ALIASES = {null, null};
@Override
public String[] getAggregatedAliases() {
@ -294,7 +295,7 @@ public class MapEntryNode extends AbstractMapComponentNode implements Aggregated
if ( o == null || getClass() != o.getClass() ) {
return false;
}
EntryAdapter that = ( EntryAdapter ) o;
EntryAdapter that = (EntryAdapter) o;
// make sure we have the same types...
return ( key == null ? that.key == null : key.equals( that.key ) )

View File

@ -32,6 +32,7 @@ import org.hibernate.hql.internal.antlr.SqlTokenTypes;
import org.hibernate.hql.internal.ast.TypeDiscriminatorMetadata;
import org.hibernate.hql.internal.ast.util.ASTUtil;
import org.hibernate.hql.internal.ast.util.ColumnHelper;
import org.hibernate.internal.CoreLogging;
import org.hibernate.persister.collection.CollectionPropertyNames;
import org.hibernate.persister.collection.QueryableCollection;
import org.hibernate.type.Type;
@ -47,7 +48,7 @@ import antlr.collections.AST;
* @author josh
*/
public class MethodNode extends AbstractSelectExpression implements FunctionNode {
private static final Logger LOG = Logger.getLogger( MethodNode.class.getName() );
private static final Logger LOG = CoreLogging.logger( MethodNode.class );
private String methodName;
private FromElement fromElement;
@ -107,8 +108,10 @@ public class MethodNode extends AbstractSelectExpression implements FunctionNode
public void initializeMethodNode(AST name, boolean inSelect) {
name.setType( SqlTokenTypes.METHOD_NAME );
String text = name.getText();
methodName = text.toLowerCase(); // Use the lower case function name.
this.inSelect = inSelect; // Remember whether we're in a SELECT clause or not.
// Use the lower case function name.
methodName = text.toLowerCase();
// Remember whether we're in a SELECT clause or not.
this.inSelect = inSelect;
}
private void typeDiscriminator(AST path) throws SemanticException {
@ -148,7 +151,7 @@ public class MethodNode extends AbstractSelectExpression implements FunctionNode
throw new SemanticException( "Collection function " + name.getText() + " has no path!" );
}
SqlNode expr = ( SqlNode ) path;
SqlNode expr = (SqlNode) path;
Type type = expr.getDataType();
LOG.debugf( "collectionProperty() : name=%s type=%s", name, type );
@ -158,7 +161,7 @@ public class MethodNode extends AbstractSelectExpression implements FunctionNode
protected void resolveCollectionProperty(AST expr) throws SemanticException {
String propertyName = CollectionProperties.getNormalizedPropertyName( methodName );
if ( expr instanceof FromReferenceNode ) {
FromReferenceNode collectionNode = ( FromReferenceNode ) expr;
FromReferenceNode collectionNode = (FromReferenceNode) expr;
// If this is 'elements' then create a new FROM element.
if ( CollectionPropertyNames.COLLECTION_ELEMENTS.equals( propertyName ) ) {
handleElements( collectionNode, propertyName );
@ -170,7 +173,7 @@ public class MethodNode extends AbstractSelectExpression implements FunctionNode
selectColumns = fromElement.toColumns( fromElement.getTableAlias(), propertyName, inSelect );
}
if ( collectionNode instanceof DotNode ) {
prepareAnyImplicitJoins( ( DotNode ) collectionNode );
prepareAnyImplicitJoins( (DotNode) collectionNode );
}
if ( !inSelect ) {
fromElement.setText( "" );
@ -183,18 +186,18 @@ public class MethodNode extends AbstractSelectExpression implements FunctionNode
else {
throw new SemanticException(
"Unexpected expression " + expr +
" found for collection function " + propertyName
);
" found for collection function " + propertyName
);
}
}
private void prepareAnyImplicitJoins(DotNode dotNode) throws SemanticException {
if ( dotNode.getLhs() instanceof DotNode ) {
DotNode lhs = ( DotNode ) dotNode.getLhs();
DotNode lhs = (DotNode) dotNode.getLhs();
FromElement lhsOrigin = lhs.getFromElement();
if ( lhsOrigin != null && "".equals( lhsOrigin.getText() ) ) {
String lhsOriginText = lhsOrigin.getQueryable().getTableName() +
" " + lhsOrigin.getTableAlias();
" " + lhsOrigin.getTableAlias();
lhsOrigin.setText( lhsOriginText );
}
prepareAnyImplicitJoins( lhs );
@ -217,11 +220,12 @@ public class MethodNode extends AbstractSelectExpression implements FunctionNode
selectColumns = collectionFromElement.toColumns( fromElement.getTableAlias(), propertyName, inSelect );
}
@Override
public void setScalarColumnText(int i) throws SemanticException {
if ( selectColumns == null ) { // Dialect function
if ( selectColumns == null ) { // Dialect function
ColumnHelper.generateSingleScalarColumn( this, i );
}
else { // Collection 'property function'
else { // Collection 'property function'
ColumnHelper.generateScalarColumns( this, selectColumns, i );
}
}
@ -230,7 +234,7 @@ public class MethodNode extends AbstractSelectExpression implements FunctionNode
}
@Override
public FromElement getFromElement() {
public FromElement getFromElement() {
return fromElement;
}
@ -238,7 +242,7 @@ public class MethodNode extends AbstractSelectExpression implements FunctionNode
return "{" +
"method=" + methodName +
",selectColumns=" + ( selectColumns == null ?
null : Arrays.asList( selectColumns ) ) +
null : Arrays.asList( selectColumns ) ) +
",fromElement=" + fromElement.getTableAlias() +
"}";
}

View File

@ -28,11 +28,10 @@ import org.hibernate.hql.internal.antlr.HqlSqlTokenTypes;
import org.hibernate.hql.internal.antlr.SqlTokenTypes;
import org.hibernate.hql.internal.ast.util.ASTUtil;
import org.hibernate.hql.internal.ast.util.ColumnHelper;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.type.Type;
import org.jboss.logging.Logger;
import antlr.SemanticException;
import antlr.collections.AST;
@ -42,34 +41,29 @@ import antlr.collections.AST;
* @author Joshua Davis
*/
public class QueryNode extends AbstractRestrictableStatement implements SelectExpression {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, QueryNode.class.getName());
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( QueryNode.class );
private OrderByClause orderByClause;
private int scalarColumnIndex = -1;
/**
* @see Statement#getStatementType()
*/
@Override
public int getStatementType() {
return HqlSqlTokenTypes.QUERY;
}
/**
* @see Statement#needsExecutor()
*/
@Override
public boolean needsExecutor() {
return false;
}
@Override
protected int getWhereClauseParentTokenType() {
protected int getWhereClauseParentTokenType() {
return SqlTokenTypes.FROM;
}
@Override
protected CoreMessageLogger getLog() {
return LOG;
protected CoreMessageLogger getLog() {
return LOG;
}
/**
@ -86,7 +80,7 @@ public class QueryNode extends AbstractRestrictableStatement implements SelectEx
// If it is not found; simply return null...
//
// Also, do not cache since it gets generated well after we are created.
return ( SelectClause ) ASTUtil.findTypeInChildren( this, SqlTokenTypes.SELECT_CLAUSE );
return (SelectClause) ASTUtil.findTypeInChildren( this, SqlTokenTypes.SELECT_CLAUSE );
}
public final boolean hasOrderByClause() {
@ -101,7 +95,7 @@ public class QueryNode extends AbstractRestrictableStatement implements SelectEx
// if there is no order by, make one
if ( orderByClause == null ) {
LOG.debug( "getOrderByClause() : Creating a new ORDER BY clause" );
orderByClause = ( OrderByClause ) ASTUtil.create( getWalker().getASTFactory(), SqlTokenTypes.ORDER, "ORDER" );
orderByClause = (OrderByClause) getWalker().getASTFactory().create( SqlTokenTypes.ORDER, "ORDER" );
// Find the WHERE; if there is no WHERE, find the FROM...
AST prevSibling = ASTUtil.findTypeInChildren( this, SqlTokenTypes.WHERE );
@ -118,51 +112,60 @@ public class QueryNode extends AbstractRestrictableStatement implements SelectEx
}
private OrderByClause locateOrderByClause() {
return ( OrderByClause ) ASTUtil.findTypeInChildren( this, SqlTokenTypes.ORDER );
return (OrderByClause) ASTUtil.findTypeInChildren( this, SqlTokenTypes.ORDER );
}
private String alias;
@Override
public String getAlias() {
return alias;
}
@Override
public FromElement getFromElement() {
return null;
}
@Override
public boolean isConstructor() {
return false;
}
@Override
public boolean isReturnableEntity() throws SemanticException {
return false;
}
@Override
public boolean isScalar() throws SemanticException {
return true;
}
@Override
public void setAlias(String alias) {
this.alias = alias;
}
@Override
public void setScalarColumn(int i) throws SemanticException {
scalarColumnIndex = i;
setScalarColumnText( i );
}
@Override
public int getScalarColumnIndex() {
return scalarColumnIndex;
}
@Override
public void setScalarColumnText(int i) throws SemanticException {
ColumnHelper.generateSingleScalarColumn( this, i );
}
@Override
public Type getDataType() {
public Type getDataType() {
return ( (SelectExpression) getSelectClause().getFirstSelectExpression() ).getDataType();
}

View File

@ -97,7 +97,7 @@ public class SelectClause extends SelectExpressionList {
public Type[] getQueryReturnTypes() {
return queryReturnTypes;
}
/**
* The HQL aliases, or generated aliases
*
@ -124,6 +124,7 @@ public class SelectClause extends SelectExpressionList {
* Prepares an explicitly defined select clause.
*
* @param fromClause The from clause linked to this select clause.
*
* @throws SemanticException indicates a semntic issue with the explicit select clause.
*/
public void initializeExplicitSelectClause(FromClause fromClause) throws SemanticException {
@ -140,14 +141,14 @@ public class SelectClause extends SelectExpressionList {
// changes the AST!!!
SelectExpression[] selectExpressions = collectSelectExpressions();
// we only support parameters in select in the case of INSERT...SELECT statements
if (getParameterPositions().size() > 0 && getWalker().getStatementType() != HqlSqlTokenTypes.INSERT) {
throw new QueryException("Parameters are only supported in SELECT clauses when used as part of a INSERT INTO DML statement");
}
for ( int i = 0; i < selectExpressions.length; i++ ) {
SelectExpression selectExpression = selectExpressions[i];
// we only support parameters in select in the case of INSERT...SELECT statements
if ( getParameterPositions().size() > 0 && getWalker().getStatementType() != HqlSqlTokenTypes.INSERT ) {
throw new QueryException(
"Parameters are only supported in SELECT clauses when used as part of a INSERT INTO DML statement"
);
}
for ( SelectExpression selectExpression : selectExpressions ) {
if ( AggregatedSelectExpression.class.isInstance( selectExpression ) ) {
aggregatedSelectExpression = (AggregatedSelectExpression) selectExpression;
queryReturnTypeList.addAll( aggregatedSelectExpression.getAggregatedSelectionTypeList() );
@ -157,18 +158,23 @@ public class SelectClause extends SelectExpressionList {
// we have no choice but to do this check here
// this is not very elegant but the "right way" would most likely involve a bigger rewrite so as to
// treat ParameterNodes in select clauses as SelectExpressions
boolean inSubquery = selectExpression instanceof QueryNode && ((QueryNode) selectExpression).getFromClause().getParentFromClause() != null;
if (getWalker().getStatementType() == HqlSqlTokenTypes.INSERT && inSubquery) {
boolean inSubquery = selectExpression instanceof QueryNode
&& ( (QueryNode) selectExpression ).getFromClause().getParentFromClause() != null;
if ( getWalker().getStatementType() == HqlSqlTokenTypes.INSERT && inSubquery ) {
// we do not support parameters for subqueries in INSERT...SELECT
if (((QueryNode) selectExpression).getSelectClause().getParameterPositions().size() > 0) {
throw new QueryException("Use of parameters in subqueries of INSERT INTO DML statements is not supported.");
if ( ( (QueryNode) selectExpression ).getSelectClause().getParameterPositions().size() > 0 ) {
throw new QueryException(
"Use of parameters in subqueries of INSERT INTO DML statements is not supported."
);
}
}
}
Type type = selectExpression.getDataType();
if ( type == null ) {
throw new IllegalStateException( "No data type for node: " + selectExpression.getClass().getName() + " "
+ new ASTPrinter( SqlTokenTypes.class ).showAsString( ( AST ) selectExpression, "" ) );
throw new IllegalStateException(
"No data type for node: " + selectExpression.getClass().getName() + " "
+ new ASTPrinter( SqlTokenTypes.class ).showAsString( (AST) selectExpression, "" )
);
}
//sqlResultTypeList.add( type );
@ -187,18 +193,19 @@ public class SelectClause extends SelectExpressionList {
}
//init the aliases, after initing the constructornode
initAliases(selectExpressions);
initAliases( selectExpressions );
if ( !getWalker().isShallowQuery() ) {
// add the fetched entities
List fromElements = fromClause.getProjectionList();
ASTAppender appender = new ASTAppender( getASTFactory(), this ); // Get ready to start adding nodes.
// Get ready to start adding nodes.
ASTAppender appender = new ASTAppender( getASTFactory(), this );
int size = fromElements.size();
Iterator iterator = fromElements.iterator();
for ( int k = 0; iterator.hasNext(); k++ ) {
FromElement fromElement = ( FromElement ) iterator.next();
FromElement fromElement = (FromElement) iterator.next();
if ( fromElement.isFetch() ) {
FromElement origin = null;
@ -219,8 +226,8 @@ public class SelectClause extends SelectExpressionList {
if ( !fromElementsForLoad.contains( origin ) ) {
throw new QueryException(
"query specified join fetching, but the owner " +
"of the fetched association was not present in the select list " +
"[" + fromElement.getDisplayText() + "]"
"of the fetched association was not present in the select list " +
"[" + fromElement.getDisplayText() + "]"
);
}
Type type = fromElement.getSelectType();
@ -234,7 +241,11 @@ public class SelectClause extends SelectExpressionList {
//sqlResultTypeList.add( type );
// Generate the select expression.
String text = fromElement.renderIdentifierSelect( size, k );
SelectExpressionImpl generatedExpr = ( SelectExpressionImpl ) appender.append( SqlTokenTypes.SELECT_EXPR, text, false );
SelectExpressionImpl generatedExpr = (SelectExpressionImpl) appender.append(
SqlTokenTypes.SELECT_EXPR,
text,
false
);
if ( generatedExpr != null ) {
generatedExpr.setFromElement( fromElement );
}
@ -257,7 +268,7 @@ public class SelectClause extends SelectExpressionList {
}
private void finishInitialization(ArrayList queryReturnTypeList) {
queryReturnTypes = ( Type[] ) queryReturnTypeList.toArray( new Type[queryReturnTypeList.size()] );
queryReturnTypes = (Type[]) queryReturnTypeList.toArray( new Type[queryReturnTypeList.size()] );
initializeColumnNames();
prepared = true;
}
@ -269,16 +280,16 @@ public class SelectClause extends SelectExpressionList {
// todo: we should really just collect these from the various SelectExpressions, rather than regenerating here
columnNames = getSessionFactoryHelper().generateColumnNames( queryReturnTypes );
columnNamesStartPositions = new int[ columnNames.length ];
columnNamesStartPositions = new int[columnNames.length];
int startPosition = 1;
for ( int i = 0 ; i < columnNames.length ; i ++ ) {
columnNamesStartPositions[ i ] = startPosition;
startPosition += columnNames[ i ].length;
for ( int i = 0; i < columnNames.length; i++ ) {
columnNamesStartPositions[i] = startPosition;
startPosition += columnNames[i].length;
}
}
public int getColumnNamesStartPosition(int i) {
return columnNamesStartPositions[ i ];
return columnNamesStartPositions[i];
}
/**
@ -297,13 +308,13 @@ public class SelectClause extends SelectExpressionList {
// }
List fromElements = fromClause.getProjectionList();
ASTAppender appender = new ASTAppender( getASTFactory(), this ); // Get ready to start adding nodes.
ASTAppender appender = new ASTAppender( getASTFactory(), this ); // Get ready to start adding nodes.
int size = fromElements.size();
ArrayList queryReturnTypeList = new ArrayList( size );
Iterator iterator = fromElements.iterator();
for ( int k = 0; iterator.hasNext(); k++ ) {
FromElement fromElement = ( FromElement ) iterator.next();
FromElement fromElement = (FromElement) iterator.next();
Type type = fromElement.getSelectType();
addCollectionFromElement( fromElement );
@ -318,7 +329,11 @@ public class SelectClause extends SelectExpressionList {
fromElementsForLoad.add( fromElement );
// Generate the select expression.
String text = fromElement.renderIdentifierSelect( size, k );
SelectExpressionImpl generatedExpr = ( SelectExpressionImpl ) appender.append( SqlTokenTypes.SELECT_EXPR, text, false );
SelectExpressionImpl generatedExpr = (SelectExpressionImpl) appender.append(
SqlTokenTypes.SELECT_EXPR,
text,
false
);
if ( generatedExpr != null ) {
generatedExpr.setFromElement( fromElement );
}
@ -337,14 +352,14 @@ public class SelectClause extends SelectExpressionList {
}
finishInitialization( queryReturnTypeList );
}
public static boolean VERSION2_SQL;
private void addCollectionFromElement(FromElement fromElement) {
if ( fromElement.isFetch() ) {
if ( fromElement.getQueryableCollection() != null ) {
String suffix;
if (collectionFromElements==null) {
if ( collectionFromElements == null ) {
collectionFromElements = new ArrayList();
suffix = VERSION2_SQL ? "__" : "0__";
}
@ -366,9 +381,10 @@ public class SelectClause extends SelectExpressionList {
return n;
}
@SuppressWarnings("SimplifiableIfStatement")
private boolean isReturnableEntity(SelectExpression selectExpression) throws SemanticException {
FromElement fromElement = selectExpression.getFromElement();
boolean isFetchOrValueCollection = fromElement != null &&
boolean isFetchOrValueCollection = fromElement != null &&
( fromElement.isFetch() || fromElement.isCollectionOfValuesOrComponents() );
if ( isFetchOrValueCollection ) {
return false;
@ -382,17 +398,17 @@ public class SelectClause extends SelectExpressionList {
if ( !currentFromClause.isSubQuery() ) {
for ( int i = 0; i < se.length; i++ ) {
SelectExpression expr = se[i];
expr.setScalarColumn( i ); // Create SQL_TOKEN nodes for the columns.
expr.setScalarColumn( i ); // Create SQL_TOKEN nodes for the columns.
}
}
}
private void initAliases(SelectExpression[] selectExpressions) {
if ( aggregatedSelectExpression == null ) {
aliases = new String[selectExpressions.length];
for ( int i=0; i<selectExpressions.length; i++ ) {
for ( int i = 0; i < selectExpressions.length; i++ ) {
String alias = selectExpressions[i].getAlias();
aliases[i] = alias==null ? Integer.toString(i) : alias;
aliases[i] = alias == null ? Integer.toString( i ) : alias;
}
}
else {
@ -400,13 +416,15 @@ public class SelectClause extends SelectExpressionList {
}
}
private void renderNonScalarSelects(SelectExpression[] selectExpressions, FromClause currentFromClause)
throws SemanticException {
private void renderNonScalarSelects(SelectExpression[] selectExpressions, FromClause currentFromClause)
throws SemanticException {
ASTAppender appender = new ASTAppender( getASTFactory(), this );
final int size = selectExpressions.length;
int nonscalarSize = 0;
for ( int i = 0; i < size; i++ ) {
if ( !selectExpressions[i].isScalar() ) nonscalarSize++;
if ( !selectExpressions[i].isScalar() ) {
nonscalarSize++;
}
}
int j = 0;
@ -436,7 +454,12 @@ public class SelectClause extends SelectExpressionList {
}
}
private void renderNonScalarIdentifiers(FromElement fromElement, int nonscalarSize, int j, SelectExpression expr, ASTAppender appender) {
private void renderNonScalarIdentifiers(
FromElement fromElement,
int nonscalarSize,
int j,
SelectExpression expr,
ASTAppender appender) {
String text = fromElement.renderIdentifierSelect( nonscalarSize, j );
if ( !fromElement.getFromClause().isSubQuery() ) {
if ( !scalarSelect && !getWalker().isShallowQuery() ) {
@ -459,7 +482,7 @@ public class SelectClause extends SelectExpressionList {
// Look through the FromElement's children to find any collections of values that should be fetched...
ASTIterator iter = new ASTIterator( fromElement );
while ( iter.hasNext() ) {
FromElement child = ( FromElement ) iter.next();
FromElement child = (FromElement) iter.next();
if ( child.isCollectionOfValuesOrComponents() && child.isFetch() ) {
// Need a better way to define the suffixes here...
text = child.renderValueCollectionSelectFragment( nonscalarSize, nonscalarSize + k );

View File

@ -40,7 +40,7 @@ import antlr.collections.AST;
public abstract class SelectExpressionList extends HqlSqlWalkerNode {
private List<Integer> parameterPositions = new ArrayList<Integer>();
/**
* Returns an array of SelectExpressions gathered from the children of the given parent AST node.
*
@ -50,22 +50,24 @@ public abstract class SelectExpressionList extends HqlSqlWalkerNode {
// Get the first child to be considered. Sub-classes may do this differently in order to skip nodes that
// are not select expressions (e.g. DISTINCT).
AST firstChild = getFirstSelectExpression();
AST parent = this;
ArrayList list = new ArrayList( parent.getNumberOfChildren() );
ArrayList<SelectExpression> list = new ArrayList<SelectExpression>();
int p = 0;
for ( AST n = firstChild; n != null; n = n.getNextSibling() ) {
if ( n instanceof SelectExpression ) {
list.add( n );
list.add( (SelectExpression) n );
}
else if( n instanceof ParameterNode ) {
parameterPositions.add(p);
else if ( n instanceof ParameterNode ) {
parameterPositions.add( p );
}
else {
throw new IllegalStateException( "Unexpected AST: " + n.getClass().getName() + " " + new ASTPrinter( SqlTokenTypes.class ).showAsString( n, "" ) );
throw new IllegalStateException(
"Unexpected AST: " + n.getClass().getName() + " "
+ new ASTPrinter( SqlTokenTypes.class ).showAsString( n, "" )
);
}
p++;
}
return ( SelectExpression[] ) list.toArray( new SelectExpression[list.size()] );
return list.toArray( new SelectExpression[list.size()] );
}
/**

View File

@ -23,6 +23,7 @@
*
*/
package org.hibernate.hql.internal.ast.tree;
import java.util.ArrayList;
import java.util.List;
@ -56,11 +57,12 @@ public class SqlFragment extends Node implements ParameterContainer {
// ParameterContainer impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
private List embeddedParameters;
private List<ParameterSpecification> embeddedParameters;
public void addEmbeddedParameter(ParameterSpecification specification) {
if ( embeddedParameters == null ) {
embeddedParameters = new ArrayList();
embeddedParameters = new ArrayList<ParameterSpecification>();
}
embeddedParameters.add( specification );
}
@ -70,6 +72,6 @@ public class SqlFragment extends Node implements ParameterContainer {
}
public ParameterSpecification[] getEmbeddedParameters() {
return ( ParameterSpecification[] ) embeddedParameters.toArray( new ParameterSpecification[ embeddedParameters.size() ] );
return embeddedParameters.toArray( new ParameterSpecification[ embeddedParameters.size() ] );
}
}

View File

@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,10 +20,7 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
//$Id: UnaryArithmeticNode.java 8407 2005-10-14 17:23:18Z steveebersole $
package org.hibernate.hql.internal.ast.tree;
import org.hibernate.hql.internal.ast.util.ColumnHelper;
@ -34,7 +31,7 @@ import antlr.SemanticException;
public class UnaryArithmeticNode extends AbstractSelectExpression implements UnaryOperatorNode {
public Type getDataType() {
return ( ( SqlNode ) getOperand() ).getDataType();
return ( (SqlNode) getOperand() ).getDataType();
}
public void setScalarColumnText(int i) throws SemanticException {
@ -47,6 +44,6 @@ public class UnaryArithmeticNode extends AbstractSelectExpression implements Una
}
public Node getOperand() {
return ( Node ) getFirstChild();
return (Node) getFirstChild();
}
}

View File

@ -33,7 +33,7 @@ import org.hibernate.type.Type;
*/
public class UnaryLogicOperatorNode extends HqlSqlWalkerNode implements UnaryOperatorNode {
public Node getOperand() {
return ( Node ) getFirstChild();
return (Node) getFirstChild();
}
public void initialize() {

View File

@ -34,8 +34,8 @@ import antlr.collections.AST;
* @author josh
*/
public class ASTIterator implements Iterator {
private AST next, current;
private LinkedList parents = new LinkedList();
private AST next;
private LinkedList<AST> parents = new LinkedList<AST>();
/**
* Constructs an Iterator for depth-first iteration of an AST
@ -69,7 +69,7 @@ public class ASTIterator implements Iterator {
* @return The next node.
*/
public AST nextNode() {
current = next;
AST current = next;
if ( next != null ) {
AST nextSibling = next.getNextSibling();
if ( nextSibling == null ) {
@ -99,7 +99,7 @@ public class ASTIterator implements Iterator {
return null;
}
else {
return ( AST ) parents.removeFirst();
return parents.removeFirst();
}
}

View File

@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,9 +20,9 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.hql.internal.ast.util;
import java.util.Iterator;
import java.util.LinkedList;
@ -34,8 +34,9 @@ import antlr.collections.AST;
* @author josh
*/
public class ASTParentsFirstIterator implements Iterator {
private AST next, current, tree;
private LinkedList parents = new LinkedList();
private AST next;
private AST tree;
private LinkedList<AST> parents = new LinkedList<AST>();
public void remove() {
throw new UnsupportedOperationException( "remove() is not supported" );
@ -54,15 +55,16 @@ public class ASTParentsFirstIterator implements Iterator {
}
public AST nextNode() {
current = next;
AST current = next;
if ( next != null ) {
AST child = next.getFirstChild();
if ( child == null ) {
AST sibling = next.getNextSibling();
if ( sibling == null ) {
AST parent = pop();
while ( parent != null && parent.getNextSibling() == null )
while ( parent != null && parent.getNextSibling() == null ) {
parent = pop();
}
next = ( parent != null ) ? parent.getNextSibling() : null;
}
else {
@ -88,7 +90,7 @@ public class ASTParentsFirstIterator implements Iterator {
return null;
}
else {
return ( AST ) parents.removeFirst();
return parents.removeFirst();
}
}

View File

@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008, 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,7 +20,6 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.hql.internal.ast.util;
@ -58,7 +57,7 @@ public class ASTPrinter {
}
public ASTPrinter(boolean showClassNames) {
this( ( Map ) null, showClassNames );
this( (Map) null, showClassNames );
}
/**
@ -120,7 +119,7 @@ public class ASTPrinter {
* @param pw The print writer to which the AST should be written.
*/
public void showAst(AST ast, PrintWriter pw) {
ArrayList parents = new ArrayList();
ArrayList<AST> parents = new ArrayList<AST>();
showAst( parents, pw, ast );
pw.flush();
}
@ -129,6 +128,7 @@ public class ASTPrinter {
* Returns the token type name for the given token type.
*
* @param type The token type.
*
* @return String - The token type name from the token type constant class,
* or just the integer as a string if none exists.
*/
@ -136,7 +136,7 @@ public class ASTPrinter {
final Integer typeInteger = type;
String value = null;
if ( tokenTypeNameCache != null ) {
value = ( String ) tokenTypeNameCache.get( typeInteger );
value = (String) tokenTypeNameCache.get( typeInteger );
}
if ( value == null ) {
value = typeInteger.toString();
@ -144,14 +144,13 @@ public class ASTPrinter {
return value;
}
private void showAst(ArrayList parents, PrintWriter pw, AST ast) {
private void showAst(ArrayList<AST> parents, PrintWriter pw, AST ast) {
if ( ast == null ) {
pw.println( "AST is null!" );
return;
}
for ( int i = 0; i < parents.size(); i++ ) {
AST parent = ( AST ) parents.get( i );
for ( AST parent : parents ) {
if ( parent.getNextSibling() == null ) {
pw.print( " " );
@ -170,7 +169,7 @@ public class ASTPrinter {
showNode( pw, ast );
ArrayList newParents = new ArrayList( parents );
ArrayList<AST> newParents = new ArrayList<AST>( parents );
newParents.add( ast );
for ( AST child = ast.getFirstChild(); child != null; child = child.getNextSibling() ) {
showAst( newParents, pw, child );
@ -193,37 +192,37 @@ public class ASTPrinter {
buf.append( StringHelper.unqualify( ast.getClass().getName() ) ).append( ": " );
}
buf.append( "'" );
String text = ast.getText();
buf.append( "'" );
String text = ast.getText();
if ( text == null ) {
text = "{text:null}";
}
appendEscapedMultibyteChars(text, buf);
buf.append( "'" );
appendEscapedMultibyteChars( text, buf );
buf.append( "'" );
if ( ast instanceof DisplayableNode ) {
DisplayableNode displayableNode = ( DisplayableNode ) ast;
DisplayableNode displayableNode = (DisplayableNode) ast;
// Add a space before the display text.
buf.append( " " ).append( displayableNode.getDisplayText() );
}
return buf.toString();
}
public static void appendEscapedMultibyteChars(String text, StringBuilder buf) {
char[] chars = text.toCharArray();
for (int i = 0; i < chars.length; i++) {
char aChar = chars[i];
if (aChar > 256) {
buf.append("\\u");
buf.append(Integer.toHexString(aChar));
}
else
buf.append(aChar);
}
}
public static void appendEscapedMultibyteChars(String text, StringBuilder buf) {
char[] chars = text.toCharArray();
for ( char aChar : chars ) {
if ( aChar > 256 ) {
buf.append( "\\u" );
buf.append( Integer.toHexString( aChar ) );
}
else {
buf.append( aChar );
}
}
}
public static String escapeMultibyteChars(String text) {
StringBuilder buf = new StringBuilder();
appendEscapedMultibyteChars(text,buf);
return buf.toString();
}
public static String escapeMultibyteChars(String text) {
StringBuilder buf = new StringBuilder();
appendEscapedMultibyteChars( text, buf );
return buf.toString();
}
}

View File

@ -23,6 +23,7 @@
*
*/
package org.hibernate.hql.internal.ast.util;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
@ -114,7 +115,12 @@ public final class ASTUtil {
*
* @return AST - A new sub-tree of the form "(parent child1 child2)"
*/
public static AST createBinarySubtree(ASTFactory factory, int parentType, String parentText, AST child1, AST child2) {
public static AST createBinarySubtree(
ASTFactory factory,
int parentType,
String parentText,
AST child1,
AST child2) {
ASTArray array = createAstArray( factory, 3, parentType, parentText, child1 );
array.add( child2 );
return factory.make( array );
@ -155,6 +161,7 @@ public final class ASTUtil {
*
* @param fixture The node against which to testto be checked for children.
* @param test The node to be tested as being a subtree child of the parent.
*
* @return True if child is contained in the parent's collection of children.
*/
public static boolean isSubtreeChild(AST fixture, AST test) {
@ -316,7 +323,12 @@ public final class ASTUtil {
}
}
private static ASTArray createAstArray(ASTFactory factory, int size, int parentType, String parentText, AST child1) {
private static ASTArray createAstArray(
ASTFactory factory,
int size,
int parentType,
String parentText,
AST child1) {
ASTArray array = new ASTArray( size );
array.add( factory.create( parentType, parentText ) );
array.add( child1 );
@ -341,6 +353,7 @@ public final class ASTUtil {
* A predicate that uses inclusion, rather than exclusion semantics.
*/
public abstract static class IncludePredicate implements FilterPredicate {
@Override
public final boolean exclude(AST node) {
return !include( node );
}
@ -360,6 +373,7 @@ public final class ASTUtil {
this.predicate = predicate;
}
@Override
public void visit(AST node) {
if ( predicate == null || !predicate.exclude( node ) ) {
collectedNodes.add( node );
@ -381,17 +395,18 @@ public final class ASTUtil {
* Method to generate a map of token type names, keyed by their token type values.
*
* @param tokenTypeInterface The *TokenTypes interface (or implementor of said interface).
*
* @return The generated map.
*/
public static Map generateTokenNameCache(Class tokenTypeInterface) {
final Field[] fields = tokenTypeInterface.getFields();
Map cache = new HashMap( (int)( fields.length * .75 ) + 1 );
Map cache = new HashMap( (int) ( fields.length * .75 ) + 1 );
for ( final Field field : fields ) {
if ( Modifier.isStatic( field.getModifiers() ) ) {
try {
cache.put( field.get( null ), field.getName() );
}
catch ( Throwable ignore ) {
catch (Throwable ignore) {
}
}
}
@ -447,18 +462,18 @@ public final class ASTUtil {
try {
Object value = field.get( null );
if ( value instanceof Integer ) {
rtn = ( Integer ) value;
rtn = (Integer) value;
}
else if ( value instanceof Short ) {
rtn = ( ( Short ) value ).intValue();
rtn = ( (Short) value ).intValue();
}
else if ( value instanceof Long ) {
if ( ( Long ) value <= Integer.MAX_VALUE ) {
rtn = ( ( Long ) value ).intValue();
if ( (Long) value <= Integer.MAX_VALUE ) {
rtn = ( (Long) value ).intValue();
}
}
}
catch ( IllegalAccessException ignore ) {
catch (IllegalAccessException ignore) {
}
return rtn;
}

View File

@ -43,6 +43,7 @@ import org.hibernate.hql.internal.ast.tree.FromElement;
import org.hibernate.hql.internal.ast.tree.ParameterContainer;
import org.hibernate.hql.internal.ast.tree.QueryNode;
import org.hibernate.hql.internal.classic.ParserHelper;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.FilterImpl;
import org.hibernate.internal.util.StringHelper;
@ -52,8 +53,6 @@ import org.hibernate.sql.JoinFragment;
import org.hibernate.sql.JoinType;
import org.hibernate.type.Type;
import org.jboss.logging.Logger;
/**
* Performs the post-processing of the join information gathered during semantic analysis.
* The join generating classes are complex, this encapsulates some of the JoinSequence-related
@ -62,8 +61,7 @@ import org.jboss.logging.Logger;
* @author Joshua Davis
*/
public class JoinProcessor implements SqlTokenTypes {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, JoinProcessor.class.getName());
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( JoinProcessor.class );
private final HqlSqlWalker walker;
private final SyntheticAndFactory syntheticAndFactory;
@ -82,22 +80,29 @@ public class JoinProcessor implements SqlTokenTypes {
* Translates an AST join type (i.e., the token type) into a JoinFragment.XXX join type.
*
* @param astJoinType The AST join type (from HqlSqlTokenTypes or SqlTokenTypes)
*
* @return a JoinFragment.XXX join type.
*
* @see JoinFragment
* @see SqlTokenTypes
*/
public static JoinType toHibernateJoinType(int astJoinType) {
switch ( astJoinType ) {
case LEFT_OUTER:
case LEFT_OUTER: {
return JoinType.LEFT_OUTER_JOIN;
case INNER:
}
case INNER: {
return JoinType.INNER_JOIN;
case RIGHT_OUTER:
}
case RIGHT_OUTER: {
return JoinType.RIGHT_OUTER_JOIN;
case FULL:
}
case FULL: {
return JoinType.FULL_JOIN;
default:
}
default: {
throw new AssertionFailure( "undefined join type " + astJoinType );
}
}
}
@ -125,26 +130,31 @@ public class JoinProcessor implements SqlTokenTypes {
// Iterate through the alias,JoinSequence pairs and generate SQL token nodes.
Iterator iter = fromElements.iterator();
while ( iter.hasNext() ) {
final FromElement fromElement = ( FromElement ) iter.next();
final FromElement fromElement = (FromElement) iter.next();
JoinSequence join = fromElement.getJoinSequence();
join.setSelector(new JoinSequence.Selector() {
public boolean includeSubclasses( String alias ) {
// The uber-rule here is that we need to include subclass joins if
// the FromElement is in any way dereferenced by a property from
// the subclass table; otherwise we end up with column references
// qualified by a non-existent table reference in the resulting SQL...
boolean containsTableAlias = fromClause.containsTableAlias(alias);
if (fromElement.isDereferencedBySubclassProperty()) {
// TODO : or should we return 'containsTableAlias'??
LOG.tracev( "Forcing inclusion of extra joins [alias={0}, containsTableAlias={1}]", alias, containsTableAlias );
return true;
}
boolean shallowQuery = walker.isShallowQuery();
boolean includeSubclasses = fromElement.isIncludeSubclasses();
boolean subQuery = fromClause.isSubQuery();
return includeSubclasses && containsTableAlias && !subQuery && !shallowQuery;
join.setSelector(
new JoinSequence.Selector() {
public boolean includeSubclasses(String alias) {
// The uber-rule here is that we need to include subclass joins if
// the FromElement is in any way dereferenced by a property from
// the subclass table; otherwise we end up with column references
// qualified by a non-existent table reference in the resulting SQL...
boolean containsTableAlias = fromClause.containsTableAlias( alias );
if ( fromElement.isDereferencedBySubclassProperty() ) {
// TODO : or should we return 'containsTableAlias'??
LOG.tracev(
"Forcing inclusion of extra joins [alias={0}, containsTableAlias={1}]",
alias,
containsTableAlias
);
return true;
}
boolean shallowQuery = walker.isShallowQuery();
boolean includeSubclasses = fromElement.isIncludeSubclasses();
boolean subQuery = fromClause.isSubQuery();
return includeSubclasses && containsTableAlias && !subQuery && !shallowQuery;
}
}
}
);
addJoinNodes( query, join, fromElement );
}
@ -205,16 +215,13 @@ public class JoinProcessor implements SqlTokenTypes {
final ParameterContainer container,
final HqlSqlWalker walker) {
if ( walker.getEnabledFilters().isEmpty()
&& ( ! hasDynamicFilterParam( sqlFragment ) )
&& ( ! ( hasCollectionFilterParam( sqlFragment ) ) ) ) {
&& ( !hasDynamicFilterParam( sqlFragment ) )
&& ( !( hasCollectionFilterParam( sqlFragment ) ) ) ) {
return;
}
Dialect dialect = walker.getSessionFactoryHelper().getFactory().getDialect();
String symbols = new StringBuilder().append( ParserHelper.HQL_SEPARATORS )
.append( dialect.openQuote() )
.append( dialect.closeQuote() )
.toString();
String symbols = ParserHelper.HQL_SEPARATORS + dialect.openQuote() + dialect.closeQuote();
StringTokenizer tokens = new StringTokenizer( sqlFragment, symbols, true );
StringBuilder result = new StringBuilder();
@ -223,20 +230,26 @@ public class JoinProcessor implements SqlTokenTypes {
if ( token.startsWith( ParserHelper.HQL_VARIABLE_PREFIX ) ) {
final String filterParameterName = token.substring( 1 );
final String[] parts = LoadQueryInfluencers.parseFilterParameterName( filterParameterName );
final FilterImpl filter = ( FilterImpl ) walker.getEnabledFilters().get( parts[0] );
final FilterImpl filter = (FilterImpl) walker.getEnabledFilters().get( parts[0] );
final Object value = filter.getParameter( parts[1] );
final Type type = filter.getFilterDefinition().getParameterType( parts[1] );
final String typeBindFragment = StringHelper.join(
",",
ArrayHelper.fillArray(
"?", type.getColumnSpan(
walker.getSessionFactoryHelper().getFactory()
)
"?",
type.getColumnSpan( walker.getSessionFactoryHelper().getFactory() )
)
);
final String bindFragment = ( value != null && Collection.class.isInstance( value ) )
? StringHelper.join( ",", ArrayHelper.fillArray( typeBindFragment, ( ( Collection ) value ).size() ) )
: typeBindFragment;
final String bindFragment;
if ( value != null && Collection.class.isInstance( value ) ) {
bindFragment = StringHelper.join(
",",
ArrayHelper.fillArray( typeBindFragment, ( (Collection) value ).size() )
);
}
else {
bindFragment = typeBindFragment;
}
result.append( bindFragment );
container.addEmbeddedParameter( new DynamicFilterParameterSpecification( parts[0], parts[1], type ) );
}
@ -249,10 +262,10 @@ public class JoinProcessor implements SqlTokenTypes {
}
private static boolean hasDynamicFilterParam(String sqlFragment) {
return sqlFragment.indexOf( ParserHelper.HQL_VARIABLE_PREFIX ) < 0;
return !sqlFragment.contains( ParserHelper.HQL_VARIABLE_PREFIX );
}
private static boolean hasCollectionFilterParam(String sqlFragment) {
return sqlFragment.indexOf( "?" ) < 0;
return !sqlFragment.contains( "?" );
}
}

View File

@ -59,7 +59,10 @@ import antlr.collections.AST;
* @author josh
*/
public class LiteralProcessor implements HqlSqlTokenTypes {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, LiteralProcessor.class.getName());
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
CoreMessageLogger.class,
LiteralProcessor.class.getName()
);
/**
* In what format should Float and Double literal values be sent to the database?
@ -75,23 +78,25 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
public boolean isAlias(String alias) {
FromClause from = walker.getCurrentFromClause();
while ( from.isSubQuery() ) {
if ( from.containsClassAlias(alias) ) {
if ( from.containsClassAlias( alias ) ) {
return true;
}
from = from.getParentFromClause();
}
return from.containsClassAlias(alias);
return from.containsClassAlias( alias );
}
public void processConstant(AST constant, boolean resolveIdent) throws SemanticException {
// If the constant is an IDENT, figure out what it means...
boolean isIdent = ( constant.getType() == IDENT || constant.getType() == WEIRD_IDENT );
if ( resolveIdent && isIdent && isAlias( constant.getText() ) ) { // IDENT is a class alias in the FROM.
IdentNode ident = ( IdentNode ) constant;
if ( resolveIdent && isIdent && isAlias( constant.getText() ) ) {
// IDENT is a class alias in the FROM.
IdentNode ident = (IdentNode) constant;
// Resolve to an identity column.
ident.resolve(false, true);
ident.resolve( false, true );
}
else { // IDENT might be the name of a class.
else {
// IDENT might be the name of a class.
Queryable queryable = walker.getSessionFactoryHelper().findQueryableUsingImports( constant.getText() );
if ( isIdent && queryable != null ) {
constant.setText( queryable.getDiscriminatorSQLValue() );
@ -110,23 +115,29 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
// the name of an entity class
final String discrim = persister.getDiscriminatorSQLValue();
node.setDataType( persister.getDiscriminatorType() );
if (InFragment.NULL.equals(discrim) || InFragment.NOT_NULL.equals(discrim)) throw new InvalidPathException(
"subclass test not allowed for null or not null discriminator: '"
+ text + "'");
setSQLValue(node, text, discrim); // the class discriminator value
if ( InFragment.NULL.equals( discrim ) || InFragment.NOT_NULL.equals( discrim ) ) {
throw new InvalidPathException(
"subclass test not allowed for null or not null discriminator: '" + text + "'"
);
}
// the class discriminator value
setSQLValue( node, text, discrim );
}
else {
Object value = ReflectHelper.getConstantValue( text );
if (value == null) throw new InvalidPathException("Invalid path: '" + text + "'");
setConstantValue(node, text, value);
if ( value == null ) {
throw new InvalidPathException( "Invalid path: '" + text + "'" );
}
setConstantValue( node, text, value );
}
}
private void setSQLValue(DotNode node, String text, String value) {
LOG.debugf( "setSQLValue() %s -> %s", text, value );
node.setFirstChild( null ); // Chop off the rest of the tree.
// Chop off the rest of the tree.
node.setFirstChild( null );
node.setType( SqlTokenTypes.SQL_TOKEN );
node.setText(value);
node.setText( value );
node.setResolvedConstant( text );
}
@ -134,7 +145,8 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "setConstantValue() %s -> %s %s", text, value, value.getClass().getName() );
}
node.setFirstChild( null ); // Chop off the rest of the tree.
// Chop off the rest of the tree.
node.setFirstChild( null );
if ( value instanceof String ) {
node.setType( SqlTokenTypes.QUOTED_STRING );
}
@ -164,21 +176,23 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
}
Type type;
try {
type = walker.getSessionFactoryHelper().getFactory().getTypeResolver().heuristicType( value.getClass().getName() );
type = walker.getSessionFactoryHelper().getFactory().getTypeResolver().heuristicType(
value.getClass().getName()
);
}
catch ( MappingException me ) {
catch (MappingException me) {
throw new QueryException( me );
}
if ( type == null ) {
throw new QueryException( QueryTranslator.ERROR_CANNOT_DETERMINE_TYPE + node.getText() );
}
try {
LiteralType literalType = ( LiteralType ) type;
LiteralType literalType = (LiteralType) type;
Dialect dialect = walker.getSessionFactoryHelper().getFactory().getDialect();
//noinspection unchecked
node.setText( literalType.objectToSQLString( value, dialect ) );
}
catch ( Exception e ) {
catch (Exception e) {
throw new QueryException( QueryTranslator.ERROR_CANNOT_FORMAT_LITERAL + node.getText(), e );
}
node.setDataType( type );
@ -188,22 +202,22 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
public void processBoolean(AST constant) {
// TODO: something much better - look at the type of the other expression!
// TODO: Have comparisonExpression and/or arithmeticExpression rules complete the resolution of boolean nodes.
String replacement = ( String ) walker.getTokenReplacements().get( constant.getText() );
String replacement = (String) walker.getTokenReplacements().get( constant.getText() );
if ( replacement != null ) {
constant.setText( replacement );
}
else {
boolean bool = "true".equals( constant.getText().toLowerCase() );
Dialect dialect = walker.getSessionFactoryHelper().getFactory().getDialect();
constant.setText( dialect.toBooleanValueString(bool) );
constant.setText( dialect.toBooleanValueString( bool ) );
}
}
private void processLiteral(AST constant) {
String replacement = ( String ) walker.getTokenReplacements().get( constant.getText() );
String replacement = (String) walker.getTokenReplacements().get( constant.getText() );
if ( replacement != null ) {
if ( LOG.isDebugEnabled() ) {
LOG.debugf("processConstant() : Replacing '%s' with '%s'", constant.getText(), replacement);
LOG.debugf( "processConstant() : Replacing '%s' with '%s'", constant.getText(), replacement );
}
constant.setText( replacement );
}
@ -214,11 +228,15 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
|| literal.getType() == NUM_LONG
|| literal.getType() == NUM_BIG_INTEGER ) {
literal.setText( determineIntegerRepresentation( literal.getText(), literal.getType() ) );
} else if (literal.getType() == NUM_FLOAT
}
else if ( literal.getType() == NUM_FLOAT
|| literal.getType() == NUM_DOUBLE
|| literal.getType() == NUM_BIG_DECIMAL ) {
literal.setText( determineDecimalRepresentation( literal.getText(), literal.getType() ) );
} else LOG.unexpectedLiteralTokenType(literal.getType());
}
else {
LOG.unexpectedLiteralTokenType( literal.getType() );
}
}
private String determineIntegerRepresentation(String text, int type) {
@ -234,10 +252,11 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
try {
return Integer.valueOf( text ).toString();
}
catch( NumberFormatException e ) {
catch (NumberFormatException e) {
LOG.tracev(
"Could not format incoming text [{0}] as a NUM_INT; assuming numeric overflow and attempting as NUM_LONG",
text );
text
);
}
}
String literalValue = text;
@ -246,7 +265,7 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
}
return Long.valueOf( literalValue ).toString();
}
catch( Throwable t ) {
catch (Throwable t) {
throw new HibernateException( "Could not parse literal [" + text + "] as integer", t );
}
}
@ -273,7 +292,7 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
try {
number = new BigDecimal( literalValue );
}
catch( Throwable t ) {
catch (Throwable t) {
throw new HibernateException( "Could not parse literal [" + text + "] as big-decimal", t );
}
@ -308,8 +327,11 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
jdkFormatter.setMaximumFractionDigits( Integer.MAX_VALUE );
return jdkFormatter.format( number );
}
catch( Throwable t ) {
throw new HibernateException( "Unable to format decimal literal in approximate format [" + number.toString() + "]", t );
catch (Throwable t) {
throw new HibernateException(
"Unable to format decimal literal in approximate format [" + number.toString() + "]",
t
);
}
}
}
@ -329,7 +351,7 @@ public class LiteralProcessor implements HqlSqlTokenTypes {
* Indicates that Float and Double literal values should
* be treated using the SQL "approximate" format (i.e., '1E-3')
*/
@SuppressWarnings( {"UnusedDeclaration"})
@SuppressWarnings({"UnusedDeclaration"})
APPROXIMATE {
@Override
public DecimalFormatter getFormatter() {

View File

@ -33,6 +33,7 @@ import org.hibernate.hql.internal.ast.tree.Node;
import org.hibernate.hql.internal.ast.tree.QueryNode;
import org.hibernate.hql.internal.ast.tree.RestrictableStatement;
import org.hibernate.hql.internal.ast.tree.SqlFragment;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.param.CollectionFilterKeyParameterSpecification;
@ -40,8 +41,6 @@ import org.hibernate.persister.entity.Queryable;
import org.hibernate.sql.JoinFragment;
import org.hibernate.type.Type;
import org.jboss.logging.Logger;
import antlr.collections.AST;
/**
@ -50,8 +49,7 @@ import antlr.collections.AST;
* @author josh
*/
public class SyntheticAndFactory implements HqlSqlTokenTypes {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, SyntheticAndFactory.class.getName());
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( SyntheticAndFactory.class );
private HqlSqlWalker hqlSqlWalker;
private AST thetaJoins;
@ -62,7 +60,7 @@ public class SyntheticAndFactory implements HqlSqlTokenTypes {
}
private Node create(int tokenType, String text) {
return ( Node ) ASTUtil.create( hqlSqlWalker.getASTFactory(), tokenType, text );
return (Node) hqlSqlWalker.getASTFactory().create( tokenType, text );
}
public void addWhereFragment(
@ -92,7 +90,7 @@ public class SyntheticAndFactory implements HqlSqlTokenTypes {
LOG.debugf( "Using unprocessed WHERE-fragment [%s]", whereFragment );
SqlFragment fragment = ( SqlFragment ) create( SQL_TOKEN, whereFragment );
SqlFragment fragment = (SqlFragment) create( SQL_TOKEN, whereFragment );
fragment.setJoinFragment( joinFragment );
fragment.setFromElement( fromElement );
@ -148,7 +146,7 @@ public class SyntheticAndFactory implements HqlSqlTokenTypes {
// Create a new THETA_JOINS node as a parent of all filters
thetaJoins = create( THETA_JOINS, "{theta joins}" );
// Put the THETA_JOINS node before the HQL condition, after the filters.
if (filters==null) {
if ( filters == null ) {
ASTUtil.insertChild( where, thetaJoins );
}
else {
@ -157,7 +155,7 @@ public class SyntheticAndFactory implements HqlSqlTokenTypes {
}
// add the current fragment to the THETA_JOINS node
thetaJoins.addChild(fragment);
thetaJoins.addChild( fragment );
}
}
@ -177,7 +175,11 @@ public class SyntheticAndFactory implements HqlSqlTokenTypes {
// Need to parse off the column qualifiers; this is assuming (which is true as of now)
// that this is only used from update and delete HQL statement parsing
whereFragment = StringHelper.replace( whereFragment, persister.generateFilterConditionAlias( alias ) + ".", "" );
whereFragment = StringHelper.replace(
whereFragment,
persister.generateFilterConditionAlias( alias ) + ".",
""
);
// Note: this simply constructs a "raw" SQL_TOKEN representing the
// where fragment and injects this into the tree. This "works";
@ -186,7 +188,7 @@ public class SyntheticAndFactory implements HqlSqlTokenTypes {
// At some point we probably want to apply an additional grammar to
// properly tokenize this where fragment into constituent parts
// focused on the operators embedded within the fragment.
SqlFragment discrimNode = ( SqlFragment ) create( SQL_TOKEN, whereFragment );
SqlFragment discrimNode = (SqlFragment) create( SQL_TOKEN, whereFragment );
JoinProcessor.processDynamicFilterParameters(
whereFragment,