Performance work
parent 567eb38069 · commit 611cdceeb2

@@ -0,0 +1,97 @@
= JBoss Logging ValidIdRange Mapping

[width="50%",cols=">s,>s,^2m",options="header"]
|===
|`ValidIdRange#min`
|`ValidIdRange#max`
|Logger

|1
|10000
|org.hibernate.internal.CoreMessageLogger

|10001
|15000
|org.hibernate.c3p0.internal.C3P0MessageLogger (extends ConnectionPoolingLogger)

|15000
|20000
|org.hibernate.internal.EntityManagerMessageLogger

|20001
|25000
|org.hibernate.cache.ehcache.EhCacheMessageLogger (extends CoreMessageLogger)

|25001
|30000
|org.hibernate.envers.internal.EnversMessageLogger

|25001
|30000
|org.hibernate.cache.infinispan.util.InfinispanMessageLogger

|30001
|35000
|org.hibernate.proxool.internal.ProxoolMessageLogger (extends ConnectionPoolingLogger)

|10000001
|10001000
|org.hibernate.internal.log.UrlMessageBundle

|10001001
|10001500
|org.hibernate.internal.log.ConnectionPoolingLogger

|10005001
|10010000
|org.hibernate.resource.cdi.internal.CdiMessageLogger

|80000001
|80001000
|org.hibernate.spatial.HSMessageLogger

|90000001
|90001000
|org.hibernate.internal.log.DeprecationLogger

|90001001
|90002000
|org.hibernate.cache.spi.SecondLevelCacheLogger

|90002001
|90003000
|org.hibernate.internal.log.UnsupportedLogger

|90003001
|90003500
|org.hibernate.query.spi.QueryLogger

|90003501
|90004000
|org.hibernate.query.hql.HqlLogger

|90004001
|90005000
|org.hibernate.sql.exec.SqlExecLogger

|90005001
|90005100
|org.hibernate.sql.results.SqlResultsLogger

|90005101
|90005200
|org.hibernate.sql.results.internal.domain.collection.CollectionLoadingLogger

|90005201
|90005300
|org.hibernate.sql.results.internal.domain.entity.EntityLoadingLogger

|90005301
|90005400
|org.hibernate.sql.results.internal.domain.embedded.CompositeLoadingLogger

|90005401
|90005500
|org.hibernate.sql.ast.tree.SqlAstTreeLogger

|===

@@ -93,12 +93,14 @@ pathRoot
	;

/**
 * Rule for dotIdentifierSequence where we expect an entity-name. The extra
 * "rule layer" allows the walker to specially handle such a case (to use a special
 * org.hibernate.query.hql.DotIdentifierConsumer, etc)
 * Specialized dotIdentifierSequence for cases where we expect an entity-name. We handle it specially
 * for the sake of performance. Specifically we concatenate together the entity name as we walk the
 * parse tree. Relying on `EntityNameContext#getText` or `DotIdentifierSequenceContext#getText`
 * performs a walk to determine the name.
 */
entityName
	: dotIdentifierSequence
	returns [String fullNameText]
	: (i=identifier { $fullNameText = _localctx.i.getText(); }) (DOT c=identifier { $fullNameText += ("." + _localctx.c.getText() ); })*
	;

identificationVariableDef
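The reworked rule carries the dotted name in a synthesized fullNameText attribute, appending each identifier as it is matched, so later consumers can read the name directly instead of calling getText(), which performs a walk of the subtree to rebuild the string on every call. A minimal, ANTLR-free sketch of that difference (the Node class and the sample names are illustrative only, not part of the commit):

import java.util.List;

public class DottedNameDemo {
	// Illustrative stand-in for one identifier segment of a parse tree; not an ANTLR type.
	static final class Node {
		final String segment;
		Node(String segment) { this.segment = segment; }
	}

	// getText()-style approach: rebuild the dotted name from the segments every time it is requested.
	static String rebuildEachTime(List<Node> nodes) {
		StringBuilder sb = new StringBuilder();
		for ( Node n : nodes ) {
			if ( sb.length() > 0 ) {
				sb.append( '.' );
			}
			sb.append( n.segment );
		}
		return sb.toString();
	}

	public static void main(String[] args) {
		List<Node> nodes = List.of( new Node( "org" ), new Node( "hibernate" ), new Node( "Person" ) );

		// Grammar-action style: accumulate the full name once, while the segments are being matched.
		String fullNameText = null;
		for ( Node n : nodes ) {
			fullNameText = ( fullNameText == null ) ? n.segment : fullNameText + "." + n.segment;
		}

		System.out.println( fullNameText );              // org.hibernate.Person
		System.out.println( rebuildEachTime( nodes ) );  // same text, but recomputed on every call
	}
}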
@@ -968,7 +970,7 @@ identifier
	| WITH
	| YEAR
	| trigFunctionName) {
		logUseOfReservedWordAsIdentifier(getCurrentToken());
		logUseOfReservedWordAsIdentifier( getCurrentToken() );
	}
	;
|
|
@ -101,14 +101,24 @@ public class ExportableColumn extends Column {
|
|||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean[] getColumnInsertability() {
|
||||
return new boolean[] { true };
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasAnyInsertableColumns() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean[] getColumnUpdateability() {
|
||||
return new boolean[] { true };
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean[] getColumnInsertability() {
|
||||
return new boolean[] { true };
|
||||
public boolean hasAnyUpdatableColumns() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -656,14 +656,26 @@ public abstract class Collection implements Fetchable, Value, Filterable {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean[] getColumnInsertability() {
|
||||
return ArrayHelper.EMPTY_BOOLEAN_ARRAY;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasAnyInsertableColumns() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean[] getColumnUpdateability() {
|
||||
return ArrayHelper.EMPTY_BOOLEAN_ARRAY;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasAnyUpdatableColumns() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public boolean isSubselectLoadable() {
|
||||
return subselectLoadable;
|
||||
}
|
||||
|
|
|
@ -295,6 +295,18 @@ public class Component extends SimpleValue implements MetaAttributable {
|
|||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasAnyInsertableColumns() {
|
||||
for ( int i = 0; i < properties.size(); i++ ) {
|
||||
final Property property = properties.get( i );
|
||||
if ( property.getValue().hasAnyInsertableColumns() ) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean[] getColumnUpdateability() {
|
||||
boolean[] result = new boolean[ getColumnSpan() ];
|
||||
|
@ -311,6 +323,18 @@ public class Component extends SimpleValue implements MetaAttributable {
|
|||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasAnyUpdatableColumns() {
|
||||
for ( int i = 0; i < properties.size(); i++ ) {
|
||||
final Property property = properties.get( i );
|
||||
if ( property.getValue().hasAnyUpdatableColumns() ) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean isKey() {
|
||||
return isKey;
|
||||
}
|
||||
|
|
|
@ -149,11 +149,21 @@ public class OneToMany implements Value {
|
|||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasAnyInsertableColumns() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean[] getColumnUpdateability() {
|
||||
//TODO: we could just return all false...
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasAnyUpdatableColumns() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean isIgnoreNotFound() {
|
||||
return ignoreNotFound;
|
||||
}
|
||||
|
|
|
@@ -163,17 +163,13 @@ public class Property implements Serializable, MetaAttributable {
	public boolean isUpdateable() {
		// if the property mapping consists of all formulas,
		// make it non-updateable
		return updateable && !ArrayHelper.isAllFalse( value.getColumnUpdateability() );
		return updateable && value.hasAnyUpdatableColumns();
	}

	public boolean isInsertable() {
		// if the property mapping consists of all formulas,
		// make it non-insertable
		final boolean[] columnInsertability = value.getColumnInsertability();
		return insertable && (
				columnInsertability.length==0 ||
				!ArrayHelper.isAllFalse( columnInsertability )
		);
		return insertable && value.hasAnyInsertableColumns();
	}

	public ValueGeneration getValueGenerationStrategy() {

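This hunk is the consumer side of the new Value#hasAnyInsertableColumns() / hasAnyUpdatableColumns() methods added throughout the commit: previously isInsertable()/isUpdateable() asked the value for its full boolean[] (a fresh array built per call, mirroring the extractBooleansFromList helper shown in the SimpleValue hunk below) just to check whether any flag is set. A simplified sketch of the two paths, using stand-in types rather than the real org.hibernate.mapping classes:

import java.util.List;

// Stand-ins for the real mapping types; names and shape are illustrative only.
interface ValueSketch {
	boolean[] getColumnInsertability();   // allocates a fresh array on every call
	boolean hasAnyInsertableColumns();    // answers the same question without allocating
}

class SimpleValueSketch implements ValueSketch {
	private final List<Boolean> insertability;

	SimpleValueSketch(List<Boolean> insertability) {
		this.insertability = insertability;
	}

	@Override
	public boolean[] getColumnInsertability() {
		final boolean[] array = new boolean[ insertability.size() ];
		for ( int i = 0; i < array.length; i++ ) {
			array[i] = insertability.get( i );
		}
		return array;
	}

	@Override
	public boolean hasAnyInsertableColumns() {
		for ( Boolean insertable : insertability ) {
			if ( insertable ) {
				return true;
			}
		}
		return false;
	}
}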
@ -41,6 +41,7 @@ import org.hibernate.id.factory.IdentifierGeneratorFactory;
|
|||
import org.hibernate.internal.CoreLogging;
|
||||
import org.hibernate.internal.CoreMessageLogger;
|
||||
import org.hibernate.internal.util.ReflectHelper;
|
||||
import org.hibernate.internal.util.collections.ArrayHelper;
|
||||
import org.hibernate.metamodel.model.convert.spi.JpaAttributeConverter;
|
||||
import org.hibernate.resource.beans.spi.ManagedBeanRegistry;
|
||||
import org.hibernate.service.ServiceRegistry;
|
||||
|
@ -707,15 +708,39 @@ public abstract class SimpleValue implements KeyValue {
|
|||
public Object accept(ValueVisitor visitor) {
|
||||
return visitor.accept(this);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public boolean[] getColumnInsertability() {
|
||||
return extractBooleansFromList( insertability );
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public boolean hasAnyInsertableColumns() {
|
||||
for ( Boolean val : insertability ) {
|
||||
if ( val ) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean[] getColumnUpdateability() {
|
||||
return extractBooleansFromList( updatability );
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasAnyUpdatableColumns() {
|
||||
for ( Boolean val : updatability ) {
|
||||
if ( val ) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private static boolean[] extractBooleansFromList(List<Boolean> list) {
|
||||
final boolean[] array = new boolean[ list.size() ];
|
||||
int i = 0;
|
||||
|
|
|
@ -5,12 +5,12 @@
|
|||
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
|
||||
*/
|
||||
package org.hibernate.mapping;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.hibernate.FetchMode;
|
||||
import org.hibernate.MappingException;
|
||||
import org.hibernate.boot.spi.MetadataBuildingContext;
|
||||
import org.hibernate.engine.spi.Mapping;
|
||||
import org.hibernate.service.ServiceRegistry;
|
||||
import org.hibernate.type.Type;
|
||||
|
@ -25,22 +25,26 @@ import org.hibernate.type.Type;
|
|||
* @author Gavin King
|
||||
*/
|
||||
public interface Value extends Serializable {
|
||||
public int getColumnSpan();
|
||||
public Iterator<Selectable> getColumnIterator();
|
||||
public Type getType() throws MappingException;
|
||||
public FetchMode getFetchMode();
|
||||
public Table getTable();
|
||||
public boolean hasFormula();
|
||||
public boolean isAlternateUniqueKey();
|
||||
public boolean isNullable();
|
||||
public boolean[] getColumnUpdateability();
|
||||
public boolean[] getColumnInsertability();
|
||||
public void createForeignKey() throws MappingException;
|
||||
public boolean isSimpleValue();
|
||||
public boolean isValid(Mapping mapping) throws MappingException;
|
||||
public void setTypeUsingReflection(String className, String propertyName) throws MappingException;
|
||||
public Object accept(ValueVisitor visitor);
|
||||
public boolean isSame(Value other);
|
||||
int getColumnSpan();
|
||||
Iterator<Selectable> getColumnIterator();
|
||||
Type getType() throws MappingException;
|
||||
FetchMode getFetchMode();
|
||||
Table getTable();
|
||||
boolean hasFormula();
|
||||
boolean isAlternateUniqueKey();
|
||||
boolean isNullable();
|
||||
void createForeignKey() throws MappingException;
|
||||
boolean isSimpleValue();
|
||||
boolean isValid(Mapping mapping) throws MappingException;
|
||||
void setTypeUsingReflection(String className, String propertyName) throws MappingException;
|
||||
Object accept(ValueVisitor visitor);
|
||||
boolean isSame(Value other);
|
||||
|
||||
boolean[] getColumnInsertability();
|
||||
boolean hasAnyInsertableColumns();
|
||||
|
||||
boolean[] getColumnUpdateability();
|
||||
boolean hasAnyUpdatableColumns();
|
||||
|
||||
ServiceRegistry getServiceRegistry();
|
||||
}
|
||||
|
|
|
@ -115,11 +115,15 @@ public class BasicValuedSingularAttributeMapping extends AbstractSingularAttribu
|
|||
getContainingTableExpression(),
|
||||
getMappedColumnExpression()
|
||||
),
|
||||
sqlAstProcessingState -> new ColumnReference(
|
||||
sqlAstProcessingState -> tableGroup.resolveColumnReference(
|
||||
getContainingTableExpression(),
|
||||
getMappedColumnExpression(),
|
||||
tableGroup.resolveTableReference( getContainingTableExpression() ).getIdentificationVariable(),
|
||||
jdbcMapping,
|
||||
creationState.getSqlAstCreationState().getCreationContext().getSessionFactory()
|
||||
() -> new ColumnReference(
|
||||
getMappedColumnExpression(),
|
||||
tableGroup.resolveTableReference( getContainingTableExpression() ).getIdentificationVariable(),
|
||||
jdbcMapping,
|
||||
creationState.getSqlAstCreationState().getCreationContext().getSessionFactory()
|
||||
)
|
||||
)
|
||||
),
|
||||
valueConverter == null ? getMappedTypeDescriptor().getMappedJavaTypeDescriptor() : valueConverter.getRelationalJavaDescriptor(),
|
||||
|
|
|
@ -42,7 +42,7 @@ public class NavigablePath implements DotIdentifierSequence {
|
|||
final String parentFullPath = parent.getFullPath();
|
||||
this.fullPath = StringHelper.isEmpty( parentFullPath )
|
||||
? navigableName
|
||||
: parentFullPath + '.' + navigableName;
|
||||
: parentFullPath + "." + navigableName;
|
||||
}
|
||||
else {
|
||||
this.fullPath = navigableName;
|
||||
|
@ -59,7 +59,7 @@ public class NavigablePath implements DotIdentifierSequence {
|
|||
public NavigablePath(String rootName, String alias) {
|
||||
this.parent = null;
|
||||
|
||||
this.fullPath = alias == null ? rootName : rootName + '(' + alias + ')';
|
||||
this.fullPath = alias == null ? rootName : rootName + "(" + alias + ")";
|
||||
|
||||
this.hashCode = fullPath.hashCode();
|
||||
}
|
||||
|
@ -90,7 +90,7 @@ public class NavigablePath implements DotIdentifierSequence {
|
|||
|
||||
@Override
|
||||
public String toString() {
|
||||
return getClass().getSimpleName() + '[' + fullPath + ']';
|
||||
return getClass().getSimpleName() + "[" + fullPath + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@@ -22,7 +22,7 @@ import static org.jboss.logging.Logger.Level.ERROR;
 * @author Steve Ebersole
 */
@MessageLogger( projectCode = "HHH" )
@ValidIdRange( min = 90003001, max = 90004000 )
@ValidIdRange( min = 90003001, max = 90003500 )
public interface QueryLogger extends BasicLogger {
	String LOGGER_NAME = "org.hibernate.orm.query";

@@ -0,0 +1,50 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later
 * See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
 */
package org.hibernate.query.hql;

import org.hibernate.HibernateException;
import org.hibernate.query.QueryLogger;

import org.jboss.logging.BasicLogger;
import org.jboss.logging.Logger;
import org.jboss.logging.annotations.Cause;
import org.jboss.logging.annotations.LogMessage;
import org.jboss.logging.annotations.Message;
import org.jboss.logging.annotations.MessageLogger;
import org.jboss.logging.annotations.ValidIdRange;

import static org.jboss.logging.Logger.Level.ERROR;

/**
 * @author Steve Ebersole
 */
@MessageLogger( projectCode = "HHH" )
@ValidIdRange( min = 90003501, max = 90004000 )
public interface HqlLogger extends BasicLogger {
	String LOGGER_NAME = QueryLogger.subLoggerName( "hql" );

	HqlLogger QUERY_LOGGER = Logger.getMessageLogger( HqlLogger.class, LOGGER_NAME );

	boolean TRACE_ENABLED = QUERY_LOGGER.isTraceEnabled();
	boolean DEBUG_ENABLED = QUERY_LOGGER.isDebugEnabled();

	static String subLoggerName(String subName) {
		return LOGGER_NAME + '.' + subName;
	}

	static Logger subLogger(String subName) {
		return Logger.getLogger( subLoggerName( subName ) );
	}

	static <T> T subLogger(String subName, Class<T> loggerJavaType) {
		return Logger.getMessageLogger( loggerJavaType, subLoggerName( subName ) );
	}

	@LogMessage(level = ERROR)
	@Message(value = "Error in named query: %s", id = 90003501)
	void namedQueryError(String queryName, @Cause HibernateException e);
}

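The new logger exposes subLoggerName/subLogger helpers plus level flags (TRACE_ENABLED/DEBUG_ENABLED) computed once at class initialization, and the following hunks use exactly that combination to guard debug statements. A short usage sketch (the class name here is hypothetical; the pattern mirrors HqlParseTreeBuilder below):

import org.hibernate.query.hql.HqlLogger;

import org.jboss.logging.Logger;

public class HqlLoggingUsageSketch {
	// Named sub-logger, e.g. "org.hibernate.orm.query.hql.reservedWordAsIdentifier"
	private static final Logger LOGGER = HqlLogger.subLogger( "reservedWordAsIdentifier" );

	// Level check done once, at class initialization, rather than on every log call.
	private static final boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();

	void report(String identifierText) {
		if ( DEBUG_ENABLED ) {
			// The guard avoids debugf()'s argument handling when debug logging is off.
			LOGGER.debugf( "Encountered use of reserved word as identifier : %s", identifierText );
		}
	}
}

Caching the level check like this trades runtime log-level reconfigurability for skipping the check (and the formatting arguments) on every call.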
@ -6,7 +6,7 @@
|
|||
*/
|
||||
package org.hibernate.query.hql.internal;
|
||||
|
||||
import java.util.Arrays;
|
||||
import org.hibernate.query.hql.HqlLogger;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
|
@ -20,7 +20,8 @@ import org.antlr.v4.runtime.Token;
|
|||
* @author Steve Ebersole
|
||||
*/
|
||||
public class HqlParseTreeBuilder {
|
||||
private static final Logger log = Logger.getLogger( HqlParseTreeBuilder.class );
|
||||
private static final Logger LOGGER = HqlLogger.subLogger( "reservedWordAsIdentifier" );
|
||||
private static final boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
|
||||
|
||||
/**
|
||||
* Singleton access
|
||||
|
@ -35,7 +36,9 @@ public class HqlParseTreeBuilder {
|
|||
return new HqlParser( new CommonTokenStream( hqlLexer ) ) {
|
||||
@Override
|
||||
protected void logUseOfReservedWordAsIdentifier(Token token) {
|
||||
log.debugf( "Encountered use of reserved word as identifier : " + token.getText() );
|
||||
if ( DEBUG_ENABLED ) {
|
||||
LOGGER.debugf( "Encountered use of reserved word as identifier : %s", token.getText() );
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
package org.hibernate.query.hql.internal;
|
||||
|
||||
import org.hibernate.query.QueryLogger;
|
||||
import org.hibernate.query.hql.HqlLogger;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
|
@ -19,7 +20,7 @@ import org.antlr.v4.runtime.tree.ParseTreeWalker;
|
|||
*/
|
||||
@SuppressWarnings("WeakerAccess")
|
||||
public class HqlParseTreePrinter extends HqlParserBaseListener {
|
||||
private static final Logger HQL_LOGGER = QueryLogger.subLogger( "hql.parseTree" );
|
||||
private static final Logger HQL_LOGGER = HqlLogger.subLogger( "ast" );
|
||||
private static final boolean LOG_DEBUG_ENABLED = HQL_LOGGER.isDebugEnabled();
|
||||
|
||||
public static void logStatementParseTree(HqlParser parser) {
|
||||
|
|
|
@ -35,11 +35,11 @@ import org.hibernate.metamodel.model.domain.PluralPersistentAttribute;
|
|||
import org.hibernate.query.BinaryArithmeticOperator;
|
||||
import org.hibernate.query.ComparisonOperator;
|
||||
import org.hibernate.query.PathException;
|
||||
import org.hibernate.query.QueryLogger;
|
||||
import org.hibernate.query.SemanticException;
|
||||
import org.hibernate.query.TrimSpec;
|
||||
import org.hibernate.query.UnaryArithmeticOperator;
|
||||
import org.hibernate.query.hql.HqlInterpretationException;
|
||||
import org.hibernate.query.hql.HqlLogger;
|
||||
import org.hibernate.query.hql.spi.DotIdentifierConsumer;
|
||||
import org.hibernate.query.hql.spi.SemanticPathPart;
|
||||
import org.hibernate.query.hql.spi.SqmCreationOptions;
|
||||
|
@ -665,7 +665,7 @@ public class SemanticQueryBuilder extends HqlParserBaseVisitor implements SqmCre
|
|||
}
|
||||
if ( sortExpression instanceof SqmLiteral
|
||||
|| sortExpression instanceof SqmParameter ) {
|
||||
QueryLogger.QUERY_LOGGER.debugf( "Questionable sorting by constant value : %s", sortExpression );
|
||||
HqlLogger.QUERY_LOGGER.debugf( "Questionable sorting by constant value : %s", sortExpression );
|
||||
}
|
||||
|
||||
final String collation;
|
||||
|
@ -782,7 +782,7 @@ public class SemanticQueryBuilder extends HqlParserBaseVisitor implements SqmCre
|
|||
|
||||
@Override
|
||||
public EntityDomainType<?> visitEntityName(HqlParser.EntityNameContext parserEntityName) {
|
||||
final String entityName = parserEntityName.dotIdentifierSequence().getText();
|
||||
final String entityName = parserEntityName.fullNameText;
|
||||
final EntityDomainType entityReference = resolveEntityReference( entityName );
|
||||
if ( entityReference == null ) {
|
||||
throw new UnknownEntityException( "Could not resolve entity name [" + entityName + "] as DML target", entityName );
|
||||
|
@ -844,7 +844,7 @@ public class SemanticQueryBuilder extends HqlParserBaseVisitor implements SqmCre
|
|||
|
||||
@Override
|
||||
public SqmRoot visitPathRoot(HqlParser.PathRootContext ctx) {
|
||||
final String name = ctx.entityName().getText();
|
||||
final String name = ctx.entityName().fullNameText;
|
||||
|
||||
log.debugf( "Handling root path - %s", name );
|
||||
|
||||
|
@ -922,7 +922,7 @@ public class SemanticQueryBuilder extends HqlParserBaseVisitor implements SqmCre
|
|||
|
||||
@SuppressWarnings("unchecked")
|
||||
private void consumeCrossJoin(HqlParser.CrossJoinContext parserJoin, SqmRoot sqmRoot) {
|
||||
final String name = parserJoin.pathRoot().entityName().getText();
|
||||
final String name = parserJoin.pathRoot().entityName().fullNameText;
|
||||
|
||||
SqmTreeCreationLogger.LOGGER.debugf( "Handling root path - %s", name );
|
||||
|
||||
|
@ -954,21 +954,21 @@ public class SemanticQueryBuilder extends HqlParserBaseVisitor implements SqmCre
|
|||
protected void consumeQualifiedJoin(HqlParser.QualifiedJoinContext parserJoin, SqmRoot<?> sqmRoot) {
|
||||
final SqmJoinType joinType;
|
||||
final HqlParser.JoinTypeQualifierContext joinTypeQualifier = parserJoin.joinTypeQualifier();
|
||||
if ( joinTypeQualifier.OUTER() != null ) {
|
||||
// for outer joins, only left outer joins are currently supported
|
||||
if ( joinTypeQualifier.FULL() != null ) {
|
||||
throw new SemanticException( "FULL OUTER joins are not yet supported : " + parserJoin.getText() );
|
||||
}
|
||||
if ( joinTypeQualifier.RIGHT() != null ) {
|
||||
throw new SemanticException( "RIGHT OUTER joins are not yet supported : " + parserJoin.getText() );
|
||||
}
|
||||
|
||||
if ( joinTypeQualifier.FULL() != null ) {
|
||||
throw new SemanticException( "FULL OUTER joins are not yet supported : " + parserJoin.getText() );
|
||||
}
|
||||
else if ( joinTypeQualifier.RIGHT() != null ) {
|
||||
throw new SemanticException( "RIGHT OUTER joins are not yet supported : " + parserJoin.getText() );
|
||||
}
|
||||
else if ( joinTypeQualifier.OUTER() != null || joinTypeQualifier.LEFT() != null ) {
|
||||
joinType = SqmJoinType.LEFT;
|
||||
}
|
||||
else {
|
||||
joinType = SqmJoinType.INNER;
|
||||
}
|
||||
|
||||
|
||||
final String alias = visitIdentificationVariableDef( parserJoin.qualifiedJoinRhs().identificationVariableDef() );
|
||||
|
||||
dotIdentifierConsumerStack.push(
|
||||
|
|
|
@ -13,12 +13,12 @@ import java.util.Map;
|
|||
import java.util.function.Function;
|
||||
|
||||
import org.hibernate.query.NavigablePath;
|
||||
import org.hibernate.query.QueryLogger;
|
||||
import org.hibernate.query.hql.HqlLogger;
|
||||
import org.hibernate.query.hql.spi.SqmCreationProcessingState;
|
||||
import org.hibernate.query.hql.spi.SqmPathRegistry;
|
||||
import org.hibernate.query.sqm.AliasCollisionException;
|
||||
import org.hibernate.query.sqm.ParsingException;
|
||||
import org.hibernate.query.sqm.SqmPathSource;
|
||||
import org.hibernate.query.hql.spi.SqmCreationProcessingState;
|
||||
import org.hibernate.query.sqm.SqmTreeCreationLogger;
|
||||
import org.hibernate.query.sqm.tree.domain.SqmPath;
|
||||
import org.hibernate.query.sqm.tree.from.SqmFrom;
|
||||
|
@ -178,7 +178,7 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
|
|||
|
||||
if ( found == null ) {
|
||||
if ( associatedProcessingState.getParentProcessingState() != null ) {
|
||||
QueryLogger.QUERY_LOGGER.debugf(
|
||||
HqlLogger.QUERY_LOGGER.debugf(
|
||||
"Unable to resolve unqualified attribute [%s] in local from-clause; checking parent ",
|
||||
navigableName
|
||||
);
|
||||
|
|
|
@ -32,8 +32,6 @@ public class StandardHqlTranslator implements HqlTranslator {
|
|||
|
||||
@Override
|
||||
public SqmStatement interpret(String query) {
|
||||
// final ParsingContext parsingContext = ;
|
||||
|
||||
// first, ask Antlr to build the parse tree
|
||||
final HqlParser parser = HqlParseTreeBuilder.INSTANCE.parseHql( query );
|
||||
|
||||
|
@ -48,6 +46,7 @@ public class StandardHqlTranslator implements HqlTranslator {
|
|||
sqmCreationContext
|
||||
);
|
||||
|
||||
// Log the SQM tree (if enabled)
|
||||
SqmTreePrinter.logTree( sqmStatement );
|
||||
|
||||
return sqmStatement;
|
||||
|
|
|
@ -16,10 +16,10 @@ import java.util.TreeMap;
|
|||
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.internal.util.collections.CollectionHelper;
|
||||
import org.hibernate.query.QueryLogger;
|
||||
import org.hibernate.query.internal.QueryParameterNamedImpl;
|
||||
import org.hibernate.query.internal.QueryParameterPositionalImpl;
|
||||
import org.hibernate.query.spi.QueryParameterImplementor;
|
||||
import org.hibernate.query.sqm.SqmTreeTransformationLogger;
|
||||
import org.hibernate.query.sqm.tree.SqmStatement;
|
||||
import org.hibernate.query.sqm.tree.expression.JpaCriteriaParameter;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmJpaCriteriaParameterWrapper;
|
||||
|
@ -30,10 +30,6 @@ import org.hibernate.query.sqm.tree.expression.SqmPositionalParameter;
|
|||
/**
|
||||
* Maintains a cross-reference between SqmParameter and QueryParameter references.
|
||||
*
|
||||
* @apiNote The difference between {@link #addCriteriaAdjustment} and {@link #addExpansion}
|
||||
* is the durability of given parameter. A Criteria-adjustment lives beyond
|
||||
* {@link #clearExpansions()} while an expansion does not.
|
||||
*
|
||||
* @author Steve Ebersole
|
||||
*/
|
||||
@SuppressWarnings("WeakerAccess")
|
||||
|
@ -124,7 +120,7 @@ public class DomainParameterXref {
|
|||
|
||||
if ( ! sqmParameter.allowMultiValuedBinding() ) {
|
||||
if ( queryParameter.allowsMultiValuedBinding() ) {
|
||||
QueryLogger.QUERY_LOGGER.debugf(
|
||||
SqmTreeTransformationLogger.LOGGER.debugf(
|
||||
"SqmParameter [%s] does not allow multi-valued binding, " +
|
||||
"but mapped to existing QueryParameter [%s] that does - " +
|
||||
"disallowing multi-valued binding" ,
|
||||
|
@ -190,6 +186,19 @@ public class DomainParameterXref {
|
|||
return sqmParamsByQueryParam.size();
|
||||
}
|
||||
|
||||
public int getSqmParameterCount() {
|
||||
return queryParamBySqmParam.size();
|
||||
}
|
||||
|
||||
public int getNumberOfSqmParameters(QueryParameterImplementor<?> queryParameter) {
|
||||
final List<SqmParameter> sqmParameters = sqmParamsByQueryParam.get( queryParameter );
|
||||
if ( sqmParameters == null ) {
|
||||
// this should maybe be an exception instead
|
||||
return 0;
|
||||
}
|
||||
return sqmParameters.size();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the mapping of all QueryParameters to the List of its corresponding
|
||||
* SqmParameters
|
||||
|
@ -213,20 +222,11 @@ public class DomainParameterXref {
|
|||
return queryParamBySqmParam.get( sqmParameter );
|
||||
}
|
||||
|
||||
public void addCriteriaAdjustment(
|
||||
QueryParameterImplementor<?> domainParam,
|
||||
JpaCriteriaParameter originalSqmParameter,
|
||||
SqmParameter adjustment) {
|
||||
QueryLogger.QUERY_LOGGER.debugf( "Adding JPA-param xref adjustment : %s", originalSqmParameter );
|
||||
sqmParamsByQueryParam.get( domainParam ).add( adjustment );
|
||||
queryParamBySqmParam.put( adjustment, domainParam );
|
||||
}
|
||||
|
||||
public void addExpansion(
|
||||
QueryParameterImplementor<?> domainParam,
|
||||
SqmParameter originalSqmParameter,
|
||||
SqmParameter expansion) {
|
||||
QueryLogger.QUERY_LOGGER.debugf( "Adding domain-param xref expansion : %s", originalSqmParameter );
|
||||
SqmTreeTransformationLogger.LOGGER.debugf( "Adding domain-param xref expansion : %s", originalSqmParameter );
|
||||
queryParamBySqmParam.put( expansion, domainParam );
|
||||
|
||||
if ( expansions == null ) {
|
||||
|
|
|
@ -97,10 +97,14 @@ import org.jboss.logging.Logger;
|
|||
public class SqmTreePrinter implements SemanticQueryWalker<Object> {
|
||||
private static final Logger log = Logger.getLogger( SqmTreePrinter.class );
|
||||
|
||||
private static final Logger LOGGER = QueryLogger.subLogger( "sqm.sqmTree" );
|
||||
private static final Logger LOGGER = QueryLogger.subLogger( "sqm.ast" );
|
||||
private static final boolean DEBUG_ENABLED = LOGGER.isDebugEnabled();
|
||||
|
||||
public static void logTree(SqmStatement sqmStatement) {
|
||||
if ( ! DEBUG_ENABLED ) {
|
||||
return;
|
||||
}
|
||||
|
||||
final SqmTreePrinter printer = new SqmTreePrinter();
|
||||
|
||||
if ( sqmStatement instanceof SqmSelectStatement ) {
|
||||
|
|
|
@ -17,7 +17,6 @@ import java.util.function.Consumer;
|
|||
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.engine.spi.SharedSessionContractImplementor;
|
||||
import org.hibernate.internal.util.collections.CollectionHelper;
|
||||
import org.hibernate.metamodel.mapping.Bindable;
|
||||
import org.hibernate.metamodel.mapping.JdbcMapping;
|
||||
import org.hibernate.metamodel.mapping.MappingModelExpressable;
|
||||
|
@ -83,18 +82,15 @@ public class SqmUtil {
|
|||
}
|
||||
|
||||
final int queryParameterCount = domainParameterXref.getQueryParameterCount();
|
||||
final Map<QueryParameterImplementor<?>, Map<SqmParameter, List<JdbcParameter>>> result = new IdentityHashMap<>(
|
||||
CollectionHelper.determineProperSizing( queryParameterCount )
|
||||
);
|
||||
final Map<QueryParameterImplementor<?>, Map<SqmParameter, List<JdbcParameter>>> result = new IdentityHashMap<>( queryParameterCount );
|
||||
|
||||
for ( Map.Entry<QueryParameterImplementor<?>, List<SqmParameter>> entry :
|
||||
domainParameterXref.getSqmParamByQueryParam().entrySet() ) {
|
||||
for ( Map.Entry<QueryParameterImplementor<?>, List<SqmParameter>> entry : domainParameterXref.getSqmParamByQueryParam().entrySet() ) {
|
||||
final QueryParameterImplementor<?> queryParam = entry.getKey();
|
||||
final List<SqmParameter> sqmParams = entry.getValue();
|
||||
|
||||
final Map<SqmParameter, List<JdbcParameter>> sqmParamMap = result.computeIfAbsent(
|
||||
queryParam,
|
||||
qp -> new IdentityHashMap<>()
|
||||
qp -> new IdentityHashMap<>( sqmParams.size() )
|
||||
);
|
||||
|
||||
for ( SqmParameter sqmParam : sqmParams ) {
|
||||
|
@ -159,7 +155,7 @@ public class SqmUtil {
|
|||
Map<QueryParameterImplementor<?>, Map<SqmParameter, List<JdbcParameter>>> jdbcParamXref,
|
||||
SqlAstCreationState sqlAstCreationState,
|
||||
SharedSessionContractImplementor session) {
|
||||
final JdbcParameterBindings jdbcParameterBindings = new JdbcParameterBindingsImpl();
|
||||
final JdbcParameterBindings jdbcParameterBindings = new JdbcParameterBindingsImpl( domainParameterXref );
|
||||
|
||||
for ( Map.Entry<QueryParameterImplementor<?>, List<SqmParameter>> entry :
|
||||
domainParameterXref.getSqmParamByQueryParam().entrySet() ) {
|
||||
|
|
|
@ -50,7 +50,7 @@ public class SqlAliasBaseManager implements SqlAliasBaseGenerator {
|
|||
@Override
|
||||
public String generateNewAlias() {
|
||||
synchronized ( this ) {
|
||||
final String alias = stem + '_' + ( aliasCount++ );
|
||||
final String alias = stem + "_" + ( aliasCount++ );
|
||||
if ( SqlTreeCreationLogger.DEBUG_ENABLED ) {
|
||||
SqlTreeCreationLogger.LOGGER.debugf( "Created new SQL alias : %s", alias );
|
||||
}
|
||||
|
|
|
@@ -31,16 +31,21 @@ import org.hibernate.type.spi.TypeConfiguration;
 * @author Steve Ebersole
 */
public interface SqlExpressionResolver {
	/**
	 * Helper for generating an expression key for a column reference.
	 *
	 * @see #resolveSqlExpression
	 */
	static String createColumnReferenceKey(String tableExpression, String columnExpression) {
		return tableExpression + columnExpression;
	}

	/**
	 * Given a qualifier + a qualifiable SqlExpressable, resolve the
	 * (Sql)Expression reference.
	 */
	Expression resolveSqlExpression(String key, Function<SqlAstProcessingState,Expression> creator);

	static String createColumnReferenceKey(String tableExpression, String columnExpression) {
		return tableExpression + '.' + columnExpression;
	}

	/**
	 * Resolve the SqlSelection for the given expression
	 */

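One plausible reason for inserting the '.' separator into the cache key (my reading; the commit message does not say): with plain concatenation, distinct (table, column) pairs can produce the same key, which would make unrelated column references share a cached Expression. A tiny illustration of the collision the separator prevents (the identifiers are hypothetical, chosen only to show the ambiguity):

public class ColumnKeyCollisionDemo {
	static String keyWithoutSeparator(String table, String column) {
		return table + column;
	}

	static String keyWithSeparator(String table, String column) {
		return table + '.' + column;
	}

	public static void main(String[] args) {
		System.out.println( keyWithoutSeparator( "t1", "a_b" ).equals( keyWithoutSeparator( "t1a", "_b" ) ) ); // true  - collision
		System.out.println( keyWithSeparator( "t1", "a_b" ).equals( keyWithSeparator( "t1a", "_b" ) ) );       // false - distinct keys
	}
}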
@@ -80,10 +80,16 @@ public class SqlAstProcessingStateImpl implements SqlAstProcessingState, SqlExpr
	public Expression resolveSqlExpression(
			String key,
			Function<SqlAstProcessingState,Expression> creator) {
		final Expression expression = expressionMap.computeIfAbsent(
				key,
				s -> creator.apply( this )
		);
		final Expression existing = expressionMap.get( key );

		final Expression expression;
		if ( existing != null ) {
			expression = existing;
		}
		else {
			expression = creator.apply( this );
			expressionMap.put( key, expression );
		}

		final Expression result = normalize( expression );

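Unrolling computeIfAbsent into an explicit get/put has two effects, either of which may be the motivation here (the commit itself doesn't say): the common cache-hit path no longer allocates the capturing lambda s -> creator.apply( this ), and the creator now runs outside the map's own compute step, so a creator that registers further expressions in the same map does not run afoul of computeIfAbsent's rule against modifying the map from within the mapping function. A generic sketch of the pattern, with a placeholder Expression type:

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

public class ExpressionCacheSketch {
	// Placeholder for the SQL AST Expression type.
	interface Expression {}

	private final Map<String, Expression> expressionMap = new HashMap<>();

	public Expression resolve(String key, Function<String, Expression> creator) {
		final Expression existing = expressionMap.get( key );
		if ( existing != null ) {
			// Hit path: no capturing lambda is created, no compute machinery runs.
			return existing;
		}

		// Miss path: the creator runs outside the map's own compute step, so it may
		// freely resolve (and cache) nested expressions in the same map.
		final Expression created = creator.apply( key );
		expressionMap.put( key, created );
		return created;
	}
}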
@ -57,7 +57,13 @@ import org.hibernate.sql.results.internal.EmptySqlSelection;
|
|||
import org.hibernate.type.descriptor.sql.SqlTypeDescriptorIndicators;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
||||
import static org.hibernate.sql.ast.spi.SqlAppender.*;
|
||||
import static org.hibernate.sql.ast.spi.SqlAppender.CLOSE_PARENTHESIS;
|
||||
import static org.hibernate.sql.ast.spi.SqlAppender.COMA_SEPARATOR;
|
||||
import static org.hibernate.sql.ast.spi.SqlAppender.EMPTY_STRING;
|
||||
import static org.hibernate.sql.ast.spi.SqlAppender.NO_SEPARATOR;
|
||||
import static org.hibernate.sql.ast.spi.SqlAppender.NULL_KEYWORD;
|
||||
import static org.hibernate.sql.ast.spi.SqlAppender.OPEN_PARENTHESIS;
|
||||
import static org.hibernate.sql.ast.spi.SqlAppender.PARAM_MARKER;
|
||||
|
||||
/**
|
||||
* @author Steve Ebersole
|
||||
|
@ -131,14 +137,14 @@ public abstract class AbstractSqlAstWalker
|
|||
@Override
|
||||
public void visitQuerySpec(QuerySpec querySpec) {
|
||||
if ( !querySpec.isRoot() ) {
|
||||
appendSql( EMPTY_STRING + OPEN_PARENTHESIS );
|
||||
appendSql( " (" );
|
||||
}
|
||||
|
||||
visitSelectClause( querySpec.getSelectClause() );
|
||||
visitFromClause( querySpec.getFromClause() );
|
||||
|
||||
if ( querySpec.getWhereClauseRestrictions() != null && !querySpec.getWhereClauseRestrictions().isEmpty() ) {
|
||||
appendSql( EMPTY_STRING + WHERE_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " where " );
|
||||
|
||||
clauseStack.push( Clause.WHERE );
|
||||
try {
|
||||
|
@ -151,7 +157,7 @@ public abstract class AbstractSqlAstWalker
|
|||
|
||||
final List<SortSpecification> sortSpecifications = querySpec.getSortSpecifications();
|
||||
if ( sortSpecifications != null && !sortSpecifications.isEmpty() ) {
|
||||
appendSql( EMPTY_STRING + ORDER_BY_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " order by " );
|
||||
|
||||
String separator = NO_SEPARATOR;
|
||||
for (SortSpecification sortSpecification : sortSpecifications ) {
|
||||
|
@ -164,7 +170,7 @@ public abstract class AbstractSqlAstWalker
|
|||
visitLimitOffsetClause( querySpec );
|
||||
|
||||
if ( !querySpec.isRoot() ) {
|
||||
appendSql( COLLATE_KEYWORD + EMPTY_STRING );
|
||||
appendSql( ")" );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -178,16 +184,16 @@ public abstract class AbstractSqlAstWalker
|
|||
|
||||
final String collation = sortSpecification.getCollation();
|
||||
if ( collation != null ) {
|
||||
appendSql( EMPTY_STRING + COLLATE_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " collate " );
|
||||
appendSql( collation );
|
||||
}
|
||||
|
||||
final SortOrder sortOrder = sortSpecification.getSortOrder();
|
||||
if ( sortOrder == SortOrder.ASCENDING ) {
|
||||
appendSql( EMPTY_STRING + ASC_KEYWORD );
|
||||
appendSql( " asc" );
|
||||
}
|
||||
else if ( sortOrder == SortOrder.DESCENDING ) {
|
||||
appendSql( EMPTY_STRING + DESC_KEYWORD );
|
||||
appendSql( " desc" );
|
||||
}
|
||||
|
||||
// TODO: null precedence handling
|
||||
|
@ -230,9 +236,9 @@ public abstract class AbstractSqlAstWalker
|
|||
clauseStack.push( Clause.SELECT );
|
||||
|
||||
try {
|
||||
appendSql( SELECT_KEYWORD + EMPTY_STRING );
|
||||
appendSql( "select " );
|
||||
if ( selectClause.isDistinct() ) {
|
||||
appendSql( DISTINCT_KEYWORD + EMPTY_STRING );
|
||||
appendSql( "distinct " );
|
||||
}
|
||||
|
||||
String separator = NO_SEPARATOR;
|
||||
|
@ -261,7 +267,7 @@ public abstract class AbstractSqlAstWalker
|
|||
|
||||
@Override
|
||||
public void visitFromClause(FromClause fromClause) {
|
||||
appendSql( EMPTY_STRING + FROM_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " from " );
|
||||
|
||||
String separator = NO_SEPARATOR;
|
||||
for ( TableGroup root : fromClause.getRoots() ) {
|
||||
|
@ -297,7 +303,8 @@ public abstract class AbstractSqlAstWalker
|
|||
|
||||
final String identificationVariable = tableReference.getIdentificationVariable();
|
||||
if ( identificationVariable != null ) {
|
||||
sqlAppender.appendSql( EMPTY_STRING + AS_KEYWORD + EMPTY_STRING + identificationVariable );
|
||||
sqlAppender.appendSql( " as " );
|
||||
sqlAppender.appendSql( identificationVariable );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -311,12 +318,12 @@ public abstract class AbstractSqlAstWalker
|
|||
for ( TableReferenceJoin tableJoin : joins ) {
|
||||
sqlAppender.appendSql( EMPTY_STRING );
|
||||
sqlAppender.appendSql( tableJoin.getJoinType().getText() );
|
||||
sqlAppender.appendSql( EMPTY_STRING + JOIN_KEYWORD + EMPTY_STRING);
|
||||
sqlAppender.appendSql( " join " );
|
||||
|
||||
renderTableReference( tableJoin.getJoinedTableReference() );
|
||||
|
||||
if ( tableJoin.getJoinPredicate() != null && !tableJoin.getJoinPredicate().isEmpty() ) {
|
||||
sqlAppender.appendSql( EMPTY_STRING + ON_KEYWORD + EMPTY_STRING );
|
||||
sqlAppender.appendSql( " on " );
|
||||
tableJoin.getJoinPredicate().accept( this );
|
||||
}
|
||||
}
|
||||
|
@ -339,14 +346,14 @@ public abstract class AbstractSqlAstWalker
|
|||
else {
|
||||
appendSql( EMPTY_STRING );
|
||||
appendSql( tableGroupJoin.getJoinType().getText() );
|
||||
appendSql( EMPTY_STRING + JOIN_KEYWORD + EMPTY_STRING);
|
||||
appendSql( " join " );
|
||||
|
||||
renderTableGroup( joinedGroup );
|
||||
|
||||
clauseStack.push( Clause.WHERE );
|
||||
try {
|
||||
if ( tableGroupJoin.getPredicate() != null && !tableGroupJoin.getPredicate().isEmpty() ) {
|
||||
appendSql( EMPTY_STRING + ON_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " on " );
|
||||
tableGroupJoin.getPredicate().accept( this );
|
||||
}
|
||||
}
|
||||
|
@ -733,33 +740,34 @@ public abstract class AbstractSqlAstWalker
|
|||
|
||||
@Override
|
||||
public void visitCaseSearchedExpression(CaseSearchedExpression caseSearchedExpression) {
|
||||
appendSql( CASE_KEYWORD + EMPTY_STRING );
|
||||
appendSql( "case " );
|
||||
|
||||
for ( CaseSearchedExpression.WhenFragment whenFragment : caseSearchedExpression.getWhenFragments() ) {
|
||||
appendSql( EMPTY_STRING + WHEN_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " when " );
|
||||
whenFragment.getPredicate().accept( this );
|
||||
appendSql( EMPTY_STRING + THEN_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " then " );
|
||||
whenFragment.getResult().accept( this );
|
||||
}
|
||||
appendSql( ELSE_KEYWORD );
|
||||
|
||||
appendSql( " else " );
|
||||
caseSearchedExpression.getOtherwise().accept( this );
|
||||
appendSql( EMPTY_STRING + END_KEYWORD );
|
||||
|
||||
appendSql( " end" );
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visitCaseSimpleExpression(CaseSimpleExpression caseSimpleExpression) {
|
||||
appendSql( CASE_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " case" );
|
||||
caseSimpleExpression.getFixture().accept( this );
|
||||
for ( CaseSimpleExpression.WhenFragment whenFragment : caseSimpleExpression.getWhenFragments() ) {
|
||||
appendSql( EMPTY_STRING + WHEN_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " when " );
|
||||
whenFragment.getCheckValue().accept( this );
|
||||
appendSql( EMPTY_STRING + THEN_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " then " );
|
||||
whenFragment.getResult().accept( this );
|
||||
}
|
||||
appendSql( EMPTY_STRING + ELSE_KEYWORD + EMPTY_STRING );
|
||||
|
||||
appendSql( " else " );
|
||||
caseSimpleExpression.getOtherwise().accept( this );
|
||||
appendSql( EMPTY_STRING + END_KEYWORD );
|
||||
appendSql( " end" );
|
||||
}
|
||||
|
||||
|
||||
|
@ -881,11 +889,11 @@ public abstract class AbstractSqlAstWalker
|
|||
public void visitBetweenPredicate(BetweenPredicate betweenPredicate) {
|
||||
betweenPredicate.getExpression().accept( this );
|
||||
if ( betweenPredicate.isNegated() ) {
|
||||
appendSql( EMPTY_STRING + NOT_KEYWORD );
|
||||
appendSql( " not" );
|
||||
}
|
||||
appendSql( EMPTY_STRING + BETWEEN_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " between " );
|
||||
betweenPredicate.getLowerBound().accept( this );
|
||||
appendSql( EMPTY_STRING + AND_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " and " );
|
||||
betweenPredicate.getUpperBound().accept( this );
|
||||
}
|
||||
|
||||
|
@ -909,9 +917,9 @@ public abstract class AbstractSqlAstWalker
|
|||
public void visitInListPredicate(InListPredicate inListPredicate) {
|
||||
inListPredicate.getTestExpression().accept( this );
|
||||
if ( inListPredicate.isNegated() ) {
|
||||
appendSql( NOT_KEYWORD );
|
||||
appendSql( " not" );
|
||||
}
|
||||
appendSql( IN_KEYWORD + ' ' + OPEN_PARENTHESIS );
|
||||
appendSql( " in (" );
|
||||
if ( inListPredicate.getListExpressions().isEmpty() ) {
|
||||
appendSql( NULL_KEYWORD );
|
||||
}
|
||||
|
@ -930,9 +938,9 @@ public abstract class AbstractSqlAstWalker
|
|||
public void visitInSubQueryPredicate(InSubQueryPredicate inSubQueryPredicate) {
|
||||
inSubQueryPredicate.getTestExpression().accept( this );
|
||||
if ( inSubQueryPredicate.isNegated() ) {
|
||||
appendSql( ' ' + NOT_KEYWORD );
|
||||
appendSql( " not" );
|
||||
}
|
||||
appendSql( ' ' + IN_KEYWORD + ' ' );
|
||||
appendSql( " in " );
|
||||
visitQuerySpec( inSubQueryPredicate.getSubQuery() );
|
||||
}
|
||||
|
||||
|
@ -946,9 +954,11 @@ public abstract class AbstractSqlAstWalker
|
|||
for ( Predicate predicate : junction.getPredicates() ) {
|
||||
appendSql( separator );
|
||||
predicate.accept( this );
|
||||
separator = junction.getNature() == Junction.Nature.CONJUNCTION
|
||||
? EMPTY_STRING + AND_KEYWORD + EMPTY_STRING
|
||||
: EMPTY_STRING + OR_KEYWORD + EMPTY_STRING;
|
||||
if ( separator == NO_SEPARATOR ) {
|
||||
separator = junction.getNature() == Junction.Nature.CONJUNCTION
|
||||
? " and "
|
||||
: " or ";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -956,12 +966,12 @@ public abstract class AbstractSqlAstWalker
|
|||
public void visitLikePredicate(LikePredicate likePredicate) {
|
||||
likePredicate.getMatchExpression().accept( this );
|
||||
if ( likePredicate.isNegated() ) {
|
||||
appendSql( EMPTY_STRING + NOT_KEYWORD );
|
||||
appendSql( " not" );
|
||||
}
|
||||
appendSql( EMPTY_STRING + LIKE_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " like " );
|
||||
likePredicate.getPattern().accept( this );
|
||||
if ( likePredicate.getEscapeCharacter() != null ) {
|
||||
appendSql( EMPTY_STRING + ESCAPE_KEYWORD + EMPTY_STRING );
|
||||
appendSql( " escape " );
|
||||
likePredicate.getEscapeCharacter().accept( this );
|
||||
}
|
||||
}
|
||||
|
@ -972,19 +982,19 @@ public abstract class AbstractSqlAstWalker
|
|||
return;
|
||||
}
|
||||
|
||||
appendSql( NOT_KEYWORD + EMPTY_STRING + OPEN_PARENTHESIS );
|
||||
appendSql( "not (" );
|
||||
negatedPredicate.getPredicate().accept( this );
|
||||
appendSql( CLOSE_PARENTHESIS );
|
||||
appendSql( ")" );
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visitNullnessPredicate(NullnessPredicate nullnessPredicate) {
|
||||
nullnessPredicate.getExpression().accept( this );
|
||||
if ( nullnessPredicate.isNegated() ) {
|
||||
appendSql( IS_NOT_NULL_FRAGMENT );
|
||||
appendSql( " is not null" );
|
||||
}
|
||||
else {
|
||||
appendSql( IS_NULL_FRAGMENT );
|
||||
appendSql( " is null" );
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -21,6 +21,9 @@ import org.hibernate.sql.ast.tree.from.TableGroup;
|
|||
public class SimpleFromClauseAccessImpl implements FromClauseAccess {
|
||||
protected final Map<NavigablePath, TableGroup> tableGroupMap = new HashMap<>();
|
||||
|
||||
public SimpleFromClauseAccessImpl() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public TableGroup findTableGroup(NavigablePath navigablePath) {
|
||||
return tableGroupMap.get( navigablePath );
|
||||
|
|
|
@ -12,54 +12,16 @@ package org.hibernate.sql.ast.spi;
|
|||
* @author Steve Ebersole
|
||||
*/
|
||||
public interface SqlAppender {
|
||||
// todo (6.0) : add all the others sql keywords
|
||||
|
||||
String COMA_SEPARATOR = ", ";
|
||||
String NO_SEPARATOR = "";
|
||||
String COMA_SEPARATOR = ", ";
|
||||
String EMPTY_STRING = " ";
|
||||
|
||||
String OPEN_PARENTHESIS = "(";
|
||||
String CLOSE_PARENTHESIS = ")";
|
||||
|
||||
String SELECT_KEYWORD = "select";
|
||||
String DISTINCT_KEYWORD = "distinct";
|
||||
String ORDER_BY_KEYWORD = "order by";
|
||||
String COLLATE_KEYWORD = "collate";
|
||||
|
||||
String FROM_KEYWORD = "from";
|
||||
String JOIN_KEYWORD = "join";
|
||||
String AS_KEYWORD = "as";
|
||||
String ON_KEYWORD = "on";
|
||||
|
||||
String WHERE_KEYWORD = "where";
|
||||
|
||||
String PARAM_MARKER = "?";
|
||||
|
||||
String NOT_KEYWORD = "not";
|
||||
String IS_KEYWORD = "is";
|
||||
|
||||
String NULL_KEYWORD = "null";
|
||||
String IS_NULL_FRAGMENT = IS_KEYWORD + EMPTY_STRING + NULL_KEYWORD;
|
||||
String IS_NOT_NULL_FRAGMENT = IS_KEYWORD + EMPTY_STRING + NOT_KEYWORD + EMPTY_STRING + NULL_KEYWORD;
|
||||
|
||||
String AND_KEYWORD = "and";
|
||||
String OR_KEYWORD = "or";
|
||||
|
||||
String LIKE_KEYWORD = "like";
|
||||
String ESCAPE_KEYWORD = "escape";
|
||||
|
||||
String BETWEEN_KEYWORD = "between";
|
||||
|
||||
String IN_KEYWORD = "in";
|
||||
|
||||
String CASE_KEYWORD = "case";
|
||||
String WHEN_KEYWORD = "when";
|
||||
String THEN_KEYWORD = "then";
|
||||
String ELSE_KEYWORD = "else";
|
||||
String END_KEYWORD = "end";
|
||||
|
||||
String ASC_KEYWORD = "asc";
|
||||
String DESC_KEYWORD = "desc";
|
||||
|
||||
/**
|
||||
* Add the passed fragment into the in-flight buffer
|
||||
|
|
|
@ -16,7 +16,6 @@ import org.hibernate.metamodel.mapping.MappingModelExpressable;
|
|||
import org.hibernate.sql.ast.spi.SqlAstWalker;
|
||||
import org.hibernate.sql.ast.spi.SqlSelection;
|
||||
import org.hibernate.sql.results.internal.SqlSelectionImpl;
|
||||
import org.hibernate.type.descriptor.ValueExtractor;
|
||||
import org.hibernate.type.descriptor.java.JavaTypeDescriptor;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
||||
|
@ -26,8 +25,7 @@ import org.hibernate.type.spi.TypeConfiguration;
|
|||
* @author Steve Ebersole
|
||||
*/
|
||||
public class ColumnReference implements Expression {
|
||||
private final String columnExpression;
|
||||
private final String qualifier;
|
||||
private final String referenceExpression;
|
||||
private final JdbcMapping jdbcMapping;
|
||||
|
||||
public ColumnReference(
|
||||
|
@ -35,17 +33,29 @@ public class ColumnReference implements Expression {
|
|||
String qualifier,
|
||||
JdbcMapping jdbcMapping,
|
||||
SessionFactoryImplementor sessionFactory) {
|
||||
this.columnExpression = columnExpression;
|
||||
this.qualifier = qualifier;
|
||||
this(
|
||||
qualifier == null
|
||||
? columnExpression
|
||||
: qualifier + "." + columnExpression,
|
||||
jdbcMapping,
|
||||
sessionFactory
|
||||
);
|
||||
}
|
||||
|
||||
public ColumnReference(
|
||||
String referenceExpression,
|
||||
JdbcMapping jdbcMapping,
|
||||
SessionFactoryImplementor sessionFactory) {
|
||||
this.referenceExpression = referenceExpression;
|
||||
this.jdbcMapping = jdbcMapping;
|
||||
}
|
||||
|
||||
public String getReferencedColumnExpression() {
|
||||
return columnExpression;
|
||||
public String getExpressionText() {
|
||||
return referenceExpression;
|
||||
}
|
||||
|
||||
public String getQualifier() {
|
||||
return qualifier;
|
||||
public String renderSqlFragment(SessionFactoryImplementor sessionFactory) {
|
||||
return getExpressionText();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -77,7 +87,7 @@ public class ColumnReference implements Expression {
|
|||
Locale.ROOT,
|
||||
"%s(%s)",
|
||||
getClass().getSimpleName(),
|
||||
columnExpression
|
||||
referenceExpression
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -91,21 +101,11 @@ public class ColumnReference implements Expression {
|
|||
}
|
||||
|
||||
final ColumnReference that = (ColumnReference) o;
|
||||
return Objects.equals( qualifier, that.qualifier )
|
||||
&& Objects.equals( columnExpression, that.columnExpression );
|
||||
return Objects.equals( referenceExpression, that.referenceExpression );
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int hash = Objects.hash( columnExpression );
|
||||
return qualifier == null ? hash : hash + Objects.hash( qualifier );
|
||||
}
|
||||
|
||||
public String renderSqlFragment(SessionFactoryImplementor sessionFactory) {
|
||||
if ( getQualifier() != null ) {
|
||||
return getQualifier() + '.' + getReferencedColumnExpression();
|
||||
}
|
||||
|
||||
return getReferencedColumnExpression();
|
||||
return referenceExpression.hashCode();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,6 +12,8 @@ import java.util.HashMap;
|
|||
import java.util.Map;
|
||||
import java.util.function.BiConsumer;
|
||||
|
||||
import org.hibernate.internal.util.collections.CollectionHelper;
|
||||
import org.hibernate.query.sqm.internal.DomainParameterXref;
|
||||
import org.hibernate.sql.exec.spi.JdbcParameter;
|
||||
import org.hibernate.sql.exec.spi.JdbcParameterBinding;
|
||||
import org.hibernate.sql.exec.spi.JdbcParameterBindings;
|
||||
|
@ -24,6 +26,12 @@ import org.hibernate.sql.exec.spi.JdbcParameterBindings;
|
|||
public class JdbcParameterBindingsImpl implements JdbcParameterBindings {
|
||||
private Map<JdbcParameter, JdbcParameterBinding> bindingMap;
|
||||
|
||||
public JdbcParameterBindingsImpl(DomainParameterXref domainParameterXref) {
|
||||
if ( domainParameterXref.getSqmParameterCount() > 0 ) {
|
||||
bindingMap = CollectionHelper.mapOfSize( domainParameterXref.getSqmParameterCount() );
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addBinding(JdbcParameter parameter, JdbcParameterBinding binding) {
|
||||
if ( bindingMap == null ) {
|
||||
|
|
|
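JdbcParameterBindingsImpl now sizes its binding map up front from DomainParameterXref#getSqmParameterCount() (the counterpart getter added earlier in this commit) and skips allocating the map entirely when there are no parameters. I'm assuming CollectionHelper.mapOfSize picks a HashMap capacity large enough to hold that many entries without rehashing; a sketch of that sizing idea:

import java.util.HashMap;
import java.util.Map;

public class MapSizingSketch {
	// Create a HashMap that can hold expectedSize entries without resizing,
	// given the default load factor of 0.75.
	static <K, V> Map<K, V> mapOfSize(int expectedSize) {
		final int capacity = (int) Math.ceil( expectedSize / 0.75 ) + 1;
		return new HashMap<>( capacity );
	}

	public static void main(String[] args) {
		// e.g. a query with 3 SQM parameters -> one map sized once, never rehashed
		Map<String, Object> bindings = mapOfSize( 3 );
		bindings.put( "param1", 1 );
		bindings.put( "param2", 2 );
		bindings.put( "param3", 3 );
		System.out.println( bindings.size() );
	}
}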
@ -34,6 +34,7 @@ public class JdbcValuesResultSetImpl extends AbstractJdbcValues {
|
|||
private final ExecutionContext executionContext;
|
||||
|
||||
private final SqlSelection[] sqlSelections;
|
||||
private final Object[] currentRowJdbcValues;
|
||||
|
||||
// todo (6.0) - manage limit-based skips
|
||||
|
||||
|
@ -43,7 +44,6 @@ public class JdbcValuesResultSetImpl extends AbstractJdbcValues {
|
|||
// increments position to 0, which is the first row
|
||||
private int position = -1;
|
||||
|
||||
private Object[] currentRowJdbcValues;
|
||||
|
||||
public JdbcValuesResultSetImpl(
|
||||
ResultSetAccess resultSetAccess,
|
||||
|
@ -60,6 +60,7 @@ public class JdbcValuesResultSetImpl extends AbstractJdbcValues {
|
|||
this.numberOfRowsToProcess = interpretNumberOfRowsToProcess( queryOptions );
|
||||
|
||||
this.sqlSelections = valuesMapping.getSqlSelections().toArray( new SqlSelection[0] );
|
||||
this.currentRowJdbcValues = new Object[ sqlSelections.length ];
|
||||
}
|
||||
|
||||
private static int interpretNumberOfRowsToProcess(QueryOptions queryOptions) {
|
||||
|
@ -98,8 +99,6 @@ public class JdbcValuesResultSetImpl extends AbstractJdbcValues {
|
|||
|
||||
@Override
|
||||
protected final boolean processNext(RowProcessingState rowProcessingState) {
|
||||
currentRowJdbcValues = null;
|
||||
|
||||
if ( numberOfRowsToProcess != -1 && position > numberOfRowsToProcess ) {
|
||||
// numberOfRowsToProcess != -1 means we had some limit, and
|
||||
// position > numberOfRowsToProcess means we have exceeded the
|
||||
|
@ -119,7 +118,7 @@ public class JdbcValuesResultSetImpl extends AbstractJdbcValues {
|
|||
}
|
||||
|
||||
try {
|
||||
currentRowJdbcValues = readCurrentRowValues( rowProcessingState );
|
||||
readCurrentRowValues( rowProcessingState );
|
||||
return true;
|
||||
}
|
||||
catch (SQLException e) {
|
||||
|
@ -137,17 +136,14 @@ public class JdbcValuesResultSetImpl extends AbstractJdbcValues {
|
|||
);
|
||||
}
|
||||
|
||||
private Object[] readCurrentRowValues(RowProcessingState rowProcessingState) throws SQLException {
|
||||
final int numberOfSqlSelections = sqlSelections.length;
|
||||
final Object[] row = new Object[numberOfSqlSelections];
|
||||
for ( SqlSelection sqlSelection : sqlSelections ) {
|
||||
row[ sqlSelection.getValuesArrayPosition() ] = sqlSelection.getJdbcValueExtractor().extract(
|
||||
private void readCurrentRowValues(RowProcessingState rowProcessingState) throws SQLException {
|
||||
for ( final SqlSelection sqlSelection : sqlSelections ) {
|
||||
currentRowJdbcValues[ sqlSelection.getValuesArrayPosition() ] = sqlSelection.getJdbcValueExtractor().extract(
|
||||
resultSetAccess.getResultSet(),
|
||||
sqlSelection.getJdbcResultSetIndex(),
|
||||
executionContext.getSession()
|
||||
);
|
||||
}
|
||||
return row;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -12,7 +12,6 @@ import org.hibernate.engine.spi.SessionFactoryImplementor;
|
|||
import org.hibernate.query.named.RowReaderMemento;
|
||||
import org.hibernate.sql.exec.spi.Callback;
|
||||
import org.hibernate.sql.results.spi.DomainResultAssembler;
|
||||
import org.hibernate.sql.results.spi.EntityInitializer;
|
||||
import org.hibernate.sql.results.spi.Initializer;
|
||||
import org.hibernate.sql.results.spi.JdbcValuesSourceProcessingOptions;
|
||||
import org.hibernate.sql.results.spi.JdbcValuesSourceProcessingState;
|
||||
|
@ -35,6 +34,8 @@ public class StandardRowReader<T> implements RowReader<T> {
|
|||
private final int assemblerCount;
|
||||
private final Callback callback;
|
||||
|
||||
private final Object[] resultRow;
|
||||
|
||||
public StandardRowReader(
|
||||
List<DomainResultAssembler> resultAssemblers,
|
||||
List<Initializer> initializers,
|
||||
|
@ -46,6 +47,8 @@ public class StandardRowReader<T> implements RowReader<T> {
|
|||
|
||||
this.assemblerCount = resultAssemblers.size();
|
||||
this.callback = callback;
|
||||
|
||||
this.resultRow = new Object[assemblerCount];
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -68,6 +71,7 @@ public class StandardRowReader<T> implements RowReader<T> {
|
|||
return rowTransformer.determineNumberOfResultElements( assemblerCount );
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public T readRow(RowProcessingState rowProcessingState, JdbcValuesSourceProcessingOptions options) {
|
||||
LOG.info( "---Processing Row---" );
|
||||
|
@ -75,14 +79,13 @@ public class StandardRowReader<T> implements RowReader<T> {
|
|||
|
||||
// finally assemble the results
|
||||
|
||||
final Object[] result = new Object[assemblerCount];
|
||||
for ( int i = 0; i < assemblerCount; i++ ) {
|
||||
result[i] = resultAssemblers.get( i ).assemble( rowProcessingState, options );
|
||||
resultRow[i] = resultAssemblers.get( i ).assemble( rowProcessingState, options );
|
||||
}
|
||||
|
||||
afterRow( rowProcessingState, options );
|
||||
|
||||
return rowTransformer.transformRow( result );
|
||||
return rowTransformer.transformRow( resultRow );
|
||||
}
|
||||
|
||||
private void afterRow(RowProcessingState rowProcessingState, JdbcValuesSourceProcessingOptions options) {
|
||||
|
|
|
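The last few hunks share one theme: buffers that used to be allocated per row (currentRowJdbcValues in JdbcValuesResultSetImpl, resultRow in StandardRowReader, resolvedValues in AbstractCompositeInitializer below) are now allocated once, sized from the selection/assembler count, and simply overwritten on each row. A reduced sketch of the pattern (types and names are illustrative, not the real RowReader SPI):

import java.util.List;
import java.util.function.Supplier;

public class RowBufferReuseSketch {
	private final List<Supplier<Object>> assemblers;
	private final Object[] resultRow;   // allocated once, in the constructor

	public RowBufferReuseSketch(List<Supplier<Object>> assemblers) {
		this.assemblers = assemblers;
		this.resultRow = new Object[ assemblers.size() ];
	}

	public Object[] readRow() {
		// Each row overwrites the previous values; callers must consume (or copy)
		// the array before the next row is read, as the row transformer does.
		for ( int i = 0; i < resultRow.length; i++ ) {
			resultRow[i] = assemblers.get( i ).get();
		}
		return resultRow;
	}

	public static void main(String[] args) {
		RowBufferReuseSketch reader = new RowBufferReuseSketch(
				List.<Supplier<Object>>of( () -> "first-col", () -> 42 )
		);
		System.out.println( java.util.Arrays.toString( reader.readRow() ) );
	}
}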
@ -10,6 +10,7 @@ import java.util.HashMap;
|
|||
import java.util.Map;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import org.hibernate.internal.util.collections.CollectionHelper;
|
||||
import org.hibernate.metamodel.mapping.EmbeddableValuedModelPart;
|
||||
import org.hibernate.metamodel.mapping.SingularAttributeMapping;
|
||||
import org.hibernate.metamodel.mapping.StateArrayContributorMapping;
|
||||
|
@ -33,11 +34,11 @@ public abstract class AbstractCompositeInitializer extends AbstractFetchParentAc
|
|||
private final EmbeddableValuedModelPart embeddedModelPartDescriptor;
|
||||
private final FetchParentAccess fetchParentAccess;
|
||||
|
||||
private final Map<StateArrayContributorMapping, DomainResultAssembler> assemblerMap = new HashMap<>();
|
||||
private final Map<StateArrayContributorMapping, DomainResultAssembler> assemblerMap;
|
||||
|
||||
// per-row state
|
||||
private final Object[] resolvedValues;
|
||||
private Object compositeInstance;
|
||||
private Object[] resolvedValues;
|
||||
|
||||
|
||||
public AbstractCompositeInitializer(
|
||||
|
@ -49,7 +50,11 @@ public abstract class AbstractCompositeInitializer extends AbstractFetchParentAc
|
|||
this.embeddedModelPartDescriptor = resultDescriptor.getReferencedMappingContainer();
|
||||
this.fetchParentAccess = fetchParentAccess;
|
||||
|
||||
embeddedModelPartDescriptor.getEmbeddableTypeDescriptor().visitStateArrayContributors(
|
||||
final int numOfAttrs = embeddedModelPartDescriptor.getEmbeddableTypeDescriptor().getNumberOfAttributeMappings();
|
||||
this.resolvedValues = new Object[ numOfAttrs ];
|
||||
this.assemblerMap = CollectionHelper.mapOfSize( numOfAttrs );
|
||||
|
||||
this.embeddedModelPartDescriptor.getEmbeddableTypeDescriptor().visitStateArrayContributors(
|
||||
stateArrayContributor -> {
|
||||
final Fetch fetch = resultDescriptor.findFetch( stateArrayContributor.getFetchableName() );
|
||||
|
||||
|
@ -60,6 +65,7 @@ public abstract class AbstractCompositeInitializer extends AbstractFetchParentAc
|
|||
assemblerMap.put( stateArrayContributor, stateAssembler );
|
||||
}
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -129,8 +135,6 @@ public abstract class AbstractCompositeInitializer extends AbstractFetchParentAc
|
|||
compositeInstance
|
||||
);
|
||||
|
||||
resolvedValues = new Object[ assemblerMap.size() ];
|
||||
|
||||
for ( Map.Entry<StateArrayContributorMapping, DomainResultAssembler> entry : assemblerMap.entrySet() ) {
|
||||
final Object contributorValue = entry.getValue().assemble(
|
||||
rowProcessingState,
|
||||
|
@ -150,7 +154,6 @@ public abstract class AbstractCompositeInitializer extends AbstractFetchParentAc
|
|||
@Override
|
||||
public void finishUpRow(RowProcessingState rowProcessingState) {
|
||||
compositeInstance = null;
|
||||
resolvedValues = null;
|
||||
|
||||
clearParentResolutionListeners();
|
||||
}
|
||||
|
|
|
@ -44,6 +44,7 @@ import org.hibernate.testing.orm.junit.SessionFactory;
|
|||
import org.hibernate.testing.orm.junit.SessionFactoryScope;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.endsWith;
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
import static org.hamcrest.CoreMatchers.instanceOf;
|
||||
import static org.hamcrest.CoreMatchers.is;
|
||||
|
@ -170,7 +171,6 @@ public class SmokeTests {
|
|||
final Expression selectedExpression = ( (SqlSelectionImpl) sqlSelection ).getWrappedSqlExpression();
|
||||
assertThat( selectedExpression, instanceOf( ColumnReference.class ) );
|
||||
final ColumnReference columnReference = (ColumnReference) selectedExpression;
|
||||
assertThat( columnReference.getReferencedColumnExpression(), is( "gender" ) );
|
||||
assertThat( columnReference.renderSqlFragment( scope.getSessionFactory() ), is( "s1_0.gender" ) );
|
||||
|
||||
final MappingModelExpressable selectedExpressable = selectedExpression.getExpressionType();
|
||||
|
|
|
@ -19,6 +19,7 @@ log4j.rootLogger=info, stdout
|
|||
|
||||
log4j.logger.org.hibernate.orm.graph=debug
|
||||
log4j.logger.org.hibernate.orm.query.sqm=debug
|
||||
log4j.logger.org.hibernate.orm.query.hql=debug
|
||||
|
||||
|
||||
log4j.logger.org.hibernate.tool.hbm2ddl=trace
|
||||
|
|