* Handle quoted identifiers in HQL and the Ordering parser

* Switch from the "expression" to the "expressionOrPredicate" rule in the HQL grammar where it makes sense, as required by some HQL tests
* Clean up parser rule ordering to allow more keywords in the identifier rule
* Implement literal support for Ordering parser
* Add special AvgFunction as needed by H2, HSQL, DB2, SQL Server and Sybase that casts arguments to double if necessary
* Fix incorrect deduplication of order-by fragments when a plural attribute is fetched multiple times
* Implement support for de-referencing any-valued mappings in HQL
* Avoid unnecessary entity subtypes in polymorphic split queries if a base type also matches the requested type
* Implement pagination support for polymorphic split queries
* Clean up path part resolving by removing lots of duplicate code
* Align HQL parsing expectations with the expected behavior of 5.x
* Add a method to `JavaType` that allows determining whether a type can be widened to another, which is used for arithmetic type resolution
* Implement validations for fetch owner checking
* Fix issues with id table creation due to missing column lengths in the column DDL type
* Fix issues and add some optimizations related to multi-table delete handling
* Add the notion of a special "implicit" alias to avoid generating a unique alias for unaliased or implicit HQL joins
* Properly implement multiple bag fetch validation
* Make sure filter predicates are applied for all plural attribute joins
* Fix some issues with undecidable parameter type inference
* Fix some issues with negated SQM predicates not being converted to the proper SQL AST predicates
* Fix issues with qualifying DML target referencing columns
* Fix `is null` semantics for tuples referring to embeddable types
* Capture necessary details from JdbcValuesMetadata in the cached data to avoid executing a query on a cache hit when types should be inferred
* Get rid of the special CollectionPropertyNames and write up a migration guide section for the replacements
Christian Beikov 2021-11-23 18:16:48 +01:00
parent 39dae088ec
commit cbcec73d4f
153 changed files with 3141 additions and 1572 deletions

View File

@ -270,6 +270,8 @@ exec sp_dboption $SYBASE_DB, 'full logging for alter table', true
go
sp_dboption $SYBASE_DB, \"select into\", true
go
sp_dboption tempdb, 'ddl in tran', true
go
EOSQL
/opt/sybase/OCS-16_0/bin/isql -Usa -P myPassword -S MYSYBASE -i ./init1.sql

View File

@ -870,7 +870,7 @@ public class FirebirdDialect extends Dialect {
return getVersion() < 210
? super.getFallbackSqmMutationStrategy( entityDescriptor, runtimeModelCreationContext )
: new GlobalTemporaryTableStrategy(
new IdTable( entityDescriptor, name -> "HT_" + name, this ),
new IdTable( entityDescriptor, name -> "HT_" + name, this, runtimeModelCreationContext ),
() -> new TempIdTableExporter( false, this::getTypeName ) {
@Override
protected String getCreateOptions() {

View File

@ -388,7 +388,7 @@ public class InformixDialect extends Dialect {
EntityMappingType rootEntityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new LocalTemporaryTableStrategy(
new IdTable( rootEntityDescriptor, basename -> "HT_" + basename, this ),
new IdTable( rootEntityDescriptor, basename -> "HT_" + basename, this, runtimeModelCreationContext ),
() -> new TempIdTableExporter( true, this::getTypeName ) {
@Override
protected String getCreateCommand() {

View File

@ -413,7 +413,7 @@ public class IngresDialect extends Dialect {
EntityMappingType rootEntityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new GlobalTemporaryTableStrategy(
new IdTable( rootEntityDescriptor, name -> "session." + name, this ),
new IdTable( rootEntityDescriptor, name -> "session." + name, this, runtimeModelCreationContext ),
() -> new TempIdTableExporter( false, this::getTypeName ) {
@Override
protected String getCreateOptions() {

View File

@ -295,7 +295,7 @@ public class TeradataDialect extends Dialect {
EntityMappingType rootEntityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new GlobalTemporaryTableStrategy(
new IdTable( rootEntityDescriptor, basename -> "HT_" + basename, this ),
new IdTable( rootEntityDescriptor, basename -> "HT_" + basename, this, runtimeModelCreationContext ),
() -> new TempIdTableExporter( false, this::getTypeName ) {
@Override
public String getCreateOptions() {

View File

@ -314,8 +314,12 @@ public class TimesTenDialect extends Dialect {
EntityMappingType rootEntityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new GlobalTemporaryTableStrategy(
new IdTable( rootEntityDescriptor,
name -> name.length() > 30 ? name.substring( 0, 30 ) : name, this ),
new IdTable(
rootEntityDescriptor,
name -> name.length() > 30 ? name.substring( 0, 30 ) : name,
this,
runtimeModelCreationContext
),
() -> new TempIdTableExporter( false, this::getTypeName ) {
@Override
protected String getCreateOptions() {

View File

@ -42,6 +42,9 @@ BIG_DECIMAL_SUFFIX : [bB] [dD];
fragment
BIG_INTEGER_SUFFIX : [bB] [iI];
// Although this is not 100% correct because this accepts leading zeros,
// we stick to this because temporal literals use this rule for simplicity.
// Since we don't support octal literals, this shouldn't really be a big issue
fragment
INTEGER_NUMBER
: DIGIT+
@ -73,8 +76,8 @@ fragment SINGLE_QUOTE : '\'';
fragment DOUBLE_QUOTE : '"';
STRING_LITERAL
: DOUBLE_QUOTE ( ~('"') | ESCAPE_SEQUENCE | DOUBLE_QUOTE DOUBLE_QUOTE )* DOUBLE_QUOTE
| SINGLE_QUOTE ( ~('\'') | ESCAPE_SEQUENCE | SINGLE_QUOTE SINGLE_QUOTE )* SINGLE_QUOTE
: DOUBLE_QUOTE ( ESCAPE_SEQUENCE | DOUBLE_QUOTE DOUBLE_QUOTE | ~('"') )* DOUBLE_QUOTE
| SINGLE_QUOTE ( ESCAPE_SEQUENCE | SINGLE_QUOTE SINGLE_QUOTE | ~('\'') )* SINGLE_QUOTE
;
fragment BACKSLASH : '\\';
@ -318,5 +321,5 @@ fragment
BACKTICK : '`';
QUOTED_IDENTIFIER
: BACKTICK ( ~([\\`]) | ESCAPE_SEQUENCE )* BACKTICK
: BACKTICK ( ESCAPE_SEQUENCE | '\\' BACKTICK | ~([`]) )* BACKTICK
;

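A rough sketch of the literal and identifier forms the reordered lexer rules are meant to accept; the entity and attribute names are made up for illustration:

    // session is an open org.hibernate.Session; Person/lastName are hypothetical
    // doubled single quotes inside a string literal and a backtick-quoted identifier
    List<Person> people = session.createQuery(
            "select p from Person p where p.lastName = 'O''Brien' order by p.`order`",
            Person.class )
        .getResultList();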
View File

@ -52,7 +52,7 @@ setClause
;
assignment
: dotIdentifierSequence EQUAL expression
: dotIdentifierSequence EQUAL expressionOrPredicate
;
insertStatement
@ -68,7 +68,7 @@ valuesList
;
values
: LEFT_PAREN expression (COMMA expression)* RIGHT_PAREN
: LEFT_PAREN expressionOrPredicate (COMMA expressionOrPredicate)* RIGHT_PAREN
;
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -116,20 +116,14 @@ pathRoot
: entityName identificationVariableDef?
;
/**
* Specialized dotIdentifierSequence for cases where we expect an entity-name. We handle it specially
* for the sake of performance. Specifically we concatenate together the entity name as we walk the
* parse tree. Relying on the `EntiytNameContext#getText` or `DotIdentifierSequenceContext#getText`
* performs walk to determine the name.
*/
entityName
returns [String fullNameText]
: (i=identifier { $fullNameText = _localctx.i.getText(); }) (DOT c=identifier { $fullNameText += ("." + _localctx.c.getText() ); })*
: identifier (DOT identifier)*
;
identificationVariableDef
: (AS identifier)
| IDENTIFIER
| QUOTED_IDENTIFIER
;
crossJoin
@ -171,19 +165,14 @@ selectionList
;
selection
: selectExpression resultIdentifier?
: selectExpression identificationVariableDef?
;
selectExpression
: dynamicInstantiation
| jpaSelectObjectSyntax
| mapEntrySelection
| expression
;
resultIdentifier
: (AS identifier)
| IDENTIFIER
| expressionOrPredicate
;
@ -206,11 +195,11 @@ dynamicInstantiationArgs
;
dynamicInstantiationArg
: dynamicInstantiationArgExpression (AS? identifier)?
: dynamicInstantiationArgExpression identificationVariableDef?
;
dynamicInstantiationArgExpression
: expression
: expressionOrPredicate
| dynamicInstantiation
;
@ -428,10 +417,10 @@ comparisonOperator
;
inList
: (ELEMENTS|INDICES) LEFT_PAREN dotIdentifierSequence RIGHT_PAREN # PersistentCollectionReferenceInList
| LEFT_PAREN expression (COMMA expression)* RIGHT_PAREN # ExplicitTupleInList
| LEFT_PAREN subQuery RIGHT_PAREN # SubQueryInList
| parameter # ParamInList
: (ELEMENTS|INDICES) LEFT_PAREN dotIdentifierSequence RIGHT_PAREN # PersistentCollectionReferenceInList
| LEFT_PAREN (expressionOrPredicate (COMMA expressionOrPredicate)*)? RIGHT_PAREN# ExplicitTupleInList
| LEFT_PAREN subQuery RIGHT_PAREN # SubQueryInList
| parameter # ParamInList
;
likeEscape
@ -444,16 +433,17 @@ likeEscape
expression
//highest to lowest precedence
: LEFT_PAREN expression RIGHT_PAREN # GroupedExpression
| LEFT_PAREN expression (COMMA expression)* RIGHT_PAREN # TupleExpression
| LEFT_PAREN subQuery RIGHT_PAREN # SubQueryExpression
| primaryExpression collationSpecification? # CollateExpression
| signOperator expression # UnaryExpression
| expression datetimeField # ToDurationExpression
| expression BY datetimeField # FromDurationExpression
| expression multiplicativeOperator expression # MultiplicationExpression
| expression additiveOperator expression # AdditionExpression
| expression DOUBLE_PIPE expression # ConcatenationExpression
: LEFT_PAREN expression RIGHT_PAREN # GroupedExpression
| LEFT_PAREN expressionOrPredicate (COMMA expressionOrPredicate)+ RIGHT_PAREN # TupleExpression
| LEFT_PAREN subQuery RIGHT_PAREN # SubQueryExpression
| primaryExpression collationSpecification? # CollateExpression
| signOperator numericLiteral # UnaryNumericLiteralExpression
| signOperator expression # UnaryExpression
| expression datetimeField # ToDurationExpression
| expression BY datetimeField # FromDurationExpression
| expression multiplicativeOperator expression # MultiplicationExpression
| expression additiveOperator expression # AdditionExpression
| expression DOUBLE_PIPE expression # ConcatenationExpression
;
primaryExpression
@ -464,8 +454,14 @@ primaryExpression
| entityIdReference # EntityIdExpression
| entityVersionReference # EntityVersionExpression
| entityNaturalIdReference # EntityNaturalIdExpression
| path # PathExpression
| syntacticDomainPath (pathContinuation)? # SyntacticPathExpression
| function # FunctionExpression
| generalPathFragment # GeneralPathExpression
;
expressionOrPredicate
: expression
| predicate
;
multiplicativeOperator
@ -506,15 +502,15 @@ caseList
;
simpleCaseList
: CASE expression (simpleCaseWhen)+ (caseOtherwise)? END
: CASE expressionOrPredicate (simpleCaseWhen)+ (caseOtherwise)? END
;
simpleCaseWhen
: WHEN expression THEN expression
: WHEN expression THEN expressionOrPredicate
;
caseOtherwise
: ELSE expression
: ELSE expressionOrPredicate
;
searchedCaseList
@ -522,24 +518,28 @@ searchedCaseList
;
searchedCaseWhen
: WHEN predicate THEN expression
: WHEN predicate THEN expressionOrPredicate
;
literal
: STRING_LITERAL
| INTEGER_LITERAL
| NULL
| TRUE
| FALSE
| numericLiteral
| binaryLiteral
| temporalLiteral
| generalizedLiteral
;
numericLiteral
: INTEGER_LITERAL
| LONG_LITERAL
| BIG_INTEGER_LITERAL
| FLOAT_LITERAL
| DOUBLE_LITERAL
| BIG_DECIMAL_LITERAL
| HEX_LITERAL
| NULL
| TRUE
| FALSE
| binaryLiteral
| temporalLiteral
| generalizedLiteral
;
binaryLiteral
@ -652,7 +652,7 @@ genericFunctionName
;
nonStandardFunctionArguments
: (DISTINCT | datetimeField COMMA)? expression (COMMA expression)*
: (DISTINCT | datetimeField COMMA)? expressionOrPredicate (COMMA expressionOrPredicate)*
;
jpaCollectionFunction
@ -897,11 +897,11 @@ positionFunctionStringArgument
;
cube
: CUBE LEFT_PAREN expression (COMMA expression)* RIGHT_PAREN
: CUBE LEFT_PAREN expressionOrPredicate (COMMA expressionOrPredicate)* RIGHT_PAREN
;
rollup
: ROLLUP LEFT_PAREN expression (COMMA expression)* RIGHT_PAREN
: ROLLUP LEFT_PAREN expressionOrPredicate (COMMA expressionOrPredicate)* RIGHT_PAREN
;
/**
@ -941,13 +941,15 @@ identifier
| COS
| COUNT
| CROSS
| CUBE
| CURRENT
| CURRENT_DATE
| CURRENT_INSTANT
| CURRENT_TIME
| CURRENT_TIMESTAMP
| DATE
| DAY
| DAY
| DATETIME
| DELETE
| DESC
| DISTINCT
@ -958,11 +960,13 @@ identifier
| ENTRY
| ESCAPE
| EVERY
| EXCEPT
| EXISTS
| EXP
| EXTRACT
| FETCH
| FILTER
| FIRST
| FLOOR
| FOR
| FORMAT
@ -971,6 +975,7 @@ identifier
| FUNCTION
| GREATEST
| GROUP
| HAVING
| HOUR
| ID
| IFNULL
@ -981,10 +986,12 @@ identifier
| INNER
| INSERT
| INSTANT
| INTERSECT
| INTO
| IS
| JOIN
| KEY
| LAST
| LEADING
| LEAST
| LEFT
@ -993,6 +1000,10 @@ identifier
| LIMIT
| LIST
| LN
| LOCAL
| LOCAL_DATE
| LOCAL_DATETIME
| LOCAL_TIME
| LOCATE
| LOWER
| MAP
@ -1000,7 +1011,6 @@ identifier
| MAXELEMENT
| MAXINDEX
| MEMBER
| MEMBER
| MICROSECOND
| MILLISECOND
| MIN
@ -1012,22 +1022,32 @@ identifier
| NANOSECOND
| NATURALID
| NEW
| NEXT
| NOT
| NULLIF
| NULLS
| OBJECT
| OF
| OFFSET
| OFFSET_DATETIME
| ON
| ONLY
| OR
| ORDER
| OUTER
| OVERLAY
| PAD
| PERCENT
| PLACING
| POSITION
| POWER
| QUARTER
| REPLACE
| RIGHT
| RIGHT
| ROLLUP
| ROUND
| ROW
| ROWS
| SECOND
| SELECT
| SET
@ -1041,6 +1061,7 @@ identifier
| SUM
| TAN
| THEN
| TIES
| TIME
| TIMESTAMP
| TIMEZONE_HOUR
@ -1049,6 +1070,7 @@ identifier
| TREAT
| TRIM
| TYPE
| UNION
| UPDATE
| UPPER
| VALUE
@ -1056,6 +1078,7 @@ identifier
| VERSION
| VERSIONED
| WEEK
| WHEN
| WHERE
| WITH
| YEAR) {

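A hedged sketch of the kind of query the switch to expressionOrPredicate allows, here a predicate used as a select item; the Person entity and its attributes are hypothetical:

    // session is an open org.hibernate.Session
    List<Object[]> rows = session.createQuery(
            "select p.name, p.age >= 18 from Person p",
            Object[].class )
        .getResultList();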
View File

@ -22,10 +22,13 @@ WS : ( ' ' | '\t' | '\f' | EOL ) -> skip;
fragment
EOL : [\r\n]+;
fragment
DIGIT : [0-9];
INTEGER_LITERAL : INTEGER_NUMBER ;
fragment
INTEGER_NUMBER : ('0' | '1'..'9' '0'..'9'*) ;
INTEGER_NUMBER : ('0' | '1'..'9' DIGIT*) ;
LONG_LITERAL : INTEGER_NUMBER ('l'|'L');
@ -34,7 +37,7 @@ BIG_INTEGER_LITERAL : INTEGER_NUMBER ('bi'|'BI') ;
HEX_LITERAL : '0' ('x'|'X') HEX_DIGIT+ ('l'|'L')? ;
fragment
HEX_DIGIT : ('0'..'9'|'a'..'f'|'A'..'F') ;
HEX_DIGIT : (DIGIT|'a'..'f'|'A'..'F') ;
OCTAL_LITERAL : '0' ('0'..'7')+ ('l'|'L')? ;
@ -42,10 +45,10 @@ FLOAT_LITERAL : FLOATING_POINT_NUMBER ('f'|'F')? ;
fragment
FLOATING_POINT_NUMBER
: ('0'..'9')+ '.' ('0'..'9')* EXPONENT?
| '.' ('0'..'9')+ EXPONENT?
| ('0'..'9')+ EXPONENT
| ('0'..'9')+
: DIGIT+ '.' DIGIT* EXPONENT?
| '.' DIGIT+ EXPONENT?
| DIGIT+ EXPONENT
| DIGIT+
;
DOUBLE_LITERAL : FLOATING_POINT_NUMBER ('d'|'D') ;
@ -53,15 +56,18 @@ DOUBLE_LITERAL : FLOATING_POINT_NUMBER ('d'|'D') ;
BIG_DECIMAL_LITERAL : FLOATING_POINT_NUMBER ('bd'|'BD') ;
fragment
EXPONENT : ('e'|'E') ('+'|'-')? ('0'..'9')+ ;
EXPONENT : ('e'|'E') ('+'|'-')? DIGIT+ ;
fragment SINGLE_QUOTE : '\'';
fragment DOUBLE_QUOTE : '"';
CHARACTER_LITERAL
: '\'' ( ESCAPE_SEQUENCE | ~('\''|'\\') ) '\'' {setText(getText().substring(1, getText().length()-1));}
: SINGLE_QUOTE ( ESCAPE_SEQUENCE | SINGLE_QUOTE SINGLE_QUOTE | ~('\'') ) SINGLE_QUOTE
;
STRING_LITERAL
: '"' ( ESCAPE_SEQUENCE | ~('\\'|'"') )* '"' {setText(getText().substring(1, getText().length()-1));}
| ('\'' ( ESCAPE_SEQUENCE | ~('\\'|'\'') )* '\'')+ {setText(getText().substring(1, getText().length()-1).replace("''", "'"));}
: DOUBLE_QUOTE ( ESCAPE_SEQUENCE | DOUBLE_QUOTE DOUBLE_QUOTE | ~('"') )* DOUBLE_QUOTE
| SINGLE_QUOTE ( ESCAPE_SEQUENCE | SINGLE_QUOTE SINGLE_QUOTE | ~('\'') )* SINGLE_QUOTE
;
fragment
@ -107,12 +113,19 @@ DESC : [dD] [eE] [sS] [cC] ( [eE] [nN] [dD] [iI] [nN] [gG] )?;
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Identifiers
fragment
LETTER : [a-zA-Z\u0080-\ufffe_$];
// Identifiers
IDENTIFIER
: ('a'..'z'|'A'..'Z'|'_'|'$'|'\u0080'..'\ufffe')('a'..'z'|'A'..'Z'|'_'|'$'|'0'..'9'|'\u0080'..'\ufffe')*
: LETTER (LETTER | DIGIT)*
;
fragment
BACKTICK : '`';
QUOTED_IDENTIFIER
: '`' ( ESCAPE_SEQUENCE | ~('\\'|'`') )* '`'
: BACKTICK ( ESCAPE_SEQUENCE | '\\' BACKTICK | ~([`]) )* BACKTICK
;

View File

@ -33,9 +33,9 @@ sortSpecification
;
expression
: function
| identifier
| dotIdentifier
: function # FunctionExpression
| identifier # IdentifierExpression
| dotIdentifier # DotIdentifierExpression
;
function
@ -52,7 +52,23 @@ packagedFunction
;
functionArguments
: OPEN_PAREN expression* CLOSE_PAREN
: OPEN_PAREN (functionArgument ( COMMA functionArgument )* )? CLOSE_PAREN
;
functionArgument
: expression
| literal
;
literal
: STRING_LITERAL
| INTEGER_LITERAL
| LONG_LITERAL
| BIG_INTEGER_LITERAL
| FLOAT_LITERAL
| DOUBLE_LITERAL
| BIG_DECIMAL_LITERAL
| HEX_LITERAL
;
collationSpecification
@ -69,6 +85,7 @@ nullsPrecedence
identifier
: IDENTIFIER
| QUOTED_IDENTIFIER
// keyword-as-identifier
| FIRST
| LAST
@ -78,5 +95,5 @@ identifier
;
dotIdentifier
: IDENTIFIER (DOT IDENTIFIER)+
: identifier (DOT identifier)+
;

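A hypothetical mapping showing what literal support in the Ordering grammar is meant to parse, assuming Hibernate's extended order-by fragment handling; the function choice and attribute names are illustrative:

    // literal arguments (1, 3) inside an order-by fragment
    @OneToMany(mappedBy = "parent")
    @OrderBy("substring(name, 1, 3) asc, id desc")
    private List<Child> children;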
View File

@ -26,25 +26,18 @@ public class Identifier implements Comparable<Identifier> {
* If passed text is {@code null}, {@code null} is returned.
* <p/>
* If passed text is surrounded in quote markers, the generated Identifier
* is considered quoted. Quote markers include back-ticks (`), and
* double-quotes (").
* is considered quoted. Quote markers include back-ticks (`),
* double-quotes (") and brackets ([ and ]).
*
* If the text, after trimming, contains a character that is not a valid identifier character,
* the identifier is treated as quoted.
*
* @param text The text form
*
* @return The identifier form, or {@code null} if text was {@code null}
*/
public static Identifier toIdentifier(String text) {
if ( StringHelper.isEmpty( text ) ) {
return null;
}
final String trimmedText = text.trim();
if ( isQuoted( trimmedText ) ) {
final String bareName = trimmedText.substring( 1, trimmedText.length() - 1 );
return new Identifier( bareName, true );
}
else {
return new Identifier( trimmedText, false );
}
return toIdentifier( text, false );
}
/**
@ -53,8 +46,11 @@ public class Identifier implements Comparable<Identifier> {
* If passed text is {@code null}, {@code null} is returned.
* <p/>
* If passed text is surrounded in quote markers, the generated Identifier
* is considered quoted. Quote markers include back-ticks (`), and
* double-quotes (").
* is considered quoted. Quote markers include back-ticks (`),
* double-quotes (") and brackets ([ and ]).
*
* If the text, after trimming, contains a character that is not a valid identifier character,
* the identifier is treated as quoted.
*
* @param text The text form
* @param quote Whether to quote unquoted text forms
@ -62,17 +58,65 @@ public class Identifier implements Comparable<Identifier> {
* @return The identifier form, or {@code null} if text was {@code null}
*/
public static Identifier toIdentifier(String text, boolean quote) {
return toIdentifier( text, quote, true );
}
/**
* Means to generate an {@link Identifier} instance from its simple text form.
* <p/>
* If passed text is {@code null}, {@code null} is returned.
* <p/>
* If passed text is surrounded in quote markers, the generated Identifier
* is considered quoted. Quote markers include back-ticks (`),
* double-quotes (") and brackets ([ and ]).
*
* @param text The text form
* @param quote Whether to quote unquoted text forms
* @param quoteOnNonIdentifierChar Controls whether to treat the result as quoted if text contains characters that are invalid for identifiers
*
* @return The identifier form, or {@code null} if text was {@code null}
*/
public static Identifier toIdentifier(String text, boolean quote, boolean quoteOnNonIdentifierChar) {
if ( StringHelper.isEmpty( text ) ) {
return null;
}
final String trimmedText = text.trim();
if ( isQuoted( trimmedText ) ) {
final String bareName = trimmedText.substring( 1, trimmedText.length() - 1 );
return new Identifier( bareName, true );
int start = 0;
int end = text.length();
while ( start < end ) {
if ( !Character.isWhitespace( text.charAt( start ) ) ) {
break;
}
start++;
}
else {
return new Identifier( trimmedText, quote );
while ( start < end ) {
if ( !Character.isWhitespace( text.charAt( end - 1 ) ) ) {
break;
}
end--;
}
if ( isQuoted( text, start, end ) ) {
start++;
end--;
quote = true;
}
else if ( quoteOnNonIdentifierChar && !quote ) {
// Check the letters to determine if we must quote the text
char c = text.charAt( start );
if ( !Character.isLetter( c ) && c != '_' ) {
// SQL identifiers must begin with a letter or underscore
quote = true;
}
else {
for ( int i = start + 1; i < end; i++ ) {
c = text.charAt( i );
if ( !Character.isLetterOrDigit( c ) && c != '_' ) {
quote = true;
break;
}
}
}
}
return new Identifier( text.substring( start, end ), quote );
}
/**
@ -91,9 +135,21 @@ public class Identifier implements Comparable<Identifier> {
* @return {@code true} if the given identifier text is considered quoted; {@code false} otherwise.
*/
public static boolean isQuoted(String name) {
return ( name.startsWith( "`" ) && name.endsWith( "`" ) )
|| ( name.startsWith( "[" ) && name.endsWith( "]" ) )
|| ( name.startsWith( "\"" ) && name.endsWith( "\"" ) );
return isQuoted( name, 0, name.length() );
}
public static boolean isQuoted(String name, int start, int end) {
if ( start + 2 < end ) {
switch ( name.charAt( start ) ) {
case '`':
return name.charAt( end - 1 ) == '`';
case '[':
return name.charAt( end - 1 ) == ']';
case '"':
return name.charAt( end - 1 ) == '"';
}
}
return false;
}
public static String unQuote(String name) {

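Based on the logic above, the intended quoting behaviour can be illustrated with a few calls (input values are illustrative):

    Identifier.toIdentifier( "`select`" );    // quoted, bare name "select"
    Identifier.toIdentifier( "[select]" );    // quoted, bracket markers are now recognized
    Identifier.toIdentifier( "first name" );  // treated as quoted: contains a non-identifier character
    Identifier.toIdentifier( "first_name" );  // plain unquoted identifier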
View File

@ -260,7 +260,7 @@ public abstract class AbstractTransactSQLDialect extends Dialect {
EntityMappingType entityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new LocalTemporaryTableStrategy(
new IdTable( entityDescriptor, basename -> "#" + basename, this ),
new IdTable( entityDescriptor, basename -> "#" + basename, this, runtimeModelCreationContext ),
() -> new TempIdTableExporter( true, this::getTypeName ) {
@Override
protected String getCreateCommand() {

View File

@ -34,6 +34,7 @@ import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.mutation.internal.cte.CteStrategy;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableMutationStrategy;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.SqlAstTranslatorFactory;
import org.hibernate.sql.ast.spi.SqlAppender;
@ -155,6 +156,9 @@ public class DB2Dialect extends Dialect {
public void initializeFunctionRegistry(QueryEngine queryEngine) {
super.initializeFunctionRegistry( queryEngine );
// AVG by default uses the input type, so we possibly need to cast the argument type, hence a special function
CommonFunctionFactory.avg_castingNonDoubleArguments( this, queryEngine, SqlAstNodeRenderingMode.DEFAULT );
CommonFunctionFactory.cot( queryEngine );
CommonFunctionFactory.degrees( queryEngine );
CommonFunctionFactory.log( queryEngine );

View File

@ -782,7 +782,12 @@ public class DerbyDialect extends Dialect {
EntityMappingType rootEntityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new LocalTemporaryTableStrategy(
new IdTable( rootEntityDescriptor, basename -> "session.HT_" + basename, this ),
new IdTable(
rootEntityDescriptor,
basename -> "session.HT_" + basename,
this,
runtimeModelCreationContext
),
() -> new TempIdTableExporter( true, this::getTypeName ) {
@Override
protected String getCreateCommand() {

View File

@ -513,7 +513,7 @@ public abstract class Dialect implements ConversionContext {
//aggregate functions, supported on every database
CommonFunctionFactory.aggregates( this, queryEngine, SqlAstNodeRenderingMode.DEFAULT );
CommonFunctionFactory.aggregates( this, queryEngine, SqlAstNodeRenderingMode.DEFAULT, "||", null );
//the ANSI SQL-defined aggregate functions any() and every() are only
//supported on one database, but can be emulated using sum() and case,
@ -1834,7 +1834,7 @@ public abstract class Dialect implements ConversionContext {
EntityMappingType entityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new PersistentTableStrategy(
new IdTable( entityDescriptor, name -> name, this ),
new IdTable( entityDescriptor, name -> name, this, runtimeModelCreationContext ),
AfterUseAction.CLEAN,
PhysicalIdTableExporter::new,
runtimeModelCreationContext.getSessionFactory()

View File

@ -192,7 +192,9 @@ public class H2Dialect extends Dialect {
super.initializeFunctionRegistry( queryEngine );
// H2 needs an actual argument type for aggregates like SUM, AVG, MIN, MAX to determine the result type
CommonFunctionFactory.aggregates( this, queryEngine, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
CommonFunctionFactory.aggregates( this, queryEngine, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER, "||", null );
// AVG by default uses the input type, so we possibly need to cast the argument type, hence a special function
CommonFunctionFactory.avg_castingNonDoubleArguments( this, queryEngine, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
CommonFunctionFactory.pi( queryEngine );
CommonFunctionFactory.cot( queryEngine );
@ -374,7 +376,7 @@ public class H2Dialect extends Dialect {
EntityMappingType entityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new LocalTemporaryTableStrategy(
new IdTable( entityDescriptor, basename -> "HT_" + basename, this ),
new IdTable( entityDescriptor, basename -> "HT_" + basename, this, runtimeModelCreationContext ),
this::getTypeName,
AfterUseAction.CLEAN,
TempTableDdlTransactionHandling.NONE,

View File

@ -128,7 +128,7 @@ public class HANAColumnStoreDialect extends AbstractHANADialect {
EntityMappingType entityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new GlobalTemporaryTableStrategy(
new IdTable( entityDescriptor, basename -> "HT_" + basename, this ),
new IdTable( entityDescriptor, basename -> "HT_" + basename, this, runtimeModelCreationContext ),
() -> new PhysicalIdTableExporter() {
@Override
protected String getCreateCommand() {

View File

@ -50,7 +50,7 @@ public class HANARowStoreDialect extends AbstractHANADialect {
EntityMappingType entityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new GlobalTemporaryTableStrategy(
new IdTable( entityDescriptor, basename -> "HT_" + basename, this ),
new IdTable( entityDescriptor, basename -> "HT_" + basename, this, runtimeModelCreationContext ),
() -> new PhysicalIdTableExporter() {
@Override
protected String getCreateCommand() {

View File

@ -55,6 +55,7 @@ import org.hibernate.query.sqm.mutation.internal.idtable.IdTable;
import org.hibernate.query.sqm.mutation.internal.idtable.LocalTemporaryTableStrategy;
import org.hibernate.query.sqm.mutation.internal.idtable.TempIdTableExporter;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableMutationStrategy;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.SqlAstTranslatorFactory;
import org.hibernate.sql.ast.spi.SqlAppender;
@ -155,6 +156,9 @@ public class HSQLDialect extends Dialect {
public void initializeFunctionRegistry(QueryEngine queryEngine) {
super.initializeFunctionRegistry( queryEngine );
// AVG by default uses the input type, so we possibly need to cast the argument type, hence a special function
CommonFunctionFactory.avg_castingNonDoubleArguments( this, queryEngine, SqlAstNodeRenderingMode.DEFAULT );
CommonFunctionFactory.cot( queryEngine );
CommonFunctionFactory.radians( queryEngine );
CommonFunctionFactory.degrees( queryEngine );
@ -511,7 +515,7 @@ public class HSQLDialect extends Dialect {
if ( version < 200 ) {
return new GlobalTemporaryTableStrategy(
new IdTable( rootEntityDescriptor, name -> "HT_" + name, this ),
new IdTable( rootEntityDescriptor, name -> "HT_" + name, this, runtimeModelCreationContext ),
() -> new TempIdTableExporter( false, this::getTypeName ),
// Version 1.8 GLOBAL TEMPORARY table definitions persist beyond the end
// of the session (by default, data is cleared at commit).
@ -523,7 +527,7 @@ public class HSQLDialect extends Dialect {
return new LocalTemporaryTableStrategy(
// With HSQLDB 2.0, the table name is qualified with MODULE to assist the drop
// statement (in-case there is a global name beginning with HT_)
new IdTable( rootEntityDescriptor, name -> "MODULE.HT_" + name, this ),
new IdTable( rootEntityDescriptor, name -> "MODULE.HT_" + name, this, runtimeModelCreationContext ),
() -> new TempIdTableExporter( true, this::getTypeName ) {
@Override
protected String getCreateCommand() {

View File

@ -697,7 +697,7 @@ public class MySQLDialect extends Dialect {
RuntimeModelCreationContext runtimeModelCreationContext) {
return new LocalTemporaryTableStrategy(
new IdTable( rootEntityDescriptor, basename -> "HT_" + basename, this ),
new IdTable( rootEntityDescriptor, basename -> "HT_" + basename, this, runtimeModelCreationContext ),
() -> new TempIdTableExporter( true, this::getTypeName ) {
@Override
protected String getCreateCommand() {

View File

@ -906,7 +906,8 @@ public class OracleDialect extends Dialect {
new IdTable(
rootEntityDescriptor,
name -> "HT_" + ( name.length() > 27 ? name.substring( 0, 27 ) : name ),
this
this,
runtimeModelCreationContext
),
() -> new TempIdTableExporter( false, this::getTypeName ) {
@Override

View File

@ -185,6 +185,9 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
// For SQL-Server we need to cast certain arguments to varchar(max) to be able to concat them
CommonFunctionFactory.aggregates( this, queryEngine, SqlAstNodeRenderingMode.DEFAULT, "+", "varchar(max)" );
// AVG by default uses the input type, so we possibly need to cast the argument type, hence a special function
CommonFunctionFactory.avg_castingNonDoubleArguments( this, queryEngine, SqlAstNodeRenderingMode.DEFAULT );
CommonFunctionFactory.truncate_round( queryEngine );
CommonFunctionFactory.everyAny_sumIif( queryEngine );
CommonFunctionFactory.bitLength_pattern( queryEngine, "datalength(?1) * 8" );

View File

@ -301,7 +301,8 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
@Override
public void visitColumnReference(ColumnReference columnReference) {
if ( getDmlTargetTableAlias() != null && getDmlTargetTableAlias().equals( columnReference.getQualifier() ) ) {
final String dmlTargetTableAlias = getDmlTargetTableAlias();
if ( dmlTargetTableAlias != null && dmlTargetTableAlias.equals( columnReference.getQualifier() ) ) {
// Sybase needs a table name prefix
// but not if this is a restricted union table reference subquery
final QuerySpec currentQuerySpec = (QuerySpec) getQueryPartStack().getCurrent();
@ -317,11 +318,11 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
// This is fine for now as this is only temporary anyway until we render aliases for table references
appendSql(
columnReference.getColumnExpression()
.replaceAll( "(\\b)(" + getDmlTargetTableAlias() + "\\.)(\\b)", "$1$3" )
.replaceAll( "(\\b)(" + dmlTargetTableAlias + "\\.)(\\b)", "$1$3" )
);
}
else {
appendSql( ( (MutationStatement) getStatement() ).getTargetTable().getTableExpression() );
appendSql( getCurrentDmlStatement().getTargetTable().getTableExpression() );
appendSql( '.' );
appendSql( columnReference.getColumnExpression() );
}

View File

@ -211,6 +211,9 @@ public class SybaseDialect extends AbstractTransactSQLDialect {
// For SQL-Server we need to cast certain arguments to varchar(16384) to be able to concat them
CommonFunctionFactory.aggregates( this, queryEngine, SqlAstNodeRenderingMode.DEFAULT, "+", "varchar(16384)" );
// AVG by default uses the input type, so we possibly need to cast the argument type, hence a special function
CommonFunctionFactory.avg_castingNonDoubleArguments( this, queryEngine, SqlAstNodeRenderingMode.DEFAULT );
queryEngine.getSqmFunctionRegistry().register( "concat", new SybaseConcatFunction( this, queryEngine.getTypeConfiguration() ) );
//this doesn't work 100% on earlier versions of Sybase

View File

@ -0,0 +1,115 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.dialect.function;
import java.util.Collections;
import java.util.List;
import org.hibernate.dialect.Dialect;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.query.CastType;
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
import org.hibernate.query.sqm.function.FunctionKind;
import org.hibernate.query.sqm.produce.function.StandardArgumentsValidators;
import org.hibernate.query.sqm.produce.function.StandardFunctionReturnTypeResolvers;
import org.hibernate.query.sqm.produce.function.internal.PatternRenderer;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.expression.Distinct;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.spi.TypeConfiguration;
/**
* @author Christian Beikov
*/
public class AvgFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
public static final String FUNCTION_NAME = "avg";
private final Dialect dialect;
private final SqlAstNodeRenderingMode defaultArgumentRenderingMode;
private final String doubleCastType;
public AvgFunction(
Dialect dialect,
TypeConfiguration typeConfiguration,
SqlAstNodeRenderingMode defaultArgumentRenderingMode,
String doubleCastType) {
super(
FUNCTION_NAME,
FunctionKind.AGGREGATE,
StandardArgumentsValidators.exactly( 1 ),
StandardFunctionReturnTypeResolvers.invariant(
typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.DOUBLE )
)
);
this.dialect = dialect;
this.defaultArgumentRenderingMode = defaultArgumentRenderingMode;
this.doubleCastType = doubleCastType;
}
@Override
public void render(SqlAppender sqlAppender, List<SqlAstNode> sqlAstArguments, SqlAstTranslator<?> walker) {
render( sqlAppender, sqlAstArguments, null, walker );
}
@Override
public void render(
SqlAppender sqlAppender,
List<SqlAstNode> sqlAstArguments,
Predicate filter,
SqlAstTranslator<?> translator) {
final boolean caseWrapper = filter != null && !translator.supportsFilterClause();
sqlAppender.appendSql( "avg(" );
final Expression arg;
if ( sqlAstArguments.get( 0 ) instanceof Distinct ) {
sqlAppender.appendSql( "distinct " );
arg = ( (Distinct) sqlAstArguments.get( 0 ) ).getExpression();
}
else {
arg = (Expression) sqlAstArguments.get( 0 );
}
if ( caseWrapper ) {
sqlAppender.appendSql( "case when " );
filter.accept( translator );
sqlAppender.appendSql( " then " );
renderArgument( sqlAppender, translator, arg );
sqlAppender.appendSql( " else null end)" );
}
else {
renderArgument( sqlAppender, translator, arg );
sqlAppender.appendSql( ')' );
if ( filter != null ) {
sqlAppender.appendSql( " filter (where " );
filter.accept( translator );
sqlAppender.appendSql( ')' );
}
}
}
private void renderArgument(SqlAppender sqlAppender, SqlAstTranslator<?> translator, Expression realArg) {
final JdbcMapping sourceMapping = realArg.getExpressionType().getJdbcMappings().get( 0 );
// Only cast to float/double if this is an integer
if ( sourceMapping.getJdbcTypeDescriptor().isInteger() ) {
final String cast = dialect.castPattern( sourceMapping.getCastType(), CastType.DOUBLE );
new PatternRenderer( cast.replace( "?2", doubleCastType ) )
.render( sqlAppender, Collections.singletonList( realArg ), translator );
}
else {
translator.render( realArg, defaultArgumentRenderingMode );
}
}
@Override
public String getArgumentListSignature() {
return "(arg)";
}
}

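A hedged sketch of the effect; the entity and the rendered SQL are illustrative, and the actual cast target comes from Dialect#getTypeName( SqlTypes.DOUBLE ):

    // avg over an integer-typed attribute; session is an open org.hibernate.Session
    Double average = session.createQuery( "select avg(p.age) from Person p", Double.class )
            .getSingleResult();
    // on dialects registering avg_castingNonDoubleArguments this is expected to render
    // roughly as: select avg(cast(p1_0.age as double precision)) from Person p1_0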
View File

@ -27,6 +27,7 @@ import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.type.BasicType;
import org.hibernate.type.BasicTypeRegistry;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.spi.TypeConfiguration;
@ -1701,13 +1702,6 @@ public class CommonFunctionFactory {
.register();
}
public static void aggregates(
Dialect dialect,
QueryEngine queryEngine,
SqlAstNodeRenderingMode inferenceArgumentRenderingMode) {
aggregates( dialect, queryEngine, inferenceArgumentRenderingMode, "||", null );
}
public static void aggregates(
Dialect dialect,
QueryEngine queryEngine,
@ -1837,7 +1831,28 @@ public class CommonFunctionFactory {
queryEngine.getSqmFunctionRegistry().register(
CountFunction.FUNCTION_NAME,
new CountFunction( dialect, queryEngine.getTypeConfiguration(), concatOperator, concatArgumentCastType )
new CountFunction(
dialect,
queryEngine.getTypeConfiguration(),
inferenceArgumentRenderingMode,
concatOperator,
concatArgumentCastType
)
);
}
public static void avg_castingNonDoubleArguments(
Dialect dialect,
QueryEngine queryEngine,
SqlAstNodeRenderingMode inferenceArgumentRenderingMode) {
queryEngine.getSqmFunctionRegistry().register(
AvgFunction.FUNCTION_NAME,
new AvgFunction(
dialect,
queryEngine.getTypeConfiguration(),
inferenceArgumentRenderingMode,
dialect.getTypeName( SqlTypes.DOUBLE )
)
);
}

View File

@ -43,10 +43,16 @@ public class CountFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
public static final String FUNCTION_NAME = "count";
private final Dialect dialect;
private final SqlAstNodeRenderingMode defaultArgumentRenderingMode;
private final String concatOperator;
private final String concatArgumentCastType;
public CountFunction(Dialect dialect, TypeConfiguration typeConfiguration, String concatOperator, String concatArgumentCastType) {
public CountFunction(
Dialect dialect,
TypeConfiguration typeConfiguration,
SqlAstNodeRenderingMode defaultArgumentRenderingMode,
String concatOperator,
String concatArgumentCastType) {
super(
FUNCTION_NAME,
FunctionKind.AGGREGATE,
@ -56,6 +62,7 @@ public class CountFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
)
);
this.dialect = dialect;
this.defaultArgumentRenderingMode = defaultArgumentRenderingMode;
this.concatOperator = concatOperator;
this.concatArgumentCastType = concatArgumentCastType;
}
@ -159,11 +166,11 @@ public class CountFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
filter.accept( translator );
sqlAppender.appendSql( " and " );
}
translator.render( expressions.get( 0 ), SqlAstNodeRenderingMode.DEFAULT );
translator.render( expressions.get( 0 ), defaultArgumentRenderingMode );
sqlAppender.appendSql( " is not null" );
for ( int i = 1; i < expressions.size(); i++ ) {
sqlAppender.appendSql( " and " );
translator.render( expressions.get( i ), SqlAstNodeRenderingMode.DEFAULT );
translator.render( expressions.get( i ), defaultArgumentRenderingMode );
sqlAppender.appendSql( " is not null" );
}
sqlAppender.appendSql( " then 1 else null end" );
@ -219,7 +226,7 @@ public class CountFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
}
// Rendering the tuple will add parenthesis around
else if ( requiresParenthesis ) {
translator.render( tuple, SqlAstNodeRenderingMode.DEFAULT );
translator.render( tuple, defaultArgumentRenderingMode );
}
else {
renderCommaSeparatedList( sqlAppender, translator, expressions );
@ -230,10 +237,10 @@ public class CountFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
SqlAppender sqlAppender,
SqlAstTranslator<?> translator,
List<? extends Expression> expressions) {
translator.render( expressions.get( 0 ), SqlAstNodeRenderingMode.DEFAULT );
translator.render( expressions.get( 0 ), defaultArgumentRenderingMode );
for ( int i = 1; i < expressions.size(); i++ ) {
sqlAppender.appendSql( ',' );
translator.render( expressions.get( i ), SqlAstNodeRenderingMode.DEFAULT );
translator.render( expressions.get( i ), defaultArgumentRenderingMode );
}
}
@ -251,24 +258,24 @@ public class CountFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
sqlAppender.appendSql( "1" );
}
else {
translator.render( realArg, SqlAstNodeRenderingMode.DEFAULT );
translator.render( realArg, defaultArgumentRenderingMode );
}
sqlAppender.appendSql( " else null end" );
}
else {
translator.render( realArg, SqlAstNodeRenderingMode.DEFAULT );
translator.render( realArg, defaultArgumentRenderingMode );
}
}
private void renderCastedArgument(SqlAppender sqlAppender, SqlAstTranslator<?> translator, Expression realArg) {
if ( concatArgumentCastType == null ) {
translator.render( realArg, SqlAstNodeRenderingMode.DEFAULT );
translator.render( realArg, defaultArgumentRenderingMode );
}
else {
final JdbcMapping sourceMapping = realArg.getExpressionType().getJdbcMappings().get( 0 );
// No need to cast if we already have a string
if ( sourceMapping.getCastType() == CastType.STRING ) {
translator.render( realArg, SqlAstNodeRenderingMode.DEFAULT );
translator.render( realArg, defaultArgumentRenderingMode );
}
else {
final String cast = dialect.castPattern( sourceMapping.getCastType(), CastType.STRING );

View File

@ -46,4 +46,8 @@ public class CurrentFunction
return "";
}
@Override
public boolean alwaysIncludesParentheses() {
return sql.indexOf( '(' ) != -1;
}
}

View File

@ -91,7 +91,7 @@ public class NormalizingIdentifierHelperImpl implements IdentifierHelper {
@Override
public Identifier applyGlobalQuoting(String text) {
return Identifier.toIdentifier( text, globallyQuoteIdentifiers && !globallyQuoteIdentifiersSkipColumnDefinitions );
return Identifier.toIdentifier( text, globallyQuoteIdentifiers && !globallyQuoteIdentifiersSkipColumnDefinitions, false );
}
@Override

View File

@ -0,0 +1,141 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.internal.util;
public final class QuotingHelper {
private QuotingHelper() { /* static methods only - hide constructor */
}
public static String unquoteIdentifier(String text) {
final int end = text.length() - 1;
assert text.charAt( 0 ) == '`' && text.charAt( end ) == '`';
// Unquote a parsed quoted identifier and handle escape sequences
final StringBuilder sb = new StringBuilder( text.length() - 2 );
for ( int i = 1; i < end; i++ ) {
char c = text.charAt( i );
switch ( c ) {
case '\\':
if ( ( i + 1 ) < end ) {
char nextChar = text.charAt( ++i );
switch ( nextChar ) {
case 'b':
c = '\b';
break;
case 't':
c = '\t';
break;
case 'n':
c = '\n';
break;
case 'f':
c = '\f';
break;
case 'r':
c = '\r';
break;
case '\\':
c = '\\';
break;
case '\'':
c = '\'';
break;
case '"':
c = '"';
break;
case '`':
c = '`';
break;
case 'u':
c = (char) Integer.parseInt( text.substring( i + 1, i + 5 ), 16 );
i += 4;
break;
default:
sb.append( '\\' );
c = nextChar;
break;
}
}
break;
default:
break;
}
sb.append( c );
}
return sb.toString();
}
public static String unquoteStringLiteral(String text) {
assert text.length() > 1;
final int end = text.length() - 1;
final char delimiter = text.charAt( 0 );
assert delimiter == text.charAt( end );
// Unescape the parsed literal and handle escape sequences
final StringBuilder sb = new StringBuilder( text.length() - 2 );
for ( int i = 1; i < end; i++ ) {
char c = text.charAt( i );
switch ( c ) {
case '\'':
if ( delimiter == '\'' ) {
i++;
}
break;
case '"':
if ( delimiter == '"' ) {
i++;
}
break;
case '\\':
if ( ( i + 1 ) < end ) {
char nextChar = text.charAt( ++i );
switch ( nextChar ) {
case 'b':
c = '\b';
break;
case 't':
c = '\t';
break;
case 'n':
c = '\n';
break;
case 'f':
c = '\f';
break;
case 'r':
c = '\r';
break;
case '\\':
c = '\\';
break;
case '\'':
c = '\'';
break;
case '"':
c = '"';
break;
case '`':
c = '`';
break;
case 'u':
c = (char) Integer.parseInt( text.substring( i + 1, i + 5 ), 16 );
i += 4;
break;
default:
sb.append( '\\' );
c = nextChar;
break;
}
}
break;
default:
break;
}
sb.append( c );
}
return sb.toString();
}
}

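Illustrative calls against the new helper; the inputs include the surrounding quote characters, as the HQL lexer produces them:

    QuotingHelper.unquoteIdentifier( "`order`" );           // -> order
    QuotingHelper.unquoteStringLiteral( "'it''s'" );        // -> it's
    QuotingHelper.unquoteStringLiteral( "'line\\nbreak'" ); // -> line, a real newline, break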
View File

@ -29,7 +29,7 @@ public final class ArrayHelper {
public static int indexOf(Object[] array, Object object) {
for ( int i = 0; i < array.length; i++ ) {
if ( array[i].equals( object ) ) {
if ( object.equals( array[i] ) ) {
return i;
}
}

View File

@ -6,6 +6,7 @@
*/
package org.hibernate.loader.ast.internal;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -251,7 +252,7 @@ public class LoaderSelectBuilder {
private final EntityGraphTraversalState entityGraphTraversalState;
private int fetchDepth;
private Map<OrderByFragment, TableGroup> orderByFragments;
private List<Map.Entry<OrderByFragment, TableGroup>> orderByFragments;
private boolean hasCollectionJoinFetches;
private String currentBagRole;
@ -483,9 +484,9 @@ public class LoaderSelectBuilder {
if ( orderByFragments != null ) {
orderByFragments.forEach(
(orderByFragment, tableGroup) -> orderByFragment.apply(
entry -> entry.getKey().apply(
rootQuerySpec,
tableGroup,
entry.getValue(),
sqlAstCreationState
)
);
@ -652,9 +653,9 @@ public class LoaderSelectBuilder {
private void applyOrdering(TableGroup tableGroup, OrderByFragment orderByFragment) {
if ( orderByFragments == null ) {
orderByFragments = new LinkedHashMap<>();
orderByFragments = new ArrayList<>();
}
orderByFragments.put( orderByFragment, tableGroup );
orderByFragments.add( new AbstractMap.SimpleEntry<>( orderByFragment, tableGroup ) );
}
private List<Fetch> visitFetches(
@ -1010,13 +1011,14 @@ public class LoaderSelectBuilder {
if ( jdbcTypeCount == 1 ) {
assert fkDescriptor instanceof SimpleForeignKeyDescriptor;
final SimpleForeignKeyDescriptor simpleFkDescriptor = (SimpleForeignKeyDescriptor) fkDescriptor;
final TableReference tableReference = rootTableGroup.resolveTableReference(
navigablePath,
simpleFkDescriptor.getContainingTableExpression()
);
fkExpression = sqlAstCreationState.getSqlExpressionResolver().resolveSqlExpression(
createColumnReferenceKey(
simpleFkDescriptor.getContainingTableExpression(),
simpleFkDescriptor.getSelectionExpression()
),
createColumnReferenceKey( tableReference, simpleFkDescriptor.getSelectionExpression() ),
sqlAstProcessingState -> new ColumnReference(
rootTableGroup.resolveTableReference( navigablePath, simpleFkDescriptor.getContainingTableExpression() ),
tableReference,
simpleFkDescriptor.getSelectionExpression(),
false,
null,
@ -1029,21 +1031,26 @@ public class LoaderSelectBuilder {
else {
final List<ColumnReference> columnReferences = new ArrayList<>( jdbcTypeCount );
fkDescriptor.forEachSelectable(
(columnIndex, selection) ->
columnReferences.add(
(ColumnReference) sqlAstCreationState.getSqlExpressionResolver()
.resolveSqlExpression(
createColumnReferenceKey(
selection.getContainingTableExpression(),
selection.getSelectionExpression()
),
sqlAstProcessingState -> new ColumnReference(
rootTableGroup.resolveTableReference( navigablePath, selection.getContainingTableExpression() ),
selection,
this.creationContext.getSessionFactory()
)
)
)
(columnIndex, selection) -> {
final TableReference tableReference = rootTableGroup.resolveTableReference(
navigablePath,
selection.getContainingTableExpression()
);
columnReferences.add(
(ColumnReference) sqlAstCreationState.getSqlExpressionResolver()
.resolveSqlExpression(
createColumnReferenceKey(
tableReference,
selection.getSelectionExpression()
),
sqlAstProcessingState -> new ColumnReference(
tableReference,
selection,
this.creationContext.getSessionFactory()
)
)
);
}
);
fkExpression = new SqlTuple( columnReferences, fkDescriptor );

View File

@ -542,6 +542,22 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}
}
public Property getSubclassProperty(String propertyName) throws MappingException {
Property identifierProperty = getIdentifierProperty();
if ( identifierProperty != null
&& identifierProperty.getName().equals( StringHelper.root( propertyName ) ) ) {
return identifierProperty;
}
else {
Iterator<Property> iter = getSubclassPropertyClosureIterator();
Component identifierMapper = getIdentifierMapper();
if ( identifierMapper != null ) {
iter = new JoinedIterator<>( identifierMapper.getPropertyIterator(), iter );
}
return getProperty( propertyName, iter );
}
}
/**
* Check to see if this PersistentClass defines a property with the given name.
*

View File

@ -7,11 +7,12 @@
package org.hibernate.metamodel.mapping;
import org.hibernate.sql.results.graph.Fetchable;
import org.hibernate.type.descriptor.java.JavaTypedExpressable;
/**
* @author Steve Ebersole
*/
public interface CollectionPart extends ModelPart, Fetchable {
public interface CollectionPart extends ModelPart, Fetchable, JavaTypedExpressable {
enum Nature {
ELEMENT( "{element}" ),
INDEX( "{index}" ),

View File

@ -57,7 +57,7 @@ public class MappingModelHelper {
}
else {
colRef = (ColumnReference) sqlExpressionResolver.resolveSqlExpression(
createColumnReferenceKey( selection.getContainingTableExpression(), selection.getSelectionExpression() ),
createColumnReferenceKey( qualifier, selection.getSelectionExpression() ),
sqlAstProcessingState -> new ColumnReference(
qualifier,
selection,
@ -89,7 +89,7 @@ public class MappingModelHelper {
}
else {
return sqlExpressionResolver.resolveSqlExpression(
createColumnReferenceKey( basicPart.getContainingTableExpression(), basicPart.getSelectionExpression() ),
createColumnReferenceKey( qualifier, basicPart.getSelectionExpression() ),
sqlAstProcessingState -> new ColumnReference(
qualifier,
basicPart,

View File

@ -61,7 +61,7 @@ public abstract class AbstractDomainPath implements DomainPath {
final TableReference tableReference = tableGroup.resolveTableReference( getNavigablePath(), selection.getContainingTableExpression() );
return creationState.getSqlExpressionResolver().resolveSqlExpression(
SqlExpressionResolver.createColumnReferenceKey(
selection.getContainingTableExpression(),
tableReference,
selection.getSelectionExpression()
),
sqlAstProcessingState -> new ColumnReference(
@ -247,7 +247,7 @@ public abstract class AbstractDomainPath implements DomainPath {
final TableReference tableReference = tableGroup.resolveTableReference( getNavigablePath(), selection.getContainingTableExpression() );
final Expression expression = creationState.getSqlExpressionResolver().resolveSqlExpression(
SqlExpressionResolver.createColumnReferenceKey(
selection.getContainingTableExpression(),
tableReference,
selection.getSelectionExpression()
),
sqlAstProcessingState -> new ColumnReference(

View File

@ -10,7 +10,6 @@ import java.io.Serializable;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import org.hibernate.NotYetImplementedFor6Exception;
import org.hibernate.SharedSessionContract;
import org.hibernate.engine.FetchStyle;
import org.hibernate.engine.FetchTiming;
@ -33,8 +32,17 @@ import org.hibernate.metamodel.model.domain.NavigableRole;
import org.hibernate.property.access.spi.PropertyAccess;
import org.hibernate.query.NavigablePath;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstJoinType;
import org.hibernate.sql.ast.spi.FromClauseAccess;
import org.hibernate.sql.ast.spi.SqlAliasBaseGenerator;
import org.hibernate.sql.ast.spi.SqlAstCreationContext;
import org.hibernate.sql.ast.spi.SqlExpressionResolver;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.from.StandardVirtualTableGroup;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableGroupJoin;
import org.hibernate.sql.ast.tree.from.TableGroupJoinProducer;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.results.graph.DomainResult;
import org.hibernate.sql.results.graph.DomainResultCreationState;
import org.hibernate.sql.results.graph.Fetch;
@ -53,7 +61,7 @@ import org.hibernate.type.descriptor.java.MutabilityPlan;
*/
public class DiscriminatedAssociationAttributeMapping
extends AbstractSingularAttributeMapping
implements DiscriminatedAssociationModelPart {
implements DiscriminatedAssociationModelPart, TableGroupJoinProducer {
private final NavigableRole navigableRole;
private final DiscriminatedAssociationMapping discriminatorMapping;
private final SessionFactoryImplementor sessionFactory;
@ -141,17 +149,24 @@ public class DiscriminatedAssociationAttributeMapping
TableGroup tableGroup,
String resultVariable,
DomainResultCreationState creationState) {
throw new NotYetImplementedFor6Exception( getClass() );
return discriminatorMapping.createDomainResult(
navigablePath,
tableGroup,
resultVariable,
creationState
);
}
@Override
public void applySqlSelections(NavigablePath navigablePath, TableGroup tableGroup, DomainResultCreationState creationState) {
throw new NotYetImplementedFor6Exception( getClass() );
discriminatorMapping.getDiscriminatorPart().applySqlSelections( navigablePath, tableGroup, creationState );
discriminatorMapping.getKeyPart().applySqlSelections( navigablePath, tableGroup, creationState );
}
@Override
public void applySqlSelections(NavigablePath navigablePath, TableGroup tableGroup, DomainResultCreationState creationState, BiConsumer<SqlSelection, JdbcMapping> selectionConsumer) {
throw new NotYetImplementedFor6Exception( getClass() );
discriminatorMapping.getDiscriminatorPart().applySqlSelections( navigablePath, tableGroup, creationState, selectionConsumer );
discriminatorMapping.getKeyPart().applySqlSelections( navigablePath, tableGroup, creationState, selectionConsumer );
}
@Override
@ -361,4 +376,62 @@ public class DiscriminatedAssociationAttributeMapping
return anyType.assemble( cached, persistenceContext, null );
}
}
@Override
public TableGroupJoin createTableGroupJoin(
NavigablePath navigablePath,
TableGroup lhs,
String explicitSourceAlias,
SqlAstJoinType sqlAstJoinType,
boolean fetched,
boolean addsPredicate,
SqlAliasBaseGenerator aliasBaseGenerator,
SqlExpressionResolver sqlExpressionResolver,
FromClauseAccess fromClauseAccess,
SqlAstCreationContext creationContext) {
final TableGroup tableGroup = createRootTableGroupJoin(
navigablePath,
lhs,
explicitSourceAlias,
sqlAstJoinType,
fetched,
null,
aliasBaseGenerator,
sqlExpressionResolver,
fromClauseAccess,
creationContext
);
return new TableGroupJoin( navigablePath, sqlAstJoinType, tableGroup );
}
@Override
public TableGroup createRootTableGroupJoin(
NavigablePath navigablePath,
TableGroup lhs,
String explicitSourceAlias,
SqlAstJoinType sqlAstJoinType,
boolean fetched,
Consumer<Predicate> predicateConsumer,
SqlAliasBaseGenerator aliasBaseGenerator,
SqlExpressionResolver sqlExpressionResolver,
FromClauseAccess fromClauseAccess,
SqlAstCreationContext creationContext) {
return new StandardVirtualTableGroup(
navigablePath,
this,
lhs,
fetched
);
}
@Override
public SqlAstJoinType getDefaultSqlAstJoinType(TableGroup parentTableGroup) {
return SqlAstJoinType.LEFT;
}
@Override
public String getSqlAliasStem() {
return getAttributeName();
}
}

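A speculative sketch of the HQL that de-referencing an any-valued (@Any) mapping enables; the Order entity and its payment association are hypothetical:

    // navigates the key part of the discriminated (ANY) association
    List<Long> paymentIds = session.createQuery(
            "select o.payment.id from Order o",
            Long.class )
        .getResultList();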
View File

@ -144,6 +144,11 @@ public class DiscriminatedCollectionPart implements DiscriminatedAssociationMode
return discriminatorMapping.getJavaTypeDescriptor();
}
@Override
public JavaType<?> getExpressableJavaTypeDescriptor() {
return getJavaTypeDescriptor();
}
@Override
public NavigableRole getNavigableRole() {
return partRole;

View File

@ -42,6 +42,7 @@ import org.hibernate.sql.ast.tree.from.PluralTableGroup;
import org.hibernate.sql.ast.tree.from.StandardVirtualTableGroup;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableGroupJoin;
import org.hibernate.sql.ast.tree.from.TableReference;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.results.graph.DomainResult;
import org.hibernate.sql.results.graph.DomainResultCreationState;
@ -199,15 +200,16 @@ public class EmbeddedCollectionPart implements CollectionPart, EmbeddableValuedF
getEmbeddableTypeDescriptor().forEachSelectable(
(columnIndex, selection) -> {
assert containingTableExpression.equals( selection.getContainingTableExpression() );
final TableReference tableReference = tableGroup.resolveTableReference(
tableGroup.getNavigablePath()
.append( getNavigableRole().getNavigableName() ),
selection.getContainingTableExpression()
);
expressions.add(
sqlExpressionResolver.resolveSqlExpression(
SqlExpressionResolver.createColumnReferenceKey( selection.getContainingTableExpression(), selection.getSelectionExpression() ),
SqlExpressionResolver.createColumnReferenceKey( tableReference, selection.getSelectionExpression() ),
sqlAstProcessingState -> new ColumnReference(
tableGroup.resolveTableReference(
tableGroup.getNavigablePath()
.append( getNavigableRole().getNavigableName() ),
selection.getContainingTableExpression()
),
tableReference,
selection,
sqlAstCreationState.getCreationContext().getSessionFactory()
)
@ -300,6 +302,11 @@ public class EmbeddedCollectionPart implements CollectionPart, EmbeddableValuedF
return getEmbeddableTypeDescriptor().getJavaTypeDescriptor();
}
@Override
public JavaType<?> getExpressableJavaTypeDescriptor() {
return getJavaTypeDescriptor();
}
@Override
public NavigableRole getNavigableRole() {
return navigableRole;

View File

@ -359,6 +359,11 @@ public class EntityCollectionPart
return getEntityMappingType().getJavaTypeDescriptor();
}
@Override
public JavaType<?> getExpressableJavaTypeDescriptor() {
return getJavaTypeDescriptor();
}
@Override
public NavigableRole getNavigableRole() {
return navigableRole;

View File

@ -10,7 +10,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.hibernate.NotYetImplementedFor6Exception;
import org.hibernate.metamodel.mapping.MappingModelCreationLogger;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.metamodel.mapping.NonTransientException;

View File

@ -31,6 +31,7 @@ import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.ManagedMappingType;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.metamodel.mapping.Queryable;
import org.hibernate.metamodel.mapping.StateArrayContributorMetadataAccess;
import org.hibernate.metamodel.mapping.ordering.OrderByFragment;
import org.hibernate.metamodel.mapping.ordering.OrderByFragmentTranslator;
@ -212,7 +213,7 @@ public class PluralAttributeMappingImpl
final boolean hasManyToManyOrder = bootDescriptor.getManyToManyOrdering() != null;
if ( hasOrder || hasManyToManyOrder ) {
final TranslationContext context = () -> collectionDescriptor.getFactory().getSessionFactoryOptions().getJpaCompliance();
final TranslationContext context = collectionDescriptor::getFactory;
if ( hasOrder ) {
if ( log.isDebugEnabled() ) {
@ -796,29 +797,22 @@ public class PluralAttributeMappingImpl
@Override
public ModelPart findSubPart(String name, EntityMappingType treatTargetType) {
if ( elementDescriptor instanceof Queryable ) {
final ModelPart subPart = ( (Queryable) elementDescriptor ).findSubPart( name, null );
if ( subPart != null ) {
return subPart;
}
}
final CollectionPart.Nature nature = CollectionPart.Nature.fromName( name );
if ( nature == CollectionPart.Nature.ELEMENT ) {
return elementDescriptor;
}
if ( nature == CollectionPart.Nature.INDEX ) {
return indexDescriptor;
}
if ( nature == CollectionPart.Nature.ID ) {
return identifierDescriptor;
}
if ( elementDescriptor instanceof EntityCollectionPart ) {
return ( (EntityCollectionPart) elementDescriptor ).findSubPart( name );
}
if ( elementDescriptor instanceof EmbeddedCollectionPart ) {
return ( (EmbeddedCollectionPart) elementDescriptor ).findSubPart( name, treatTargetType );
}
if ( elementDescriptor instanceof DiscriminatedCollectionPart ) {
return ( (DiscriminatedCollectionPart) elementDescriptor ).findSubPart( name, treatTargetType );
if ( nature != null ) {
switch ( nature ) {
case ELEMENT:
return elementDescriptor;
case INDEX:
return indexDescriptor;
case ID:
return identifierDescriptor;
}
}
return null;

View File

@ -6,6 +6,7 @@
*/
package org.hibernate.metamodel.mapping.ordering;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.jpa.spi.JpaCompliance;
/**
@ -14,5 +15,10 @@ import org.hibernate.jpa.spi.JpaCompliance;
* @author Steve Ebersole
*/
public interface TranslationContext {
JpaCompliance getJpaCompliance();
SessionFactoryImplementor getFactory();
default JpaCompliance getJpaCompliance() {
return getFactory().getSessionFactoryOptions().getJpaCompliance();
}
}
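Since getJpaCompliance() now has a default implementation that delegates to the factory, a TranslationContext can be supplied as a bare factory reference, as PluralAttributeMappingImpl does above. A minimal sketch, assuming a collectionDescriptor in scope as in that change:

// any supplier of the SessionFactoryImplementor now satisfies the contract
final TranslationContext context = collectionDescriptor::getFactory;
// JPA compliance falls back to the factory's setting unless the default is overridden
final JpaCompliance jpaCompliance = context.getJpaCompliance();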

View File

@ -56,6 +56,7 @@ public class CollectionPartPath extends AbstractDomainPath {
@Override
public SequencePart resolvePathPart(
String name,
String identifier,
boolean isTerminal,
TranslationContext translationContext) {
if ( referencedPart instanceof EmbeddedCollectionPart ) {

View File

@ -34,12 +34,10 @@ import org.hibernate.sql.ast.tree.select.SortSpecification;
public class ColumnReference implements OrderingExpression, SequencePart {
private final String columnExpression;
private final boolean isColumnExpressionFormula;
private final NavigablePath rootPath;
public ColumnReference(String columnExpression, boolean isColumnExpressionFormula, NavigablePath rootPath) {
public ColumnReference(String columnExpression, boolean isColumnExpressionFormula) {
this.columnExpression = columnExpression;
this.isColumnExpressionFormula = isColumnExpressionFormula;
this.rootPath = rootPath;
}
public String getColumnExpression() {
@ -50,14 +48,6 @@ public class ColumnReference implements OrderingExpression, SequencePart {
return isColumnExpressionFormula;
}
@Override
public SequencePart resolvePathPart(
String name,
boolean isTerminal,
TranslationContext translationContext) {
throw new UnsupportedMappingException( "ColumnReference cannot be de-referenced" );
}
@Override
public Expression resolve(
QuerySpec ast,
@ -85,6 +75,15 @@ public class ColumnReference implements OrderingExpression, SequencePart {
);
}
@Override
public SequencePart resolvePathPart(
String name,
String identifier,
boolean isTerminal,
TranslationContext translationContext) {
throw new UnsupportedMappingException( "ColumnReference cannot be de-referenced" );
}
@Override
public void apply(
QuerySpec ast,

View File

@ -48,6 +48,7 @@ public class DomainPathContinuation extends AbstractDomainPath {
@Override
public SequencePart resolvePathPart(
String name,
String identifier,
boolean isTerminal,
TranslationContext translationContext) {
if ( referencedModelPart instanceof EmbeddableValuedModelPart ) {

View File

@ -35,7 +35,11 @@ public class FkDomainPathContinuation extends DomainPathContinuation {
}
@Override
public SequencePart resolvePathPart(String name, boolean isTerminal, TranslationContext translationContext) {
public SequencePart resolvePathPart(
String name,
String identifier,
boolean isTerminal,
TranslationContext translationContext) {
HashSet<String> furtherPaths = new LinkedHashSet<>( possiblePaths.size() );
for ( String path : possiblePaths ) {
if ( !path.startsWith( name ) ) {

View File

@ -22,20 +22,11 @@ public class OrderingSpecification implements Node {
private NullPrecedence nullPrecedence = NullPrecedence.NONE;
private String orderByValue;
public String getOrderByValue() {
return orderByValue;
}
public OrderingSpecification(OrderingExpression orderingExpression, String orderByValue) {
this.orderingExpression = orderingExpression;
this.orderByValue = orderByValue;
}
public OrderingSpecification(OrderingExpression orderingExpression, String orderByValue,SortOrder sortOrder) {
this(orderingExpression, orderByValue);
this.sortOrder = sortOrder;
}
public OrderingExpression getExpression() {
return orderingExpression;
}
@ -64,6 +55,10 @@ public class OrderingSpecification implements Node {
this.nullPrecedence = nullPrecedence;
}
public String getOrderByValue() {
return orderByValue;
}
public void setOrderByValue(String orderByValue) {
this.orderByValue = orderByValue;
}

View File

@ -10,13 +10,17 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.hibernate.internal.util.QuotingHelper;
import org.hibernate.query.NullPrecedence;
import org.hibernate.query.SortOrder;
import org.hibernate.grammars.ordering.OrderingParser;
import org.hibernate.grammars.ordering.OrderingParser.ExpressionContext;
import org.hibernate.grammars.ordering.OrderingParserBaseVisitor;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.metamodel.mapping.ordering.TranslationContext;
import org.hibernate.query.sqm.ParsingException;
import org.hibernate.query.sqm.function.SqmFunctionDescriptor;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.TerminalNode;
/**
@ -34,18 +38,19 @@ public class ParseTreeVisitor extends OrderingParserBaseVisitor<Object> {
}
@Override
public List<OrderingSpecification> visitOrderByFragment(OrderingParser.OrderByFragmentContext parsedFragment) {
final List<OrderingParser.SortSpecificationContext> parsedSortSpecifications = parsedFragment.sortSpecification();
assert parsedSortSpecifications != null;
public List<OrderingSpecification> visitOrderByFragment(OrderingParser.OrderByFragmentContext ctx) {
final int size = ctx.getChildCount();
// Shift 1 bit instead of division by 2
final int specificationCount = ( size + 1 ) >> 1;
if ( parsedSortSpecifications.size() == 1 ) {
return Collections.singletonList( visitSortSpecification( parsedSortSpecifications.get( 0 ) ) );
if ( specificationCount == 1 ) {
return Collections.singletonList( visitSortSpecification( (OrderingParser.SortSpecificationContext) ctx.getChild( 0 ) ) );
}
final List<OrderingSpecification> specifications = new ArrayList<>( parsedSortSpecifications.size() );
final List<OrderingSpecification> specifications = new ArrayList<>( specificationCount );
for ( OrderingParser.SortSpecificationContext parsedSortSpecification : parsedSortSpecifications ) {
specifications.add( visitSortSpecification( parsedSortSpecification ) );
for ( int i = 0; i < size; i += 2 ) {
specifications.add( visitSortSpecification( (OrderingParser.SortSpecificationContext) ctx.getChild( i ) ) );
}
return specifications;
@ -56,7 +61,7 @@ public class ParseTreeVisitor extends OrderingParserBaseVisitor<Object> {
assert parsedSpec != null;
assert parsedSpec.expression() != null;
final OrderingExpression orderingExpression = visitExpression( parsedSpec.expression() );
final OrderingExpression orderingExpression = (OrderingExpression) parsedSpec.getChild( 0 ).accept( this );
if ( translationContext.getJpaCompliance().isJpaOrderByMappingComplianceEnabled() ) {
if ( orderingExpression instanceof DomainPath ) {
// nothing to do
@ -70,85 +75,157 @@ public class ParseTreeVisitor extends OrderingParserBaseVisitor<Object> {
}
}
final OrderingSpecification result = new OrderingSpecification( orderingExpression, parsedSpec.expression().getText());
final OrderingSpecification result = new OrderingSpecification( orderingExpression, parsedSpec.getChild( 0 ).getText() );
int i = 1;
if ( parsedSpec.collationSpecification() != null ) {
result.setCollation( parsedSpec.collationSpecification().identifier().getText() );
if ( parsedSpec.getChildCount() > i ) {
final ParseTree parseTree = parsedSpec.getChild( i );
if ( parseTree instanceof OrderingParser.CollationSpecificationContext ) {
result.setCollation( (String) parseTree.getChild( 1 ).getChild( 0 ).accept( this ) );
i++;
}
}
if ( parsedSpec.direction() != null && parsedSpec.direction().DESC() != null ) {
result.setSortOrder( SortOrder.DESCENDING );
if ( parsedSpec.getChildCount() > i ) {
final ParseTree parseTree = parsedSpec.getChild( i );
if ( parseTree instanceof OrderingParser.DirectionContext ) {
final OrderingParser.DirectionContext directionCtx = (OrderingParser.DirectionContext) parseTree;
if ( ( (TerminalNode) directionCtx.getChild( 0 ) ).getSymbol().getType() == OrderingParser.ASC ) {
result.setSortOrder( SortOrder.ASCENDING );
}
else {
result.setSortOrder( SortOrder.DESCENDING );
}
i++;
}
}
else {
result.setSortOrder( SortOrder.ASCENDING );
if ( parsedSpec.getChildCount() > i ) {
final ParseTree parseTree = parsedSpec.getChild( i );
if ( parseTree instanceof OrderingParser.NullsPrecedenceContext ) {
final OrderingParser.NullsPrecedenceContext nullsCtx = (OrderingParser.NullsPrecedenceContext) parseTree;
if ( ( (TerminalNode) nullsCtx.getChild( 1 ) ).getSymbol().getType() == OrderingParser.FIRST ) {
result.setNullPrecedence( NullPrecedence.FIRST );
}
else {
result.setNullPrecedence( NullPrecedence.LAST );
}
}
}
// todo (6.0) : null-precedence (see grammar notes)
return result;
}
@Override
public OrderingExpression visitExpression(ExpressionContext ctx) {
if ( ctx.function() != null ) {
return visitFunction( ctx.function() );
}
public OrderingExpression visitFunctionExpression(OrderingParser.FunctionExpressionContext ctx) {
return visitFunction( (OrderingParser.FunctionContext) ctx.getChild( 0 ) );
}
if ( ctx.identifier() != null ) {
pathConsumer.consumeIdentifier( ctx.identifier().getText(), true, true );
return (OrderingExpression) pathConsumer.getConsumedPart();
}
@Override
public OrderingExpression visitIdentifierExpression(OrderingParser.IdentifierExpressionContext ctx) {
return visitIdentifier( (OrderingParser.IdentifierContext) ctx.getChild( 0 ) );
}
assert ctx.dotIdentifier() != null;
final int numberOfParts = ctx.dotIdentifier().IDENTIFIER().size();
boolean firstPass = true;
@Override
public OrderingExpression visitDotIdentifierExpression(OrderingParser.DotIdentifierExpressionContext ctx) {
return visitDotIdentifier( (OrderingParser.DotIdentifierContext) ctx.getChild( 0 ) );
}
for ( int i = 0; i < numberOfParts; i++ ) {
final TerminalNode partNode = ctx.dotIdentifier().IDENTIFIER().get( i );
@Override
public OrderingExpression visitDotIdentifier(OrderingParser.DotIdentifierContext ctx) {
final int size = ctx.getChildCount();
final int end = size - 1;
// For nested paths, which must be on the domain model, we don't care about the possibly quoted identifier,
// so we just pass the unquoted one
String partName = (String) ctx.getChild( 0 ).getChild( 0 ).accept( this );
pathConsumer.consumeIdentifier(
partName,
partName,
true,
false
);
for ( int i = 2; i < end; i += 2 ) {
partName = (String) ctx.getChild( i ).getChild( 0 ).accept( this );
pathConsumer.consumeIdentifier(
partNode.getText(),
firstPass,
true
partName,
partName,
false,
false
);
firstPass = false;
}
partName = (String) ctx.getChild( end ).getChild( 0 ).accept( this );
pathConsumer.consumeIdentifier(
partName,
partName,
false,
true
);
return (OrderingExpression) pathConsumer.getConsumedPart();
}
@Override
public FunctionExpression visitFunction(OrderingParser.FunctionContext ctx) {
if ( ctx.simpleFunction() != null ) {
final FunctionExpression function = new FunctionExpression(
ctx.simpleFunction().identifier().getText(),
ctx.simpleFunction().functionArguments().expression().size()
);
for ( int i = 0; i < ctx.simpleFunction().functionArguments().expression().size(); i++ ) {
final ExpressionContext arg = ctx.simpleFunction().functionArguments().expression( i );
function.addArgument( visitExpression( arg ) );
}
return function;
public OrderingExpression visitIdentifier(OrderingParser.IdentifierContext ctx) {
final String unquotedIdentifier = (String) ctx.getChild( 0 ).accept( this );
final SqmFunctionDescriptor descriptor = translationContext.getFactory()
.getQueryEngine()
.getSqmFunctionRegistry()
.findFunctionDescriptor( unquotedIdentifier );
// If there is no function with this name, if the function always requires parentheses, or if this is a quoted identifier,
// then we interpret this as a path part instead of as a function
final String identifier = ctx.getChild( 0 ).getText();
if ( descriptor == null || descriptor.alwaysIncludesParentheses() || !unquotedIdentifier.equals( identifier ) ) {
pathConsumer.consumeIdentifier( unquotedIdentifier, identifier, true, true );
return (OrderingExpression) pathConsumer.getConsumedPart();
}
return new SelfRenderingOrderingExpression( unquotedIdentifier );
}
assert ctx.packagedFunction() != null;
@Override
public FunctionExpression visitFunction(OrderingParser.FunctionContext ctx) {
final ParseTree functionCtx = ctx.getChild( 0 );
final OrderingParser.FunctionArgumentsContext argumentsCtx = (OrderingParser.FunctionArgumentsContext) functionCtx.getChild( 1 );
final int size = argumentsCtx.getChildCount();
// Shift 1 bit instead of division by 2
final int expressionsCount = ( ( size - 1 ) >> 1 );
final FunctionExpression function = new FunctionExpression(
ctx.packagedFunction().dotIdentifier().getText(),
ctx.packagedFunction().functionArguments().expression().size()
functionCtx.getChild( 0 ).getText(),
expressionsCount
);
for ( int i = 0; i < ctx.packagedFunction().functionArguments().expression().size(); i++ ) {
final ExpressionContext arg = ctx.packagedFunction().functionArguments().expression( i );
function.addArgument( visitExpression( arg ) );
for ( int i = 1; i < size; i += 2 ) {
function.addArgument( (OrderingExpression) argumentsCtx.getChild( i ).accept( this ) );
}
return function;
}
@Override
public OrderingExpression visitFunctionArgument(OrderingParser.FunctionArgumentContext ctx) {
return (OrderingExpression) ctx.getChild( 0 ).accept( this );
}
@Override
public OrderingExpression visitLiteral(OrderingParser.LiteralContext ctx) {
return new SelfRenderingOrderingExpression( ctx.getText() );
}
@Override
public String visitCollationSpecification(OrderingParser.CollationSpecificationContext ctx) {
throw new IllegalStateException( "Unexpected call to #visitCollationSpecification" );
}
@Override
public Object visitTerminal(TerminalNode node) {
if ( node.getSymbol().getType() == OrderingParser.EOF ) {
return null;
}
switch ( node.getSymbol().getType() ) {
case OrderingParser.IDENTIFIER:
return node.getText();
case OrderingParser.QUOTED_IDENTIFIER:
return QuotingHelper.unquoteIdentifier( node.getText() );
default:
throw new ParsingException( "Unexpected terminal node [" + node.getText() + "]" );
}
}
}
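The visitor now walks raw parse-tree children instead of the generated accessor lists, so the index arithmetic relies on the comma-separated layout: sortSpecification nodes sit at even child indices with commas between them, so ( getChildCount() + 1 ) >> 1 recovers their count. A small self-contained sketch of that counting, illustrative only and not part of the commit:

public class SortSpecCountSketch {
	public static void main(String[] args) {
		// e.g. "name asc, id desc, created" yields children [spec, ',', spec, ',', spec]
		final int childCount = 5;
		// identical to ( childCount + 1 ) / 2, written as a shift as in visitOrderByFragment
		final int specificationCount = ( childCount + 1 ) >> 1;
		for ( int i = 0; i < childCount; i += 2 ) {
			System.out.println( "sortSpecification at child index " + i );
		}
		System.out.println( "specifications: " + specificationCount ); // prints 3
	}
}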

View File

@ -26,7 +26,7 @@ public class PathConsumer {
private final SequencePart rootSequencePart;
private String pathSoFar;
private StringBuilder pathSoFar = new StringBuilder();
private SequencePart currentPart;
public PathConsumer(
@ -41,32 +41,33 @@ public class PathConsumer {
return currentPart;
}
public void consumeIdentifier(String identifier, boolean isBase, boolean isTerminal) {
public void consumeIdentifier(
String unquotedIdentifier,
String identifier, boolean isBase,
boolean isTerminal) {
if ( isBase ) {
// each time we start a new sequence we need to reset our state
reset();
}
if ( pathSoFar == null ) {
pathSoFar = identifier;
}
else {
pathSoFar += ( '.' + identifier );
if ( pathSoFar.length() != 0 ) {
pathSoFar.append( '.' );
}
pathSoFar.append( unquotedIdentifier );
log.tracef(
"BasicDotIdentifierHandler#consumeIdentifier( %s, %s, %s ) - %s",
identifier,
unquotedIdentifier,
isBase,
isTerminal,
pathSoFar
);
currentPart = currentPart.resolvePathPart( identifier, isTerminal, translationContext );
currentPart = currentPart.resolvePathPart( unquotedIdentifier, identifier, isTerminal, translationContext );
}
private void reset() {
pathSoFar = null;
pathSoFar.setLength( 0 );
currentPart = rootSequencePart;
}
}

View File

@ -48,6 +48,7 @@ public class PluralAttributePath extends AbstractDomainPath {
@Override
public DomainPath resolvePathPart(
String name,
String identifier,
boolean isTerminal,
TranslationContext translationContext) {
final ModelPart subPart = pluralAttributeMapping.findSubPart( name, null );
@ -60,8 +61,11 @@ public class PluralAttributePath extends AbstractDomainPath {
return new DomainPathContinuation( navigablePath.append( name ), this, subPart );
}
if ( subPart instanceof ToOneAttributeMapping ) {
return new FkDomainPathContinuation( navigablePath.append( name ), this,
(ToOneAttributeMapping) subPart );
return new FkDomainPathContinuation(
navigablePath.append( name ),
this,
(ToOneAttributeMapping) subPart
);
}
// leaf case:

View File

@ -26,18 +26,30 @@ public class RootSequencePart implements SequencePart {
@Override
public SequencePart resolvePathPart(
String name,
String identifier,
boolean isTerminal,
TranslationContext translationContext) {
// could be a column-reference (isTerminal would have to be true) or a domain-path
final DomainPath subDomainPath = pluralAttributePath.resolvePathPart( name, isTerminal, translationContext );
final DomainPath subDomainPath = pluralAttributePath.resolvePathPart(
name,
identifier,
isTerminal,
translationContext
);
if ( subDomainPath != null ) {
return subDomainPath;
}
if ( isTerminal ) {
// assume a column-reference
return new ColumnReference( name, false, pluralAttributePath.getNavigablePath() );
return new ColumnReference(
translationContext.getFactory()
.getJdbcServices()
.getDialect()
.quote( identifier ),
false
);
}
throw new PathResolutionException(

View File

@ -0,0 +1,86 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.metamodel.mapping.ordering.ast;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.query.NullPrecedence;
import org.hibernate.query.SortOrder;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.spi.SqlAstCreationState;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.SelfRenderingExpression;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SortSpecification;
/**
* Represents a self-rendering expression, i.e. usually a literal used in an order-by fragment
*
* @apiNote This is a Hibernate-specific feature. For {@link jakarta.persistence.OrderBy} (JPA)
* all path references are expected to be domain paths (attributes).
*
* @author Christian Beikov
*/
public class SelfRenderingOrderingExpression implements OrderingExpression, SelfRenderingExpression {
private final String expression;
public SelfRenderingOrderingExpression(String expression) {
this.expression = expression;
}
public String getExpression() {
return expression;
}
@Override
public JdbcMappingContainer getExpressionType() {
return null;
}
@Override
public void renderToSql(
SqlAppender sqlAppender,
SqlAstTranslator<?> walker,
SessionFactoryImplementor sessionFactory) {
sqlAppender.append( expression );
}
@Override
public Expression resolve(
QuerySpec ast,
TableGroup tableGroup,
String modelPartName,
SqlAstCreationState creationState) {
return this;
}
@Override
public void apply(
QuerySpec ast,
TableGroup tableGroup,
String collation,
String modelPartName,
SortOrder sortOrder,
NullPrecedence nullPrecedence,
SqlAstCreationState creationState) {
final Expression expression = resolve( ast, tableGroup, modelPartName, creationState );
// It makes no sense to order by an expression multiple times
// SQL Server even reports a query error in this case
if ( ast.hasSortSpecifications() ) {
for ( SortSpecification sortSpecification : ast.getSortSpecifications() ) {
if ( sortSpecification.getSortExpression() == expression ) {
return;
}
}
}
ast.addSortSpecification( new SortSpecification( expression, collation, sortOrder, nullPrecedence ) );
}
}
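Such expressions typically originate from a Hibernate-specific order-by clause that mixes attribute paths with literals. A hypothetical mapping, with entity and attribute names assumed and the trailing literal included purely to exercise the new literal rule:

import java.util.List;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.OneToMany;

@Entity
class Task {
	@Id Long id;
	int priority;
	@ManyToOne Project project;
}

@Entity
class Project {
	@Id Long id;

	// `priority` resolves to a domain path on Task, while the trailing literal is kept
	// as-is and rendered through a SelfRenderingOrderingExpression
	@OneToMany(mappedBy = "project")
	@org.hibernate.annotations.OrderBy(clause = "priority desc, 1")
	List<Task> tasks;
}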

View File

@ -16,6 +16,7 @@ import org.hibernate.metamodel.mapping.ordering.TranslationContext;
public interface SequencePart {
SequencePart resolvePathPart(
String name,
String identifier,
boolean isTerminal,
TranslationContext translationContext);
}

View File

@ -6,6 +6,7 @@
*/
package org.hibernate.metamodel.model.domain.internal;
import org.hibernate.metamodel.model.domain.AllowableParameterType;
import org.hibernate.metamodel.UnsupportedMappingException;
import org.hibernate.metamodel.model.domain.AnyMappingDomainType;
import org.hibernate.metamodel.model.domain.BasicDomainType;
@ -19,7 +20,7 @@ import static jakarta.persistence.metamodel.Bindable.BindableType.SINGULAR_ATTRI
/**
* @author Steve Ebersole
*/
public class AnyMappingSqmPathSource<J> extends AbstractSqmPathSource<J> {
public class AnyMappingSqmPathSource<J> extends AbstractSqmPathSource<J> implements AllowableParameterType<J> {
private final SqmPathSource<?> keyPathSource;
@SuppressWarnings("WeakerAccess")
@ -58,4 +59,14 @@ public class AnyMappingSqmPathSource<J> extends AbstractSqmPathSource<J> {
return new SqmAnyValuedSimplePath<>( navigablePath, this, lhs, lhs.nodeBuilder() );
}
@Override
public PersistenceType getPersistenceType() {
// todo (6.0): no idea what is best here
return PersistenceType.EMBEDDABLE;
}
@Override
public Class<J> getJavaType() {
return getBindableJavaType();
}
}
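Implementing AllowableParameterType here means an any-valued path can now serve as the inferred type of a query parameter. A minimal sketch; Order, its @Any association payment, somePayment and session are assumptions, not part of this commit:

// assuming Order#payment is mapped with @Any and `session` is an open Session
List<Order> orders = session.createQuery(
		"select o from Order o where o.payment = :payment", Order.class )
		.setParameter( "payment", somePayment )
		.getResultList();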

View File

@ -87,7 +87,7 @@ public class DiscriminatorSqmPath extends AbstractSqmPath implements SelfInterpr
}
@Override
public SemanticPathPart resolvePathPart(String name, boolean isTerminal, SqmCreationState creationState) {
public SqmPath<?> resolvePathPart(String name, boolean isTerminal, SqmCreationState creationState) {
throw new IllegalStateException( "Discriminator cannot be de-referenced" );
}

View File

@ -10,11 +10,14 @@ import java.io.ObjectStreamException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.function.Consumer;
import java.util.stream.Stream;
import jakarta.persistence.EntityGraph;
@ -79,16 +82,16 @@ public class JpaMetamodelImpl implements JpaMetamodel, Serializable {
private final TypeConfiguration typeConfiguration;
private final JpaCompliance jpaCompliance;
private final Map<String, EntityDomainType<?>> jpaEntityTypeMap = new ConcurrentHashMap<>();
private final Map<Class<?>, MappedSuperclassDomainType<?>> jpaMappedSuperclassTypeMap = new ConcurrentHashMap<>();
private final Map<Class, EmbeddableDomainType<?>> jpaEmbeddableDescriptorMap = new ConcurrentHashMap<>();
private final Map<String, Map<Class<?>, Enum<?>>> allowedEnumLiteralTexts = new ConcurrentHashMap<>();
private final Map<String, EntityDomainType<?>> jpaEntityTypeMap = new TreeMap<>(); // Need ordering for deterministic implementers list in SqmPolymorphicRootDescriptor
private final Map<Class<?>, MappedSuperclassDomainType<?>> jpaMappedSuperclassTypeMap = new HashMap<>();
private final Map<Class, EmbeddableDomainType<?>> jpaEmbeddableDescriptorMap = new HashMap<>();
private final Map<String, Map<Class<?>, Enum<?>>> allowedEnumLiteralTexts = new HashMap<>();
private final transient Map<String, RootGraphImplementor> entityGraphMap = new ConcurrentHashMap<>();
private final Map<Class, SqmPolymorphicRootDescriptor<?>> polymorphicEntityReferenceMap = new ConcurrentHashMap<>();
private final Map<Class, String> entityProxyInterfaceMap = new ConcurrentHashMap<>();
private final Map<Class, String> entityProxyInterfaceMap = new HashMap<>();
private final Map<String, ImportInfo<?>> nameToImportMap = new ConcurrentHashMap<>();
@ -465,6 +468,18 @@ public class JpaMetamodelImpl implements JpaMetamodel, Serializable {
visitEntityTypes(
entityDomainType -> {
if ( javaType.isAssignableFrom( entityDomainType.getJavaType() ) ) {
final ManagedDomainType<?> superType = entityDomainType.getSuperType();
// If the entity super type is also assignable, skip adding this entity type
if ( superType instanceof EntityDomainType<?>
&& javaType.isAssignableFrom( superType.getJavaType() ) ) {
final Queryable entityPersister = (Queryable) typeConfiguration.getSessionFactory()
.getMetamodel()
.getEntityDescriptor( ( (EntityDomainType<?>) superType ).getHibernateEntityName() );
// But only skip adding this type if the parent doesn't require explicit polymorphism
if ( !entityPersister.isExplicitPolymorphism() ) {
return;
}
}
final Queryable entityPersister = (Queryable) typeConfiguration.getSessionFactory()
.getMetamodel()
.getEntityDescriptor( entityDomainType.getHibernateEntityName() );
@ -531,11 +546,11 @@ public class JpaMetamodelImpl implements JpaMetamodel, Serializable {
this.allowedEnumLiteralTexts.computeIfAbsent(
enumConstant.name(),
k -> new ConcurrentHashMap<>()
k -> new HashMap<>()
).put( enumClass, enumConstant );
this.allowedEnumLiteralTexts.computeIfAbsent(
qualifiedEnumLiteral,
k -> new ConcurrentHashMap<>()
k -> new HashMap<>()
).put( enumClass, enumConstant );
}
}
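The new check matters for polymorphic split queries against non-entity types: when an implementor's entity super type also matches the requested Java type, the subtype is skipped, so the split produces one query per topmost matching entity rather than one per subclass. A hypothetical hierarchy, names assumed:

import jakarta.persistence.Entity;
import jakarta.persistence.Id;

interface Payment {}

@Entity
class CashPayment implements Payment {
	@Id Long id;
}

@Entity
class DiscountedCashPayment extends CashPayment {
}

// "select p from Payment p" now expands to a single query against CashPayment;
// DiscountedCashPayment is not added as a separate implementor because its super type
// already matches Payment and does not request explicit polymorphism.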

View File

@ -1,26 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.persister.collection;
/**
* The names of all the collection properties.
*
* @author josh
*/
public final class CollectionPropertyNames {
private CollectionPropertyNames() {
}
public static final String COLLECTION_SIZE = "size";
public static final String COLLECTION_ELEMENTS = "elements";
public static final String COLLECTION_INDICES = "indices";
public static final String COLLECTION_MAX_INDEX = "maxIndex";
public static final String COLLECTION_MIN_INDEX = "minIndex";
public static final String COLLECTION_MAX_ELEMENT = "maxElement";
public static final String COLLECTION_MIN_ELEMENT = "minElement";
public static final String COLLECTION_INDEX = "index";
}

View File

@ -5558,10 +5558,15 @@ public abstract class AbstractEntityPersister
creationProcess.registerInitializationCallback(
"Entity(" + getEntityName() + ") `sqmMultiTableMutationStrategy` interpretation",
() -> {
sqmMultiTableMutationStrategy = interpretSqmMultiTableStrategy(
this,
creationProcess
);
try {
sqmMultiTableMutationStrategy = interpretSqmMultiTableStrategy(
this,
creationProcess
);
}
catch (Exception ex) {
return false;
}
if ( sqmMultiTableMutationStrategy == null ) {
return false;
}

View File

@ -939,8 +939,8 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
}
final Object value = getDiscriminatorValue();
final boolean hasNotNullDiscriminator = value == NOT_NULL_DISCRIMINATOR;
final boolean hasNullDiscrininator = value == NULL_DISCRIMINATOR;
if ( hasNotNullDiscriminator || hasNullDiscrininator ) {
final boolean hasNullDiscriminator = value == NULL_DISCRIMINATOR;
if ( hasNotNullDiscriminator || hasNullDiscriminator ) {
final NullnessPredicate nullnessPredicate = new NullnessPredicate( sqlExpression );
if ( hasNotNullDiscriminator ) {
return new NegatedPredicate( nullnessPredicate );

View File

@ -10,7 +10,6 @@ import java.lang.reflect.Field;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.query.criteria.JpaPath;
import org.hibernate.query.hql.HqlLogging;
import org.hibernate.query.hql.spi.DotIdentifierConsumer;
import org.hibernate.query.hql.spi.SemanticPathPart;
@ -48,7 +47,7 @@ import org.hibernate.type.descriptor.java.spi.JavaTypeRegistry;
public class BasicDotIdentifierConsumer implements DotIdentifierConsumer {
private final SqmCreationState creationState;
private String pathSoFar;
private StringBuilder pathSoFar = new StringBuilder();
private SemanticPathPart currentPart;
public BasicDotIdentifierConsumer(SqmCreationState creationState) {
@ -76,12 +75,10 @@ public class BasicDotIdentifierConsumer implements DotIdentifierConsumer {
reset();
}
if ( pathSoFar == null ) {
pathSoFar = identifier;
}
else {
pathSoFar += ( '.' + identifier );
if ( pathSoFar.length() != 0 ) {
pathSoFar.append( '.' );
}
pathSoFar.append( identifier );
HqlLogging.QUERY_LOGGER.tracef(
"BasicDotIdentifierHandler#consumeIdentifier( %s, %s, %s ) - %s",
@ -102,7 +99,7 @@ public class BasicDotIdentifierConsumer implements DotIdentifierConsumer {
}
protected void reset() {
pathSoFar = null;
pathSoFar.setLength( 0 );
currentPart = createBasePart();
}
@ -179,7 +176,8 @@ public class BasicDotIdentifierConsumer implements DotIdentifierConsumer {
return this;
}
final String importableName = creationContext.getJpaMetamodel().qualifyImportableName( pathSoFar );
final String path = pathSoFar.toString();
final String importableName = creationContext.getJpaMetamodel().qualifyImportableName( path );
if ( importableName != null ) {
final EntityDomainType<?> entityDomainType = creationContext.getJpaMetamodel().entity( importableName );
if ( entityDomainType != null ) {
@ -189,7 +187,7 @@ public class BasicDotIdentifierConsumer implements DotIdentifierConsumer {
final SqmFunctionDescriptor functionDescriptor = creationContext.getQueryEngine()
.getSqmFunctionRegistry()
.findFunctionDescriptor( pathSoFar );
.findFunctionDescriptor( path );
if ( functionDescriptor != null ) {
return functionDescriptor.generateSqmExpression(
null,
@ -212,10 +210,10 @@ public class BasicDotIdentifierConsumer implements DotIdentifierConsumer {
// }
// see if it is a named field/enum reference
final int splitPosition = pathSoFar.lastIndexOf( '.' );
final int splitPosition = path.lastIndexOf( '.' );
if ( splitPosition > 0 ) {
final String prefix = pathSoFar.substring( 0, splitPosition );
final String terminal = pathSoFar.substring( splitPosition + 1 );
final String prefix = path.substring( 0, splitPosition );
final String terminal = path.substring( splitPosition + 1 );
//TODO: try interpreting paths of form foo.bar.Foo.Bar as foo.bar.Foo$Bar
try {
@ -254,7 +252,7 @@ public class BasicDotIdentifierConsumer implements DotIdentifierConsumer {
}
}
throw new ParsingException( "Could not interpret dot-ident : " + pathSoFar );
throw new ParsingException( "Could not interpret dot-ident : " + path );
}
protected void validateAsRoot(SqmFrom<?, ?> pathRoot) {

View File

@ -7,19 +7,11 @@
package org.hibernate.query.hql.internal;
import org.hibernate.NotYetImplementedFor6Exception;
import org.hibernate.metamodel.mapping.ModelPartContainer;
import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.query.hql.HqlLogging;
import org.hibernate.query.hql.spi.SemanticPathPart;
import org.hibernate.query.hql.spi.SqmCreationState;
import org.hibernate.query.sqm.tree.domain.SqmPath;
import org.hibernate.query.sqm.tree.expression.SqmExpression;
import org.hibernate.query.sqm.tree.from.SqmAttributeJoin;
import org.hibernate.query.sqm.tree.from.SqmCrossJoin;
import org.hibernate.query.sqm.tree.from.SqmEntityJoin;
import org.hibernate.query.sqm.tree.from.SqmFrom;
import org.hibernate.query.sqm.tree.from.SqmJoin;
import org.hibernate.query.sqm.tree.from.SqmRoot;
/**
* Specialized "intermediate" SemanticPathPart for processing domain model paths
@ -49,53 +41,7 @@ public class DomainPathPart implements SemanticPathPart {
currentPath,
name
);
final SqmPath<?> reusablePath = currentPath.getReusablePath( name );
if ( reusablePath != null ) {
currentPath = reusablePath;
}
else {
// Try to resolve an existing attribute join without ON clause
SqmPath<?> resolvedPath = null;
if ( currentPath instanceof SqmFrom<?, ?> ) {
ModelPartContainer modelPartContainer = null;
for ( SqmJoin<?, ?> sqmJoin : ( (SqmFrom<?, ?>) currentPath ).getSqmJoins() ) {
if ( sqmJoin instanceof SqmAttributeJoin<?, ?>
&& name.equals( sqmJoin.getReferencedPathSource().getPathName() ) ) {
final SqmAttributeJoin<?, ?> attributeJoin = (SqmAttributeJoin<?, ?>) sqmJoin;
if ( attributeJoin.getOn() == null ) {
// todo (6.0): to match the expectation of the JPA spec I think we also have to check
// that the join type is INNER or the default join type for the attribute,
// but as far as I understand, in 5.x we expect to ignore this behavior
// if ( attributeJoin.getSqmJoinType() != SqmJoinType.INNER ) {
// if ( attributeJoin.getAttribute().isCollection() ) {
// continue;
// }
// if ( modelPartContainer == null ) {
// modelPartContainer = findModelPartContainer( attributeJoin, creationState );
// }
// final TableGroupJoinProducer joinProducer = (TableGroupJoinProducer) modelPartContainer.findSubPart(
// name,
// null
// );
// if ( attributeJoin.getSqmJoinType().getCorrespondingSqlJoinType() != joinProducer.getDefaultSqlAstJoinType( null ) ) {
// continue;
// }
// }
resolvedPath = sqmJoin;
if ( attributeJoin.isFetched() ) {
break;
}
}
}
}
}
if ( resolvedPath == null ) {
currentPath = currentPath.get( name );
}
else {
currentPath = resolvedPath;
}
}
currentPath = currentPath.resolvePathPart( name, isTerminal, creationState );
if ( isTerminal ) {
return currentPath;
}
@ -104,45 +50,6 @@ public class DomainPathPart implements SemanticPathPart {
}
}
private ModelPartContainer findModelPartContainer(SqmAttributeJoin<?, ?> attributeJoin, SqmCreationState creationState) {
final SqmFrom<?, ?> lhs = attributeJoin.getLhs();
if ( lhs instanceof SqmAttributeJoin<?, ?> ) {
final SqmAttributeJoin<?, ?> lhsAttributeJoin = (SqmAttributeJoin<?, ?>) lhs;
if ( lhsAttributeJoin.getReferencedPathSource() instanceof EntityDomainType<?> ) {
final String entityName = ( (EntityDomainType<?>) lhsAttributeJoin.getReferencedPathSource() ).getHibernateEntityName();
return (ModelPartContainer) creationState.getCreationContext().getQueryEngine()
.getTypeConfiguration()
.getSessionFactory()
.getMetamodel()
.entityPersister( entityName )
.findSubPart( attributeJoin.getAttribute().getName(), null );
}
else {
return (ModelPartContainer) findModelPartContainer( lhsAttributeJoin, creationState )
.findSubPart( attributeJoin.getAttribute().getName(), null );
}
}
else {
final String entityName;
if ( lhs instanceof SqmRoot<?> ) {
entityName = ( (SqmRoot<?>) lhs ).getEntityName();
}
else if ( lhs instanceof SqmEntityJoin<?> ) {
entityName = ( (SqmEntityJoin<?>) lhs ).getEntityName();
}
else {
assert lhs instanceof SqmCrossJoin<?>;
entityName = ( (SqmCrossJoin<?>) lhs ).getEntityName();
}
return (ModelPartContainer) creationState.getCreationContext().getQueryEngine()
.getTypeConfiguration()
.getSessionFactory()
.getMetamodel()
.entityPersister( entityName )
.findSubPart( attributeJoin.getAttribute().getName(), null );
}
}
@Override
public SqmPath<?> resolveIndexedAccess(
SqmExpression<?> selector,

View File

@ -9,7 +9,6 @@ package org.hibernate.query.hql.internal;
import java.util.Locale;
import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.query.NavigablePath;
import org.hibernate.query.SemanticException;
import org.hibernate.query.hql.HqlInterpretationException;
import org.hibernate.query.hql.spi.DotIdentifierConsumer;
@ -19,11 +18,13 @@ import org.hibernate.query.sqm.SqmJoinable;
import org.hibernate.query.sqm.SqmPathSource;
import org.hibernate.query.hql.spi.SqmCreationProcessingState;
import org.hibernate.query.hql.spi.SqmCreationState;
import org.hibernate.query.sqm.spi.SqmCreationHelper;
import org.hibernate.query.sqm.tree.SqmJoinType;
import org.hibernate.query.sqm.tree.domain.SqmPolymorphicRootDescriptor;
import org.hibernate.query.sqm.tree.from.SqmAttributeJoin;
import org.hibernate.query.sqm.tree.from.SqmEntityJoin;
import org.hibernate.query.sqm.tree.from.SqmFrom;
import org.hibernate.query.sqm.tree.from.SqmJoin;
import org.hibernate.query.sqm.tree.from.SqmRoot;
import org.jboss.logging.Logger;
@ -37,7 +38,7 @@ public class QualifiedJoinPathConsumer implements DotIdentifierConsumer {
private static final Logger log = Logger.getLogger( QualifiedJoinPathConsumer.class );
private final SqmCreationState creationState;
private final SqmRoot sqmRoot;
private final SqmRoot<?> sqmRoot;
private final SqmJoinType joinType;
private final boolean fetch;
@ -110,7 +111,7 @@ public class QualifiedJoinPathConsumer implements DotIdentifierConsumer {
final SqmCreationProcessingState processingState = creationState.getCurrentProcessingState();
final SqmPathRegistry pathRegistry = processingState.getPathRegistry();
final SqmFrom pathRootByAlias = pathRegistry.findFromByAlias( identifier );
final SqmFrom<?, Object> pathRootByAlias = pathRegistry.findFromByAlias( identifier );
if ( pathRootByAlias != null ) {
// identifier is an alias (identification variable)
@ -127,7 +128,7 @@ public class QualifiedJoinPathConsumer implements DotIdentifierConsumer {
);
}
final SqmFrom pathRootByExposedNavigable = pathRegistry.findFromExposing( identifier );
final SqmFrom<?, Object> pathRootByExposedNavigable = pathRegistry.findFromExposing( identifier );
if ( pathRootByExposedNavigable != null ) {
return new AttributeJoinDelegate(
createJoin( pathRootByExposedNavigable, identifier, isTerminal ),
@ -152,7 +153,7 @@ public class QualifiedJoinPathConsumer implements DotIdentifierConsumer {
);
}
private SqmFrom createJoin(SqmFrom lhs, String identifier, boolean isTerminal) {
private SqmFrom<?, ?> createJoin(SqmFrom<?, Object> lhs, String identifier, boolean isTerminal) {
return createJoin(
lhs,
identifier,
@ -164,15 +165,16 @@ public class QualifiedJoinPathConsumer implements DotIdentifierConsumer {
);
}
private static SqmFrom createJoin(
SqmFrom lhs,
private static SqmFrom<?, Object> createJoin(
SqmFrom<?, Object> lhs,
String name,
SqmJoinType joinType,
String alias,
boolean fetch,
boolean isTerminal,
SqmCreationState creationState) {
final SqmPathSource subPathSource = lhs.getReferencedPathSource().findSubPathSource( name );
//noinspection unchecked
final SqmPathSource<Object> subPathSource = (SqmPathSource<Object>) lhs.getReferencedPathSource().findSubPathSource( name );
if ( subPathSource == null ) {
throw new HqlInterpretationException(
String.format(
@ -183,14 +185,22 @@ public class QualifiedJoinPathConsumer implements DotIdentifierConsumer {
)
);
}
final SqmAttributeJoin join = ( (SqmJoinable) subPathSource ).createSqmJoin(
if ( !isTerminal ) {
for ( SqmJoin<?, ?> sqmJoin : lhs.getSqmJoins() ) {
if ( sqmJoin.getAlias() == null && sqmJoin.getReferencedPathSource() == subPathSource ) {
//noinspection unchecked
return (SqmFrom<?, Object>) sqmJoin;
}
}
}
@SuppressWarnings("unchecked")
final SqmAttributeJoin<Object, Object> join = ( (SqmJoinable) subPathSource ).createSqmJoin(
lhs,
joinType,
isTerminal ? alias : null,
isTerminal ? alias : SqmCreationHelper.IMPLICIT_ALIAS,
fetch,
creationState
);
//noinspection unchecked
lhs.addSqmJoin( join );
creationState.getCurrentProcessingState().getPathRegistry().register( join );
return join;
@ -209,10 +219,10 @@ public class QualifiedJoinPathConsumer implements DotIdentifierConsumer {
private final boolean fetch;
private final String alias;
private SqmFrom currentPath;
private SqmFrom<?, Object> currentPath;
public AttributeJoinDelegate(
SqmFrom base,
SqmFrom<?, ?> base,
SqmJoinType joinType,
boolean fetch,
String alias,
@ -222,7 +232,8 @@ public class QualifiedJoinPathConsumer implements DotIdentifierConsumer {
this.alias = alias;
this.creationState = creationState;
this.currentPath = base;
//noinspection unchecked
this.currentPath = (SqmFrom<?, Object>) base;
}
@Override
@ -240,7 +251,7 @@ public class QualifiedJoinPathConsumer implements DotIdentifierConsumer {
@Override
public void consumeTreat(String entityName, boolean isTerminal) {
final EntityDomainType<?> entityDomainType = creationState.getCreationContext().getJpaMetamodel()
final EntityDomainType<Object> entityDomainType = creationState.getCreationContext().getJpaMetamodel()
.entity( entityName );
currentPath = currentPath.treatAs( entityDomainType, isTerminal ? alias : null );
creationState.getCurrentProcessingState().getPathRegistry().register( currentPath );
@ -254,7 +265,7 @@ public class QualifiedJoinPathConsumer implements DotIdentifierConsumer {
private static class ExpectingEntityJoinDelegate implements ConsumerDelegate {
private final SqmCreationState creationState;
private final SqmRoot sqmRoot;
private final SqmRoot<?> sqmRoot;
private final SqmJoinType joinType;
private final boolean fetch;
@ -267,7 +278,7 @@ public class QualifiedJoinPathConsumer implements DotIdentifierConsumer {
public ExpectingEntityJoinDelegate(
String identifier,
boolean isTerminal,
SqmRoot sqmRoot,
SqmRoot<?> sqmRoot,
SqmJoinType joinType,
String alias,
boolean fetch,

View File

@ -20,9 +20,14 @@ import org.hibernate.query.hql.spi.SqmCreationOptions;
import org.hibernate.query.hql.spi.SqmCreationProcessingState;
import org.hibernate.query.hql.spi.SqmCreationState;
import org.hibernate.query.hql.spi.SqmPathRegistry;
import org.hibernate.query.sqm.internal.SqmDmlCreationProcessingState;
import org.hibernate.query.sqm.internal.SqmQuerySpecCreationProcessingStateStandardImpl;
import org.hibernate.query.sqm.spi.BaseSemanticQueryWalker;
import org.hibernate.query.sqm.spi.SqmCreationContext;
import org.hibernate.query.sqm.tree.SqmDeleteOrUpdateStatement;
import org.hibernate.query.sqm.tree.SqmQuery;
import org.hibernate.query.sqm.tree.cte.SqmCteContainer;
import org.hibernate.query.sqm.tree.cte.SqmCteStatement;
import org.hibernate.query.sqm.tree.delete.SqmDeleteStatement;
import org.hibernate.query.sqm.tree.domain.SqmBasicValuedSimplePath;
import org.hibernate.query.sqm.tree.domain.SqmEmbeddedValuedSimplePath;
@ -81,6 +86,7 @@ import org.hibernate.type.descriptor.java.JavaType;
* @author Steve Ebersole
*/
public class QuerySplitter {
public static <R> SqmSelectStatement<R>[] split(
SqmSelectStatement<R> statement,
SessionFactoryImplementor sessionFactory) {
@ -128,10 +134,46 @@ public class QuerySplitter {
}
}
public static <R> SqmDeleteStatement<R>[] split(
SqmDeleteStatement<R> statement,
SessionFactoryImplementor sessionFactory) {
// We only allow unmapped polymorphism in a very restricted way. Specifically,
// the unmapped polymorphic reference can only be a root and can be the only
// root. Use that restriction to locate the unmapped polymorphic reference
final SqmRoot<?> unmappedPolymorphicReference = findUnmappedPolymorphicReference( statement );
if ( unmappedPolymorphicReference == null ) {
return new SqmDeleteStatement[] { statement };
}
final SqmPolymorphicRootDescriptor<?> unmappedPolymorphicDescriptor = (SqmPolymorphicRootDescriptor<?>) unmappedPolymorphicReference.getReferencedPathSource();
final SqmDeleteStatement<R>[] expanded = new SqmDeleteStatement[ unmappedPolymorphicDescriptor.getImplementors().size() ];
int i = -1;
for ( EntityDomainType<?> mappedDescriptor : unmappedPolymorphicDescriptor.getImplementors() ) {
i++;
final UnmappedPolymorphismReplacer<R> replacer = new UnmappedPolymorphismReplacer<>(
unmappedPolymorphicReference,
mappedDescriptor,
sessionFactory
);
expanded[i] = replacer.visitDeleteStatement( statement );
}
return expanded;
}
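// Illustrative usage (not part of this commit): expanding a bulk delete whose target is an
// unmapped polymorphic type, assuming `delete` is such an SqmDeleteStatement and
// `sessionFactory` is the SessionFactoryImplementor in scope:
//
//   SqmDeleteStatement<?>[] expanded = QuerySplitter.split( delete, sessionFactory );
//   // one statement per mapped implementor; a mapped-entity target yields a
//   // single-element array containing the original statement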
private static SqmRoot<?> findUnmappedPolymorphicReference(SqmDeleteOrUpdateStatement<?> queryPart) {
if ( queryPart.getTarget().getReferencedPathSource() instanceof SqmPolymorphicRootDescriptor<?> ) {
return queryPart.getTarget();
}
return null;
}
@SuppressWarnings("unchecked")
private static class UnmappedPolymorphismReplacer<R> extends BaseSemanticQueryWalker implements SqmCreationState {
private final SqmRoot unmappedPolymorphicFromElement;
private final EntityDomainType mappedDescriptor;
private final EntityDomainType<R> mappedDescriptor;
private final SqmCreationContext creationContext;
private final Stack<SqmCreationProcessingState> processingStateStack = new StandardStack<>();
@ -168,9 +210,56 @@ public class QuerySplitter {
throw new UnsupportedOperationException( "Not valid" );
}
@Override
public Object visitCteContainer(SqmCteContainer consumer) {
final SqmCteContainer processingQuery = (SqmCteContainer) getProcessingStateStack().getCurrent()
.getProcessingQuery();
processingQuery.setWithRecursive( consumer.isWithRecursive() );
for ( SqmCteStatement<?> cteStatement : consumer.getCteStatements() ) {
processingQuery.addCteStatement( visitCteStatement( cteStatement ) );
}
return processingQuery;
}
@Override
public SqmCteStatement<?> visitCteStatement(SqmCteStatement<?> sqmCteStatement) {
// No need to copy anything here
return sqmCteStatement;
}
@Override
public SqmDeleteStatement<R> visitDeleteStatement(SqmDeleteStatement<?> statement) {
throw new UnsupportedOperationException( "Not valid" );
final SqmRoot<?> sqmRoot = statement.getTarget();
final SqmRoot<R> copy = new SqmRoot<>(
mappedDescriptor,
sqmRoot.getExplicitAlias(),
sqmRoot.isAllowJoins(),
sqmRoot.nodeBuilder()
);
sqmFromCopyMap.put( sqmRoot, copy );
sqmPathCopyMap.put( sqmRoot.getNavigablePath(), copy );
final SqmDeleteStatement<R> statementCopy = new SqmDeleteStatement<>(
copy,
statement.getQuerySource(),
statement.nodeBuilder()
);
processingStateStack.push(
new SqmDmlCreationProcessingState(
statementCopy,
this
)
);
getProcessingStateStack().getCurrent().getPathRegistry().register( copy );
try {
visitCteContainer( statement );
statementCopy.setWhereClause( visitWhereClause( statement.getWhereClause() ) );
}
finally {
processingStateStack.pop();
}
return statementCopy;
}
@Override
@ -185,6 +274,7 @@ public class QuerySplitter {
)
);
try {
visitCteContainer( statement );
copy.setQueryPart( visitQueryPart( statement.getQueryPart() ) );
}
finally {
@ -292,15 +382,13 @@ public class QuerySplitter {
sqmRoot.isAllowJoins(),
sqmRoot.nodeBuilder()
);
return (SqmRoot<?>) getProcessingStateStack().getCurrent().getPathRegistry().resolvePath(
copy.getNavigablePath(),
navigablePath -> {
sqmFromCopyMap.put( sqmRoot, copy );
sqmPathCopyMap.put( sqmRoot.getNavigablePath(), copy );
currentFromClauseCopy.addRoot( copy );
return copy;
}
);
getProcessingStateStack().getCurrent().getPathRegistry().register( copy );
sqmFromCopyMap.put( sqmRoot, copy );
sqmPathCopyMap.put( sqmRoot.getNavigablePath(), copy );
if ( currentFromClauseCopy != null ) {
currentFromClauseCopy.addRoot( copy );
}
return copy;
}
@Override
@ -309,21 +397,17 @@ public class QuerySplitter {
if ( sqmFrom != null ) {
return (SqmCrossJoin<?>) sqmFrom;
}
return (SqmCrossJoin<?>) getProcessingStateStack().getCurrent().getPathRegistry().resolvePath(
join.getNavigablePath(),
navigablePath -> {
final SqmRoot<?> sqmRoot = (SqmRoot<?>) sqmFromCopyMap.get( join.findRoot() );
final SqmCrossJoin copy = new SqmCrossJoin<>(
join.getReferencedPathSource(),
join.getExplicitAlias(),
sqmRoot
);
sqmFromCopyMap.put( join, copy );
sqmPathCopyMap.put( join.getNavigablePath(), copy );
sqmRoot.addSqmJoin( copy );
return copy;
}
final SqmRoot<?> sqmRoot = (SqmRoot<?>) sqmFromCopyMap.get( join.findRoot() );
final SqmCrossJoin copy = new SqmCrossJoin<>(
join.getReferencedPathSource(),
join.getExplicitAlias(),
sqmRoot
);
getProcessingStateStack().getCurrent().getPathRegistry().register( copy );
sqmFromCopyMap.put( join, copy );
sqmPathCopyMap.put( join.getNavigablePath(), copy );
sqmRoot.addSqmJoin( copy );
return copy;
}
@Override
@ -332,22 +416,18 @@ public class QuerySplitter {
if ( sqmFrom != null ) {
return (SqmEntityJoin<?>) sqmFrom;
}
return (SqmEntityJoin<?>) getProcessingStateStack().getCurrent().getPathRegistry().resolvePath(
join.getNavigablePath(),
navigablePath -> {
final SqmRoot<?> sqmRoot = (SqmRoot<?>) sqmFromCopyMap.get( join.findRoot() );
final SqmEntityJoin copy = new SqmEntityJoin<>(
join.getReferencedPathSource(),
join.getExplicitAlias(),
join.getSqmJoinType(),
sqmRoot
);
sqmFromCopyMap.put( join, copy );
sqmPathCopyMap.put( join.getNavigablePath(), copy );
sqmRoot.addSqmJoin( copy );
return copy;
}
final SqmRoot<?> sqmRoot = (SqmRoot<?>) sqmFromCopyMap.get( join.findRoot() );
final SqmEntityJoin copy = new SqmEntityJoin<>(
join.getReferencedPathSource(),
join.getExplicitAlias(),
join.getSqmJoinType(),
sqmRoot
);
getProcessingStateStack().getCurrent().getPathRegistry().register( copy );
sqmFromCopyMap.put( join, copy );
sqmPathCopyMap.put( join.getNavigablePath(), copy );
sqmRoot.addSqmJoin( copy );
return copy;
}
@Override
@ -356,92 +436,69 @@ public class QuerySplitter {
if ( sqmFrom != null ) {
return (SqmAttributeJoin<?, ?>) sqmFrom;
}
return (SqmAttributeJoin<?, ?>) getProcessingStateStack().getCurrent().getPathRegistry().resolvePath(
join.getNavigablePath(),
navigablePath -> {
SqmAttributeJoin copy = join.makeCopy( getProcessingStateStack().getCurrent() );
sqmFromCopyMap.put( join, copy );
sqmPathCopyMap.put( join.getNavigablePath(), copy );
( (SqmFrom<?, ?>) copy.getParent() ).addSqmJoin( copy );
return copy;
}
);
SqmAttributeJoin copy = join.makeCopy( getProcessingStateStack().getCurrent() );
getProcessingStateStack().getCurrent().getPathRegistry().register( copy );
sqmFromCopyMap.put( join, copy );
sqmPathCopyMap.put( join.getNavigablePath(), copy );
( (SqmFrom<?, ?>) copy.getParent() ).addSqmJoin( copy );
return copy;
}
@Override
public SqmBasicValuedSimplePath<?> visitBasicValuedPath(SqmBasicValuedSimplePath<?> path) {
final SqmPathRegistry pathRegistry = getProcessingStateStack().getCurrent().getPathRegistry();
return (SqmBasicValuedSimplePath<?>) pathRegistry.resolvePath(
final SqmBasicValuedSimplePath<?> copy = new SqmBasicValuedSimplePath<>(
path.getNavigablePath(),
navigablePath -> {
final SqmBasicValuedSimplePath<?> copy = new SqmBasicValuedSimplePath<>(
navigablePath,
path.getReferencedPathSource(),
pathRegistry.findFromByPath( path.getLhs().getNavigablePath() ),
path.nodeBuilder()
);
sqmPathCopyMap.put( path.getNavigablePath(), copy );
return copy;
}
path.getReferencedPathSource(),
pathRegistry.findFromByPath( path.getLhs().getNavigablePath() ),
path.nodeBuilder()
);
pathRegistry.register( copy );
sqmPathCopyMap.put( path.getNavigablePath(), copy );
return copy;
}
@Override
public SqmEmbeddedValuedSimplePath<?> visitEmbeddableValuedPath(SqmEmbeddedValuedSimplePath<?> path) {
final SqmPathRegistry pathRegistry = getProcessingStateStack().getCurrent().getPathRegistry();
return (SqmEmbeddedValuedSimplePath<?>) pathRegistry.resolvePath(
final SqmEmbeddedValuedSimplePath<?> copy = new SqmEmbeddedValuedSimplePath<>(
path.getNavigablePath(),
navigablePath -> {
final SqmEmbeddedValuedSimplePath<?> copy = new SqmEmbeddedValuedSimplePath<>(
navigablePath,
path.getReferencedPathSource(),
pathRegistry.findFromByPath( path.getLhs().getNavigablePath() ),
path.nodeBuilder()
);
sqmPathCopyMap.put( path.getNavigablePath(), copy );
return copy;
}
path.getReferencedPathSource(),
pathRegistry.findFromByPath( path.getLhs().getNavigablePath() ),
path.nodeBuilder()
);
pathRegistry.register( copy );
sqmPathCopyMap.put( path.getNavigablePath(), copy );
return copy;
}
@Override
public SqmEntityValuedSimplePath<?> visitEntityValuedPath(SqmEntityValuedSimplePath<?> path) {
final SqmPathRegistry pathRegistry = getProcessingStateStack().getCurrent().getPathRegistry();
return (SqmEntityValuedSimplePath<?>) pathRegistry.resolvePath(
final SqmEntityValuedSimplePath<?> copy = new SqmEntityValuedSimplePath<>(
path.getNavigablePath(),
navigablePath -> {
final SqmEntityValuedSimplePath<?> copy = new SqmEntityValuedSimplePath<>(
navigablePath,
path.getReferencedPathSource(),
pathRegistry.findFromByPath( path.getLhs().getNavigablePath() ),
path.nodeBuilder()
);
sqmPathCopyMap.put( path.getNavigablePath(), copy );
return copy;
}
path.getReferencedPathSource(),
pathRegistry.findFromByPath( path.getLhs().getNavigablePath() ),
path.nodeBuilder()
);
pathRegistry.register( copy );
sqmPathCopyMap.put( path.getNavigablePath(), copy );
return copy;
}
@Override
public SqmPluralValuedSimplePath<?> visitPluralValuedPath(SqmPluralValuedSimplePath<?> path) {
final SqmPathRegistry pathRegistry = getProcessingStateStack().getCurrent().getPathRegistry();
return (SqmPluralValuedSimplePath<?>) pathRegistry.resolvePath(
final SqmPluralValuedSimplePath<?> copy = new SqmPluralValuedSimplePath<>(
path.getNavigablePath(),
navigablePath -> {
final SqmPluralValuedSimplePath<?> copy = new SqmPluralValuedSimplePath<>(
navigablePath,
path.getReferencedPathSource(),
pathRegistry.findFromByPath( path.getLhs().getNavigablePath() ),
path.nodeBuilder()
);
sqmPathCopyMap.put( path.getNavigablePath(), copy );
return copy;
}
path.getReferencedPathSource(),
pathRegistry.findFromByPath( path.getLhs().getNavigablePath() ),
path.nodeBuilder()
);
pathRegistry.register( copy );
sqmPathCopyMap.put( path.getNavigablePath(), copy );
return copy;
}
@Override

View File

@ -41,6 +41,7 @@ import org.hibernate.grammars.hql.HqlLexer;
import org.hibernate.grammars.hql.HqlParser;
import org.hibernate.grammars.hql.HqlParserBaseVisitor;
import org.hibernate.internal.util.CharSequenceHelper;
import org.hibernate.internal.util.QuotingHelper;
import org.hibernate.internal.util.collections.Stack;
import org.hibernate.internal.util.collections.StandardStack;
import org.hibernate.metamodel.CollectionClassification;
@ -106,7 +107,6 @@ import org.hibernate.query.sqm.tree.domain.SqmMinIndexPath;
import org.hibernate.query.sqm.tree.domain.SqmPath;
import org.hibernate.query.sqm.tree.domain.SqmPluralValuedSimplePath;
import org.hibernate.query.sqm.tree.domain.SqmPolymorphicRootDescriptor;
import org.hibernate.query.sqm.tree.domain.SqmTreatedPath;
import org.hibernate.query.sqm.tree.expression.SqmAliasedNodeRef;
import org.hibernate.query.sqm.tree.expression.SqmAny;
import org.hibernate.query.sqm.tree.expression.SqmBinaryArithmetic;
@ -134,7 +134,6 @@ import org.hibernate.query.sqm.tree.expression.SqmToDuration;
import org.hibernate.query.sqm.tree.expression.SqmTrimSpecification;
import org.hibernate.query.sqm.tree.expression.SqmTuple;
import org.hibernate.query.sqm.tree.expression.SqmUnaryOperation;
import org.hibernate.query.sqm.tree.from.DowncastLocation;
import org.hibernate.query.sqm.tree.from.SqmAttributeJoin;
import org.hibernate.query.sqm.tree.from.SqmCrossJoin;
import org.hibernate.query.sqm.tree.from.SqmEntityJoin;
@ -339,7 +338,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
final ParseTree parseTree = ctx.getChild( 0 );
if ( parseTree instanceof HqlParser.SelectStatementContext ) {
final SqmSelectStatement<R> selectStatement = visitSelectStatement( (HqlParser.SelectStatementContext) parseTree );
selectStatement.getQueryPart().validateQueryGroupFetchStructure();
selectStatement.getQueryPart().validateFetchStructureAndOwners();
return selectStatement;
}
else if ( parseTree instanceof HqlParser.InsertStatementContext ) {
@ -425,6 +424,11 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
dmlTargetIndex + 1
);
final SqmRoot<R> root = visitDmlTarget( dmlTargetContext );
if ( root.getReferencedPathSource() instanceof SqmPolymorphicRootDescriptor<?> ) {
throw new SemanticException(
"Can't create an INSERT for a non entity name: " + root.getReferencedPathSource().getHibernateEntityName()
);
}
final HqlParser.QueryExpressionContext queryExpressionContext = ctx.queryExpression();
if ( queryExpressionContext != null ) {
@ -477,10 +481,12 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
processingState.getPathRegistry().register( root );
try {
for ( HqlParser.ValuesContext values : ctx.valuesList().values() ) {
SqmValues sqmValues = new SqmValues();
for ( HqlParser.ExpressionContext expressionContext : values.expression() ) {
sqmValues.getExpressions().add( (SqmExpression<?>) expressionContext.accept( this ) );
final HqlParser.ValuesListContext valuesListContext = ctx.valuesList();
for ( int i = 1; i < valuesListContext.getChildCount(); i += 2 ) {
final ParseTree values = valuesListContext.getChild( i );
final SqmValues sqmValues = new SqmValues();
for ( int j = 1; j < values.getChildCount(); j += 2 ) {
sqmValues.getExpressions().add( (SqmExpression<?>) values.getChild( j ).accept( this ) );
}
insertStatement.getValuesList().add( sqmValues );
}
@ -506,6 +512,11 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
final int dmlTargetIndex = versioned ? 2 : 1;
final HqlParser.DmlTargetContext dmlTargetContext = (HqlParser.DmlTargetContext) ctx.getChild( dmlTargetIndex );
final SqmRoot<R> root = visitDmlTarget( dmlTargetContext );
if ( root.getReferencedPathSource() instanceof SqmPolymorphicRootDescriptor<?> ) {
throw new SemanticException(
"Can't create an UPDATE for a non entity name: " + root.getReferencedPathSource().getHibernateEntityName()
);
}
final SqmUpdateStatement<R> updateStatement = new SqmUpdateStatement<>( root, creationContext.getNodeBuilder() );
parameterCollector = updateStatement;
@ -884,7 +895,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
}
else {
resultIdentifier = applyJpaCompliance(
visitResultIdentifier( (HqlParser.ResultIdentifierContext) ctx.getChild( 1 ) )
visitIdentificationVariableDef( (HqlParser.IdentificationVariableDefContext) ctx.getChild( 1 ) )
);
}
final SqmSelectableNode<?> selectableNode = visitSelectableNode( ctx );
@ -912,16 +923,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
private SqmSelectableNode<?> visitSelectableNode(HqlParser.SelectionContext ctx) {
final ParseTree subCtx = ctx.getChild( 0 ).getChild( 0 );
if ( subCtx instanceof HqlParser.DynamicInstantiationContext ) {
return visitDynamicInstantiation( (HqlParser.DynamicInstantiationContext) subCtx );
}
else if ( subCtx instanceof HqlParser.JpaSelectObjectSyntaxContext ) {
return visitJpaSelectObjectSyntax( (HqlParser.JpaSelectObjectSyntaxContext) subCtx );
}
else if ( subCtx instanceof HqlParser.MapEntrySelectionContext ) {
return visitMapEntrySelection( (HqlParser.MapEntrySelectionContext) subCtx );
}
else if ( subCtx instanceof HqlParser.ExpressionContext ) {
if ( subCtx instanceof HqlParser.ExpressionOrPredicateContext ) {
final SqmExpression<?> sqmExpression = (SqmExpression<?>) subCtx.accept( this );
if ( sqmExpression instanceof SqmPath ) {
final SqmPath<?> sqmPath = (SqmPath<?>) sqmExpression;
@ -945,40 +947,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
return sqmExpression;
}
throw new ParsingException( "Unexpected selection rule type : " + ctx.getText() );
}
@Override
public String visitResultIdentifier(HqlParser.ResultIdentifierContext resultIdentifierContext) {
if ( resultIdentifierContext != null ) {
if ( resultIdentifierContext.getChildCount() == 1 ) {
return resultIdentifierContext.getText();
}
else {
final HqlParser.IdentifierContext identifierContext = (HqlParser.IdentifierContext) resultIdentifierContext.getChild( 1 );
final Token aliasToken = identifierContext.getStart();
final String explicitAlias = aliasToken.getText();
if ( aliasToken.getType() != IDENTIFIER ) {
// we have a reserved word used as an identification variable.
if ( creationOptions.useStrictJpaCompliance() ) {
throw new StrictJpaComplianceViolation(
String.format(
Locale.ROOT,
"Strict JPQL compliance was violated : %s [%s]",
StrictJpaComplianceViolation.Type.RESERVED_WORD_USED_AS_ALIAS.description(),
explicitAlias
),
StrictJpaComplianceViolation.Type.RESERVED_WORD_USED_AS_ALIAS
);
}
}
return explicitAlias;
}
}
return null;
return (SqmSelectableNode<?>) subCtx.accept( this );
}
@Override
@ -1041,15 +1010,13 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
public SqmDynamicInstantiationArgument<?> visitDynamicInstantiationArg(HqlParser.DynamicInstantiationArgContext ctx) {
final String alias;
if ( ctx.getChildCount() > 1 ) {
alias = ctx.getChild( ctx.getChildCount() - 1 ).getText();
alias = visitIdentificationVariableDef( (HqlParser.IdentificationVariableDefContext) ctx.getChild( ctx.getChildCount() - 1 ) );
}
else {
alias = null;
}
final SqmSelectableNode<?> argExpression = visitDynamicInstantiationArgExpression(
(HqlParser.DynamicInstantiationArgExpressionContext) ctx.getChild( 0 )
);
final SqmSelectableNode<?> argExpression = (SqmSelectableNode<?>) ctx.getChild( 0 ).accept( this );
final SqmDynamicInstantiationArgument<?> argument = new SqmDynamicInstantiationArgument<>(
argExpression,
@ -1068,19 +1035,6 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
return argument;
}
@Override
public SqmSelectableNode<?> visitDynamicInstantiationArgExpression(HqlParser.DynamicInstantiationArgExpressionContext ctx) {
final ParseTree parseTree = ctx.getChild( 0 );
if ( parseTree instanceof HqlParser.DynamicInstantiationContext ) {
return visitDynamicInstantiation( (HqlParser.DynamicInstantiationContext) parseTree );
}
else if ( parseTree instanceof HqlParser.ExpressionContext ) {
return (SqmExpression<?>) parseTree.accept( this );
}
throw new ParsingException( "Unexpected dynamic-instantiation-argument rule type : " + ctx.getText() );
}
@Override
public SqmPath<?> visitJpaSelectObjectSyntax(HqlParser.JpaSelectObjectSyntaxContext ctx) {
final String alias = ctx.getChild( 2 ).getText();
@ -1126,7 +1080,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
return new SqmAliasedNodeRef( position, integerDomainType, creationContext.getNodeBuilder() );
}
else if ( child instanceof HqlParser.IdentifierContext ) {
final String identifierText = child.getText();
final String identifierText = visitIdentifier( (HqlParser.IdentifierContext) child );
final Integer correspondingPosition = getCurrentProcessingState()
.getPathRegistry()
@ -1312,13 +1266,36 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
}
@Override
public SqmExpression<?> visitPathExpression(HqlParser.PathExpressionContext ctx) {
final HqlParser.PathContext path = (HqlParser.PathContext) ctx.getChild( 0 );
final Object accept = path.accept( this );
if ( accept instanceof DomainPathPart ) {
return ( (DomainPathPart) accept ).getSqmExpression();
public Object visitSyntacticPathExpression(HqlParser.SyntacticPathExpressionContext ctx) {
SemanticPathPart part = visitSyntacticDomainPath( (HqlParser.SyntacticDomainPathContext) ctx.getChild( 0 ) );
if ( ctx.getChildCount() == 2 ) {
dotIdentifierConsumerStack.push(
new BasicDotIdentifierConsumer( part, this ) {
@Override
protected void reset() {
}
}
);
try {
part = (SemanticPathPart) ctx.getChild( 1 ).accept( this );
}
finally {
dotIdentifierConsumerStack.pop();
}
}
return (SqmExpression<?>) accept;
if ( part instanceof DomainPathPart ) {
return ( (DomainPathPart) part ).getSqmExpression();
}
return (SqmExpression<?>) part;
}
@Override
public Object visitGeneralPathExpression(HqlParser.GeneralPathExpressionContext ctx) {
final SemanticPathPart part = visitGeneralPathFragment( (HqlParser.GeneralPathFragmentContext) ctx.getChild( 0 ) );
if ( part instanceof DomainPathPart ) {
return ( (DomainPathPart) part ).getSqmExpression();
}
return (SqmExpression<?>) part;
}
@Override
@ -1365,31 +1342,46 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
return (SqmExpression<?>) firstChild.accept( this );
}
public String getEntityName(HqlParser.EntityNameContext parserEntityName) {
final StringBuilder sb = new StringBuilder();
final int end = parserEntityName.getChildCount();
sb.append( visitIdentifier( (HqlParser.IdentifierContext) parserEntityName.getChild( 0 ) ) );
for ( int i = 2; i < end; i += 2 ) {
sb.append( '.' );
sb.append( visitIdentifier( (HqlParser.IdentifierContext) parserEntityName.getChild( i ) ) );
}
return sb.toString();
}
@Override
public String visitIdentifier(HqlParser.IdentifierContext ctx) {
final TerminalNode node = (TerminalNode) ctx.getChild( 0 );
if ( node.getSymbol().getType() == HqlParser.QUOTED_IDENTIFIER ) {
return QuotingHelper.unquoteIdentifier( node.getText() );
}
return node.getText();
}
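For orientation, a minimal standalone sketch of what the unquoting step does, assuming backtick-delimited identifiers with doubled backticks as the escape; the authoritative rules live in QuotingHelper.unquoteIdentifier, which is not part of this hunk.
public class QuotedIdentifierSketch {
    // Illustrative only: strips the delimiters and collapses the doubled escape.
    static String unquote(String quoted) {
        final String inner = quoted.substring( 1, quoted.length() - 1 );
        return inner.replace( "``", "`" );
    }

    public static void main(String[] args) {
        System.out.println( unquote( "`select`" ) ); // select - a keyword becomes usable as an identifier
    }
}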
@Override
public EntityDomainType<?> visitEntityName(HqlParser.EntityNameContext parserEntityName) {
final String entityName = parserEntityName.fullNameText;
final EntityDomainType<?> entityReference = resolveEntityReference( entityName );
final String entityName = getEntityName( parserEntityName );
final EntityDomainType<?> entityReference = getCreationContext()
.getJpaMetamodel()
.getHqlEntityReference( entityName );
if ( entityReference == null ) {
throw new UnknownEntityException( "Could not resolve entity name [" + entityName + "] as DML target", entityName );
}
checkFQNEntityNameJpaComplianceViolationIfNeeded( entityName, entityReference );
if ( entityReference instanceof SqmPolymorphicRootDescriptor<?> && getCreationOptions().useStrictJpaCompliance() ) {
throw new StrictJpaComplianceViolation(
"Encountered the use of a non entity name [" + entityName + "], " +
"but strict JPQL compliance was requested which doesn't allow this",
StrictJpaComplianceViolation.Type.NON_ENTITY_NAME
);
}
return entityReference;
}
private EntityDomainType<?> resolveEntityReference(String entityName) {
log.debugf( "Attempting to resolve path [%s] as entity reference...", entityName );
EntityDomainType<?> reference = null;
try {
entityName = creationContext.getJpaMetamodel().qualifyImportableName( entityName );
reference = creationContext.getJpaMetamodel().entity( entityName );
}
catch (Exception ignore) {
}
return reference;
}
@Override
public SqmFromClause visitFromClause(HqlParser.FromClauseContext parserFromClause) {
final SqmFromClause fromClause;
@ -1436,7 +1428,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
public SqmRoot<?> visitPathRoot(HqlParser.PathRootContext ctx) {
final HqlParser.EntityNameContext entityNameContext = (HqlParser.EntityNameContext) ctx.getChild( 0 );
final List<ParseTree> entityNameParseTreeChildren = entityNameContext.children;
final String name = entityNameContext.fullNameText;
final String name = getEntityName( entityNameContext );
log.debugf( "Handling root path - %s", name );
final EntityDomainType entityDescriptor = getCreationContext()
@ -1542,10 +1534,14 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
);
}
}
return identifierContext.getText();
return visitIdentifier( identifierContext );
}
else {
return lastChild.getText();
final TerminalNode node = (TerminalNode) lastChild;
if ( node.getSymbol().getType() == HqlParser.QUOTED_IDENTIFIER ) {
return QuotingHelper.unquoteIdentifier( node.getText() );
}
return node.getText();
}
}
@ -1569,7 +1565,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
private <T> void consumeCrossJoin(HqlParser.CrossJoinContext parserJoin, SqmRoot<T> sqmRoot) {
final HqlParser.PathRootContext pathRootContext = (HqlParser.PathRootContext) parserJoin.getChild( 2 );
final HqlParser.EntityNameContext entityNameContext = (HqlParser.EntityNameContext) pathRootContext.getChild( 0 );
final String name = entityNameContext.fullNameText;
final String name = getEntityName( entityNameContext );
SqmTreeCreationLogger.LOGGER.debugf( "Handling root path - %s", name );
@ -1640,12 +1636,17 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
identificationVariableDefContext = null;
}
final String alias = visitIdentificationVariableDef( identificationVariableDefContext );
final boolean fetch = parserJoin.getChild( 2 ) instanceof TerminalNode;
if ( fetch && processingStateStack.depth() > 1 ) {
throw new SemanticException( "fetch not allowed in subquery from-elements" );
}
dotIdentifierConsumerStack.push(
new QualifiedJoinPathConsumer(
sqmRoot,
joinType,
parserJoin.getChild( 2 ) instanceof TerminalNode,
fetch,
alias,
this
)
@ -1927,15 +1928,11 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
ctx = ctx.getChild( 0 );
}
if ( ctx instanceof HqlParser.PathContext && ctx.getChildCount() == 1 ) {
if ( ctx instanceof HqlParser.GeneralPathFragmentContext && ctx.getChildCount() == 1 ) {
ctx = ctx.getChild( 0 );
if ( ctx instanceof HqlParser.GeneralPathFragmentContext && ctx.getChildCount() == 1 ) {
ctx = ctx.getChild( 0 );
if ( ctx instanceof HqlParser.DotIdentifierSequenceContext ) {
return creationContext.getJpaMetamodel().getAllowedEnumLiteralTexts().get( ctx.getText() );
}
if ( ctx instanceof HqlParser.DotIdentifierSequenceContext ) {
return creationContext.getJpaMetamodel().getAllowedEnumLiteralTexts().get( ctx.getText() );
}
}
}
@ -2008,10 +2005,12 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
final List<SqmExpression<?>> listExpressions = new ArrayList<>( estimatedSize );
for ( int i = 1; i < size; i++ ) {
final ParseTree parseTree = tupleExpressionListContext.getChild( i );
if ( parseTree instanceof HqlParser.ExpressionContext ) {
final HqlParser.ExpressionContext expressionContext = (HqlParser.ExpressionContext) parseTree;
if ( parseTree instanceof HqlParser.ExpressionOrPredicateContext ) {
final ParseTree child = parseTree.getChild( 0 );
final HqlParser.ExpressionContext expressionContext;
final Map<Class<?>, Enum<?>> possibleEnumValues;
if ( isEnum && ( possibleEnumValues = getPossibleEnumValues( expressionContext ) ) != null ) {
if ( isEnum && child instanceof HqlParser.ExpressionContext
&& ( possibleEnumValues = getPossibleEnumValues( expressionContext = (HqlParser.ExpressionContext) child ) ) != null ) {
listExpressions.add(
resolveEnumShorthandLiteral(
expressionContext,
@ -2021,7 +2020,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
);
}
else {
listExpressions.add( (SqmExpression<?>) expressionContext.accept( this ) );
listExpressions.add( (SqmExpression<?>) child.accept( this ) );
}
}
}
@ -2367,7 +2366,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
final List<SqmExpression<?>> expressions = new ArrayList<>( estimateExpressionsCount );
for ( int i = 0; i < size; i++ ) {
final ParseTree parseTree = parentContext.getChild( i );
if ( parseTree instanceof HqlParser.ExpressionContext ) {
if ( parseTree instanceof HqlParser.ExpressionOrPredicateContext ) {
expressions.add( (SqmExpression<?>) parseTree.accept( this ) );
}
}
@ -2524,6 +2523,36 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
return (SqmExpression<?>) ctx.getChild( 0 ).accept( this );
}
@Override
public SqmExpression<?> visitUnaryNumericLiteralExpression(HqlParser.UnaryNumericLiteralExpressionContext ctx) {
final TerminalNode node = (TerminalNode) ctx.getChild( 1 ).getChild( 0 );
final String text;
if ( ( (TerminalNode) ctx.getChild( 0 ).getChild( 0 ) ).getSymbol().getType() == HqlParser.MINUS ) {
text = "-" + node.getText();
}
else {
text = node.getText();
}
switch ( node.getSymbol().getType() ) {
case HqlParser.INTEGER_LITERAL:
return integerOrLongLiteral( text );
case HqlParser.LONG_LITERAL:
return longLiteral( text );
case HqlParser.BIG_INTEGER_LITERAL:
return bigIntegerLiteral( text );
case HqlParser.HEX_LITERAL:
return hexLiteral( text );
case HqlParser.FLOAT_LITERAL:
return floatLiteral( text );
case HqlParser.DOUBLE_LITERAL:
return doubleLiteral( text );
case HqlParser.BIG_DECIMAL_LITERAL:
return bigDecimalLiteral( text );
default:
throw new ParsingException("Unexpected terminal node [" + text + "]");
}
}
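As a side note on why the minus sign is folded into the literal text before parsing rather than negating the parsed value afterwards, a plain-Java illustration (not part of the change):
public class SignedLiteralSketch {
    public static void main(String[] args) {
        // Integer.MIN_VALUE has no positive counterpart, so "parse then negate" cannot represent it
        System.out.println( Integer.valueOf( "-2147483648" ) ); // parses directly
        try {
            Integer.valueOf( "2147483648" );
        }
        catch (NumberFormatException e) {
            System.out.println( "2147483648 alone does not fit in an int" );
        }
    }
}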
@Override
public Object visitBinaryLiteral(HqlParser.BinaryLiteralContext ctx) {
final TerminalNode firstNode = (TerminalNode) ctx.getChild( 0 );
@ -2561,7 +2590,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
case HqlParser.STRING_LITERAL:
return stringLiteral( node.getText() );
case HqlParser.INTEGER_LITERAL:
return integerLiteral( node.getText() );
return integerOrLongLiteral( node.getText() );
case HqlParser.LONG_LITERAL:
return longLiteral( node.getText() );
case HqlParser.BIG_INTEGER_LITERAL:
@ -2680,7 +2709,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
final TerminalNode firstChild = (TerminalNode) ctx.getChild( 0 );
final String timezoneText;
if ( firstChild.getSymbol().getType() == HqlParser.STRING_LITERAL ) {
timezoneText = unescapeStringLiteral( ctx.getText() );
timezoneText = QuotingHelper.unquoteStringLiteral( ctx.getText() );
}
else {
timezoneText = ctx.getText();
@ -2864,77 +2893,9 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
);
}
private String unescapeStringLiteral(String text) {
// Unescape the parsed literal and handle escape sequences
final StringBuilder sb = new StringBuilder( text.length() - 2 );
final int end = text.length() - 1;
final char delimiter = text.charAt( 0 );
for ( int i = 1; i < end; i++ ) {
char c = text.charAt( i );
switch ( c ) {
case '\'':
if ( delimiter == '\'' ) {
i++;
}
break;
case '"':
if ( delimiter == '"' ) {
i++;
}
break;
case '\\':
if ( ( i + 1 ) < end ) {
char nextChar = text.charAt( ++i );
switch ( nextChar ) {
case 'b':
c = '\b';
break;
case 't':
c = '\t';
break;
case 'n':
c = '\n';
break;
case 'f':
c = '\f';
break;
case 'r':
c = '\r';
break;
case '\\':
c = '\\';
break;
case '\'':
c = '\'';
break;
case '"':
c = '"';
break;
case '`':
c = '`';
break;
case 'u':
c = (char) Integer.parseInt( text.substring( i + 1, i + 5 ), 16 );
i += 4;
break;
default:
sb.append( '\\' );
c = nextChar;
break;
}
}
break;
default:
break;
}
sb.append( c );
}
return sb.toString();
}
private SqmLiteral<String> stringLiteral(String text) {
return new SqmLiteral<>(
unescapeStringLiteral( text ),
QuotingHelper.unquoteStringLiteral( text ),
resolveExpressableTypeBasic( String.class ),
creationContext.getNodeBuilder()
);
@ -2950,6 +2911,35 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
);
}
private SqmLiteral<? extends Number> integerOrLongLiteral(String text) {
try {
final Integer value = Integer.valueOf( text );
return new SqmLiteral<>(
value,
resolveExpressableTypeBasic( Integer.class ),
creationContext.getNodeBuilder()
);
}
catch (NumberFormatException e) {
// This is at least what 5.x did
try {
final Long value = Long.valueOf( text );
return new SqmLiteral<>(
value,
resolveExpressableTypeBasic( Long.class ),
creationContext.getNodeBuilder()
);
}
catch (NumberFormatException e2) {
e.addSuppressed( e2 );
throw new LiteralNumberFormatException(
"Unable to convert sqm literal [" + text + "] to Integer",
e
);
}
}
}
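A standalone sketch of the Integer-then-Long fallback above, mirroring the 5.x behavior the comment refers to (plain Java, illustrative names):
public class IntegerOrLongSketch {
    static Number parseIntegerOrLong(String text) {
        try {
            return Integer.valueOf( text );
        }
        catch (NumberFormatException e) {
            // too large for int, so the literal is retried as a long instead of failing the query
            return Long.valueOf( text );
        }
    }

    public static void main(String[] args) {
        System.out.println( parseIntegerOrLong( "123" ).getClass().getSimpleName() );        // Integer
        System.out.println( parseIntegerOrLong( "4294967296" ).getClass().getSimpleName() ); // Long
    }
}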
private SqmLiteral<Integer> integerLiteral(String text) {
try {
final Integer value = Integer.valueOf( text );
@ -3132,7 +3122,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
@Override
public SqmExpression<?> visitJpaNonStandardFunction(HqlParser.JpaNonStandardFunctionContext ctx) {
final String functionName = unescapeStringLiteral( ctx.getChild( 2 ).getText() ).toLowerCase();
final String functionName = QuotingHelper.unquoteStringLiteral( ctx.getChild( 2 ).getText() ).toLowerCase();
final List<SqmTypedNode<?>> functionArguments;
if ( ctx.getChildCount() > 4 ) {
//noinspection unchecked
@ -3249,7 +3239,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
for ( ; i < size; i += 2 ) {
// we handle the final argument differently...
if ( i == lastIndex ) {
arguments.add( visitFinalFunctionArgument( (HqlParser.ExpressionContext) ctx.getChild( i ) ) );
arguments.add( visitFinalFunctionArgument( ctx.getChild( i ) ) );
}
else {
arguments.add( (SqmTypedNode<?>) ctx.getChild( i ).accept( this ) );
@ -3275,7 +3265,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
return arguments;
}
private SqmExpression<?> visitFinalFunctionArgument(HqlParser.ExpressionContext expression) {
private SqmExpression<?> visitFinalFunctionArgument(ParseTree expression) {
// the final argument to a function may accept multi-value parameter (varargs),
// but only if we are operating in non-strict JPA mode
parameterDeclarationContextStack.push( () -> !creationOptions.useStrictJpaCompliance() );
@ -3467,7 +3457,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
@Override
public Object visitFormat(HqlParser.FormatContext ctx) {
String format = unescapeStringLiteral( ctx.getChild( 0 ).getText() );
String format = QuotingHelper.unquoteStringLiteral( ctx.getChild( 0 ).getText() );
if (!FORMAT.matcher(format).matches()) {
throw new SemanticException("illegal format pattern: '" + format + "'");
}
@ -3843,7 +3833,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
@Override
public SqmLiteral<Character> visitTrimCharacter(HqlParser.TrimCharacterContext ctx) {
final String trimCharText = ctx != null
? unescapeStringLiteral( ctx.getText() )
? QuotingHelper.unquoteStringLiteral( ctx.getText() )
: " "; // JPA says space is the default
if ( trimCharText.length() != 1 ) {
@ -4059,7 +4049,19 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
final SqmPath<?> indexedPath = pathPart.resolveIndexedAccess( indexExpression, !hasIndexContinuation, this );
if ( hasIndexContinuation ) {
return (SemanticPathPart) idxCtx.getChild( 4 ).accept( this );
dotIdentifierConsumerStack.push(
new BasicDotIdentifierConsumer( indexedPath, this ) {
@Override
protected void reset() {
}
}
);
try {
return (SemanticPathPart) idxCtx.getChild( 4 ).accept( this );
}
finally {
dotIdentifierConsumerStack.pop();
}
}
return indexedPath;
}
@ -4079,7 +4081,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
assert identifierContext.getChildCount() == 1;
dotIdentifierConsumer.consumeIdentifier(
identifierContext.getChild( 0 ).getText(),
visitIdentifier( identifierContext ),
true,
! hasContinuations
);
@ -4090,7 +4092,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
final HqlParser.IdentifierContext identifier = (HqlParser.IdentifierContext) continuation.getChild( 1 );
assert identifier.getChildCount() == 1;
dotIdentifierConsumer.consumeIdentifier(
identifier.getChild( 0 ).getText(),
visitIdentifier( identifier ),
false,
i >= numberOfContinuations
);

View File

@ -8,13 +8,11 @@ package org.hibernate.query.hql.internal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Function;
import org.hibernate.internal.util.MutableInteger;
import org.hibernate.jpa.spi.JpaCompliance;
import org.hibernate.query.NavigablePath;
import org.hibernate.query.hql.HqlLogging;
@ -117,27 +115,6 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
}
}
@Override
public <X> SqmPath<X> findPath(NavigablePath path) {
final SqmPath<?> found = sqmPathByPath.get( path );
if ( found != null ) {
//noinspection unchecked
return (SqmPath<X>) found;
}
if ( associatedProcessingState.getParentProcessingState() != null ) {
final SqmFrom<?, X> containingQueryFrom = associatedProcessingState.getParentProcessingState()
.getPathRegistry()
.findFromByPath( path );
if ( containingQueryFrom != null ) {
// todo (6.0) create a correlation?
return containingQueryFrom;
}
}
return null;
}
@Override
public <X extends SqmFrom<?, ?>> X findFromByPath(NavigablePath navigablePath) {
final SqmFrom<?, ?> found = sqmFromByPath.get( navigablePath );
@ -247,21 +224,6 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
return (X) sqmFrom;
}
@Override
public <X> SqmPath<X> resolvePath(NavigablePath navigablePath, Function<NavigablePath, SqmPath<X>> creator) {
SqmTreeCreationLogger.LOGGER.tracef( "SqmProcessingIndex#resolvePath(NavigablePath) : %s", navigablePath );
final SqmPath<?> existing = sqmPathByPath.get( navigablePath );
if ( existing != null ) {
//noinspection unchecked
return (SqmPath<X>) existing;
}
final SqmPath<X> sqmPath = creator.apply( navigablePath );
register( sqmPath );
return sqmPath;
}
private boolean definesAttribute(SqmPathSource<?> containerType, String name) {
return containerType.findSubPathSource( name ) != null;
}

View File

@ -71,22 +71,6 @@ public interface SqmPathRegistry {
*/
<X extends SqmFrom<?, ?>> X resolveFrom(SqmPath<?> path);
/**
* Find an SqmPath by its NavigablePath. Will return a SqmFrom if the NavigablePath
* has (yet) been resolved to a SqmFrom. Otherwise, it will be a non-SqmFrom SqmPath
*
* @return matching SqmPath or {@code null}
*/
<X> SqmPath<X> findPath(NavigablePath path);
/**
* Similar to {@link #findPath}, but accepting a producer to be used
* to create and register a SqmPath if none yet registered.
*
* @return The existing or just-created SqmPath
*/
<X> SqmPath<X> resolvePath(NavigablePath path, Function<NavigablePath, SqmPath<X>> creator);
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// SqmSelection

View File

@ -27,8 +27,7 @@ public class QueryHelper {
return types[0];
}
//noinspection unchecked
SqmExpressable<? extends T> highest = highestPrecedenceType2( (SqmExpressable) types[0], types[1] );
SqmExpressable<? extends T> highest = highestPrecedenceType2( types[0], types[1] );
for ( int i = 2; i < types.length; i++ ) {
highest = highestPrecedenceType2( highest, types[i] );
}
@ -58,6 +57,9 @@ public class QueryHelper {
}
// any other precedence rules?
if ( type2.getExpressableJavaTypeDescriptor().isWider( type1.getExpressableJavaTypeDescriptor() ) ) {
return type2;
}
return type1;
}
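A minimal model of the added widening rule, with plain Java classes standing in for SqmExpressable/JavaType; the isWider helper below is hypothetical and deliberately tiny:
public class PrecedenceSketch {
    // stands in for JavaType#isWider: can 'self' represent every value of 'other'?
    static boolean isWider(Class<?> self, Class<?> other) {
        return self == Long.class && other == Integer.class;
    }

    static Class<?> highestPrecedence(Class<?> type1, Class<?> type2) {
        if ( type1 == null ) {
            return type2;
        }
        if ( type2 == null ) {
            return type1;
        }
        // prefer the wider operand, e.g. an Integer + Long expression resolves to Long
        return isWider( type2, type1 ) ? type2 : type1;
    }

    public static void main(String[] args) {
        System.out.println( highestPrecedence( Integer.class, Long.class ).getSimpleName() ); // Long
    }
}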

View File

@ -9,6 +9,7 @@ package org.hibernate.query.internal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@ -195,6 +196,8 @@ public class QueryParameterBindingsImpl implements QueryParameterBindings {
for ( QueryParameterBinding<?> binding : parameterBindingMap.values() ) {
final MappingModelExpressable<?> mappingType = determineMappingType( binding, persistenceContext );
assert mappingType instanceof JavaTypedExpressable;
//noinspection unchecked
final JavaType<Object> javaType = ( (JavaTypedExpressable<Object>) mappingType ).getExpressableJavaTypeDescriptor();
if ( binding.isMultiValued() ) {
for ( Object bindValue : binding.getBindValues() ) {
@ -203,8 +206,9 @@ public class QueryParameterBindingsImpl implements QueryParameterBindings {
final Object disassembled = mappingType.disassemble( bindValue, persistenceContext );
allBindValues.add( disassembled );
//noinspection unchecked
final int valueHashCode = ( (JavaTypedExpressable<Object>) mappingType ).getExpressableJavaTypeDescriptor().extractHashCode( bindValue );
final int valueHashCode = bindValue != null
? javaType.extractHashCode( bindValue )
: 0;
hashCode = 31 * hashCode + valueHashCode;
}
@ -215,8 +219,9 @@ public class QueryParameterBindingsImpl implements QueryParameterBindings {
final Object disassembled = mappingType.disassemble( bindValue, persistenceContext );
allBindValues.add( disassembled );
//noinspection unchecked
final int valueHashCode = ( (JavaTypedExpressable<Object>) mappingType ).getExpressableJavaTypeDescriptor().extractHashCode( bindValue );
final int valueHashCode = bindValue != null
? javaType.extractHashCode( bindValue )
: 0;
hashCode = 31 * hashCode + valueHashCode;
}
@ -249,12 +254,19 @@ public class QueryParameterBindingsImpl implements QueryParameterBindings {
}
if ( binding.isMultiValued() ) {
final Object firstBindValue = binding.getBindValues().iterator().next();
return typeConfiguration.getBasicTypeForJavaType( firstBindValue.getClass() );
final Iterator<?> iterator = binding.getBindValues().iterator();
Object firstNonNullBindValue = null;
while ( iterator.hasNext() && firstNonNullBindValue == null ) {
firstNonNullBindValue = iterator.next();
}
if ( firstNonNullBindValue != null ) {
return typeConfiguration.getBasicTypeForJavaType( firstNonNullBindValue.getClass() );
}
}
else {
else if ( binding.getBindValue() != null ) {
return typeConfiguration.getBasicTypeForJavaType( binding.getBindValue().getClass() );
}
return typeConfiguration.getBasicTypeForJavaType( binding.getBindType().getJavaType() );
}
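A simplified model of the value-first inference above, using plain Java in place of the type configuration; names and types here are illustrative only:
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

public class BindTypeInferenceSketch {
    static Class<?> inferType(List<?> bindValues, Class<?> declaredType) {
        // prefer the class of the first non-null bound value...
        return bindValues.stream()
                .filter( Objects::nonNull )
                .<Class<?>>map( Object::getClass )
                .findFirst()
                // ...and fall back to the declared parameter type only when nothing non-null was bound
                .orElse( declaredType );
    }

    public static void main(String[] args) {
        System.out.println( inferType( Arrays.asList( null, 42L ), Number.class ) );  // class java.lang.Long
        System.out.println( inferType( Arrays.asList( null, null ), Number.class ) ); // class java.lang.Number
    }
}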

View File

@ -287,7 +287,7 @@ public class ResultSetMappingImpl implements ResultSetMapping {
JdbcValuesMetadata jdbcResultsMetadata,
SessionFactoryImplementor sessionFactory) {
final int jdbcPosition = valuesArrayPosition + 1;
final BasicType<?> jdbcMapping = jdbcResultsMetadata.resolveType( jdbcPosition, null );
final BasicType<?> jdbcMapping = jdbcResultsMetadata.resolveType( jdbcPosition, null, sessionFactory );
final String name = jdbcResultsMetadata.resolveColumnName( jdbcPosition );

View File

@ -119,7 +119,11 @@ public class CompleteResultBuilderBasicValuedStandard implements CompleteResultB
basicType = explicitType;
}
else {
basicType = jdbcResultsMetadata.resolveType( jdbcPosition, explicitJavaTypeDescriptor );
basicType = jdbcResultsMetadata.resolveType(
jdbcPosition,
explicitJavaTypeDescriptor,
sessionFactory
);
}
final int valuesArrayPosition = ResultsHelper.jdbcPositionToValuesArrayPosition( jdbcPosition );

View File

@ -6,6 +6,8 @@
*/
package org.hibernate.query.results.dynamic;
import java.util.List;
import org.hibernate.query.NativeQuery;
import org.hibernate.query.results.FetchBuilder;
import org.hibernate.sql.results.graph.Fetchable;
@ -14,4 +16,5 @@ import org.hibernate.sql.results.graph.Fetchable;
* @author Steve Ebersole
*/
public interface DynamicFetchBuilder extends FetchBuilder, NativeQuery.ReturnProperty {
List<String> getColumnAliases();
}

View File

@ -209,6 +209,11 @@ public class DynamicFetchBuilderLegacy implements DynamicFetchBuilder, NativeQue
return this;
}
@Override
public List<String> getColumnAliases() {
return columnNames;
}
@Override
public NativeQuery.FetchReturn setLockMode(LockMode lockMode) {
return null;

View File

@ -129,4 +129,9 @@ public class DynamicFetchBuilderStandard
columnNames.add( columnAlias );
return this;
}
@Override
public List<String> getColumnAliases() {
return columnNames;
}
}

View File

@ -122,7 +122,13 @@ public class DynamicResultBuilderBasicConverted<O,R> implements DynamicResultBui
else {
jdbcPosition = currentJdbcPosition;
}
final BasicType<?> basicType = jdbcResultsMetadata.resolveType( jdbcPosition, basicValueConverter.getRelationalJavaDescriptor() );
final BasicType<?> basicType = jdbcResultsMetadata.resolveType(
jdbcPosition,
basicValueConverter.getRelationalJavaDescriptor(),
domainResultCreationState.getSqlAstCreationState()
.getCreationContext()
.getSessionFactory()
);
final int valuesArrayPosition = ResultsHelper.jdbcPositionToValuesArrayPosition( jdbcPosition );
return new SqlSelectionImpl( valuesArrayPosition, (BasicValuedMapping) basicType );

View File

@ -136,7 +136,11 @@ public class DynamicResultBuilderBasicStandard implements DynamicResultBuilderBa
basicType = explicitType;
}
else {
basicType = jdbcResultsMetadata.resolveType( jdbcPosition, explicitJavaTypeDescriptor );
basicType = jdbcResultsMetadata.resolveType(
jdbcPosition,
explicitJavaTypeDescriptor,
sessionFactory
);
}
return new SqlSelectionImpl( valuesArrayPosition, (BasicValuedMapping) basicType );
}

View File

@ -18,6 +18,7 @@ import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.EntityIdentifierMapping;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.internal.SingleAttributeIdentifierMapping;
import org.hibernate.query.NativeQuery;
import org.hibernate.query.NavigablePath;
import org.hibernate.query.results.DomainResultCreationStateImpl;
@ -169,13 +170,23 @@ public class DynamicResultBuilderEntityStandard
}
);
final TableReference tableReference = tableGroup.getPrimaryTableReference();
if ( idColumnNames != null ) {
final List<String> idColumnAliases;
final DynamicFetchBuilder idFetchBuilder;
if ( this.idColumnNames != null ) {
idColumnAliases = this.idColumnNames;
}
else if ( ( idFetchBuilder = findIdFetchBuilder() ) != null ) {
idColumnAliases = idFetchBuilder.getColumnAliases();
}
else {
idColumnAliases = null;
}
if ( idColumnAliases != null ) {
final EntityIdentifierMapping identifierMapping = entityMapping.getIdentifierMapping();
identifierMapping.forEachSelectable(
(selectionIndex, selectableMapping) -> {
resolveSqlSelection(
idColumnNames.get( selectionIndex ),
idColumnAliases.get( selectionIndex ),
createColumnReferenceKey( tableReference, selectableMapping.getSelectionExpression() ),
selectableMapping.getJdbcMapping(),
jdbcResultsMetadata,
@ -229,6 +240,14 @@ public class DynamicResultBuilderEntityStandard
}
}
private DynamicFetchBuilder findIdFetchBuilder() {
final EntityIdentifierMapping identifierMapping = entityMapping.getIdentifierMapping();
if ( identifierMapping instanceof SingleAttributeIdentifierMapping ) {
return findFetchBuilder( ( (SingleAttributeIdentifierMapping) identifierMapping ).getAttributeName() );
}
return findFetchBuilder( identifierMapping.getPartName() );
}
private void resolveSqlSelection(
String columnAlias,
String columnKey,

View File

@ -28,7 +28,6 @@ import org.hibernate.sql.results.graph.embeddable.EmbeddableValuedFetchable;
import org.hibernate.sql.results.jdbc.spi.JdbcValuesMetadata;
import static org.hibernate.query.results.ResultsHelper.impl;
import static org.hibernate.sql.ast.spi.SqlExpressionResolver.createColumnReferenceKey;
/**
* @author Steve Ebersole

View File

@ -1545,7 +1545,7 @@ public abstract class AbstractQuery<R> implements QueryImplementor<R> {
throw getSession().getExceptionConverter().convert( e );
}
finally {
afterQuery( true );
afterQuery( success );
}
}

View File

@ -30,6 +30,7 @@ public class StrictJpaComplianceViolation extends SemanticException {
LIMIT_OFFSET_CLAUSE( "use of LIMIT/OFFSET clause" ),
IDENTIFICATION_VARIABLE_NOT_DECLARED_IN_FROM_CLAUSE( "use of an alias not declared in the FROM clause" ),
FQN_ENTITY_NAME( "use of FQN for entity name" ),
NON_ENTITY_NAME( "use of class or interface FQN for entity name" ),
IMPLICIT_TREAT( "use of implicit treat" ),
MIXED_POSITIONAL_NAMED_PARAMETERS( "mix of positional and named parameters" ),
;

View File

@ -78,7 +78,7 @@ public class SelfRenderingSqmFunction<T> extends SqmFunction<T> {
getRenderingSupport(),
resolveSqlAstArguments( getArguments(), walker ),
resultType,
getMappingModelExpressable( walker, resultType )
resultType == null ? null : getMappingModelExpressable( walker, resultType )
);
}

View File

@ -0,0 +1,30 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.query.sqm.internal;
import org.hibernate.query.spi.DomainQueryExecutionContext;
import org.hibernate.query.spi.NonSelectQueryPlan;
/**
* @author Christian Beikov
*/
public class AggregatedNonSelectQueryPlanImpl implements NonSelectQueryPlan {
private final NonSelectQueryPlan[] aggregatedQueryPlans;
public AggregatedNonSelectQueryPlanImpl(NonSelectQueryPlan[] aggregatedQueryPlans) {
this.aggregatedQueryPlans = aggregatedQueryPlans;
}
@Override
public int executeUpdate(DomainQueryExecutionContext executionContext) {
int updated = 0;
for ( NonSelectQueryPlan aggregatedQueryPlan : aggregatedQueryPlans ) {
updated += aggregatedQueryPlan.executeUpdate( executionContext );
}
return updated;
}
}

View File

@ -12,6 +12,7 @@ import java.util.List;
import org.hibernate.ScrollMode;
import org.hibernate.internal.EmptyScrollableResults;
import org.hibernate.query.Limit;
import org.hibernate.query.spi.DomainQueryExecutionContext;
import org.hibernate.query.spi.ScrollableResultsImplementor;
import org.hibernate.query.spi.SelectQueryPlan;
@ -30,13 +31,41 @@ public class AggregatedSelectQueryPlanImpl<R> implements SelectQueryPlan<R> {
@Override
public List<R> performList(DomainQueryExecutionContext executionContext) {
if ( executionContext.getQueryOptions().getEffectiveLimit().getMaxRowsJpa() == 0 ) {
final Limit effectiveLimit = executionContext.getQueryOptions().getEffectiveLimit();
final int maxRowsJpa = effectiveLimit.getMaxRowsJpa();
if ( maxRowsJpa == 0 ) {
return Collections.emptyList();
}
int elementsToSkip = effectiveLimit.getFirstRowJpa();
final List<R> overallResults = new ArrayList<>();
for ( SelectQueryPlan<R> aggregatedQueryPlan : aggregatedQueryPlans ) {
overallResults.addAll( aggregatedQueryPlan.performList( executionContext ) );
final List<R> list = aggregatedQueryPlan.performList( executionContext );
final int size = list.size();
if ( size <= elementsToSkip ) {
// More elements to skip than the collection size
elementsToSkip -= size;
continue;
}
final int availableElements = size - elementsToSkip;
if ( overallResults.size() + availableElements >= maxRowsJpa ) {
// This result list is the last one i.e. fulfills the limit
final int end = elementsToSkip + ( maxRowsJpa - overallResults.size() );
for ( int i = elementsToSkip; i < end; i++ ) {
overallResults.add( list.get( i ) );
}
break;
}
else if ( elementsToSkip > 0 ) {
// We can skip a part of this result list
for ( int i = elementsToSkip; i < size; i++ ) {
overallResults.add( list.get( i ) );
}
elementsToSkip = 0;
}
else {
overallResults.addAll( list );
}
}
return overallResults;

View File

@ -34,6 +34,7 @@ import org.hibernate.query.sqm.sql.SqmTranslation;
import org.hibernate.query.sqm.sql.SqmTranslator;
import org.hibernate.query.sqm.sql.SqmTranslatorFactory;
import org.hibernate.query.sqm.tree.expression.SqmParameter;
import org.hibernate.query.sqm.tree.select.SqmDynamicInstantiation;
import org.hibernate.query.sqm.tree.select.SqmSelectStatement;
import org.hibernate.query.sqm.tree.select.SqmSelection;
import org.hibernate.sql.ast.SqlAstTranslator;
@ -239,12 +240,17 @@ public class ConcreteSqmSelectQueryPlan<R> implements SelectQueryPlan<R> {
SqmSelectStatement sqm,
QueryOptions queryOptions) {
final List<String> aliases = new ArrayList<>();
sqm.getQuerySpec().getSelectClause().getSelections().forEach(
sqmSelection ->
sqmSelection.getSelectableNode().visitSubSelectableNodes(
subSelection -> aliases.add( subSelection.getAlias() )
)
);
for ( SqmSelection<?> sqmSelection : sqm.getQuerySpec().getSelectClause().getSelections() ) {
// The row a tuple transformer gets to see only contains 1 element for a dynamic instantiation
if ( sqmSelection.getSelectableNode() instanceof SqmDynamicInstantiation<?> ) {
aliases.add( sqmSelection.getAlias() );
}
else {
sqmSelection.getSelectableNode().visitSubSelectableNodes(
subSelection -> aliases.add( subSelection.getAlias() )
);
}
}
return new RowTransformerTupleTransformerAdapter<>(
ArrayHelper.toStringArray( aliases ), queryOptions.getTupleTransformer()

View File

@ -31,6 +31,7 @@ import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.IdentitySet;
import org.hibernate.metamodel.model.domain.BasicDomainType;
import org.hibernate.metamodel.model.domain.DomainType;
import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.query.ImmutableEntityUpdateQueryHandlingMode;
import org.hibernate.query.Query;
@ -226,7 +227,7 @@ public class QuerySqmImpl<R>
SqmUtil.verifyIsSelectStatement( sqmStatement, null );
final SqmQueryPart<R> queryPart = ( (SqmSelectStatement<R>) sqmStatement ).getQueryPart();
// For criteria queries, we have to validate the fetch structure here
queryPart.validateQueryGroupFetchStructure();
queryPart.validateFetchStructureAndOwners();
visitQueryReturnType(
queryPart,
resultType,
@ -627,7 +628,12 @@ public class QuerySqmImpl<R>
executionContextToUse = this;
}
else {
executionContextToUse = new DelegatingDomainQueryExecutionContext( this );
executionContextToUse = new DelegatingDomainQueryExecutionContext( this ) {
@Override
public QueryOptions getQueryOptions() {
return normalizedQueryOptions;
}
};
}
}
else {
@ -792,11 +798,23 @@ public class QuerySqmImpl<R>
}
private NonSelectQueryPlan buildDeleteQueryPlan() {
final SqmDeleteStatement<R> sqmDelete = (SqmDeleteStatement<R>) getSqmStatement();
final SqmDeleteStatement<R>[] concreteSqmStatements = QuerySplitter.split(
(SqmDeleteStatement<R>) getSqmStatement(),
getSessionFactory()
);
final String entityNameToDelete = sqmDelete.getTarget().getReferencedPathSource().getHibernateEntityName();
if ( concreteSqmStatements.length > 1 ) {
return buildAggregatedDeleteQueryPlan( concreteSqmStatements );
}
else {
return buildConcreteDeleteQueryPlan( concreteSqmStatements[0] );
}
}
private NonSelectQueryPlan buildConcreteDeleteQueryPlan(SqmDeleteStatement<R> sqmDelete) {
final EntityDomainType<?> entityDomainType = sqmDelete.getTarget().getReferencedPathSource();
final String entityNameToDelete = entityDomainType.getHibernateEntityName();
final EntityPersister entityDescriptor = getSessionFactory().getDomainModel().findEntityDescriptor( entityNameToDelete );
final SqmMultiTableMutationStrategy multiTableStrategy = entityDescriptor.getSqmMultiTableMutationStrategy();
if ( multiTableStrategy == null ) {
return new SimpleDeleteQueryPlan( entityDescriptor, sqmDelete, domainParameterXref );
@ -806,6 +824,16 @@ public class QuerySqmImpl<R>
}
}
private NonSelectQueryPlan buildAggregatedDeleteQueryPlan(SqmDeleteStatement<R>[] concreteSqmStatements) {
final NonSelectQueryPlan[] aggregatedQueryPlans = new NonSelectQueryPlan[ concreteSqmStatements.length ];
for ( int i = 0, x = concreteSqmStatements.length; i < x; i++ ) {
aggregatedQueryPlans[i] = buildConcreteDeleteQueryPlan( concreteSqmStatements[i] );
}
return new AggregatedNonSelectQueryPlanImpl( aggregatedQueryPlans );
}
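A compact model of how the split concrete plans combine into one result, with a plain functional interface standing in for NonSelectQueryPlan (all names below are illustrative):
import java.util.List;
import java.util.function.IntSupplier;

public class AggregatedDeleteSketch {
    // each supplier stands in for one concrete per-entity delete plan's executeUpdate
    static int executeAll(List<IntSupplier> concretePlans) {
        int updated = 0;
        for ( IntSupplier plan : concretePlans ) {
            updated += plan.getAsInt();
        }
        return updated;
    }

    public static void main(String[] args) {
        // e.g. a delete against a shared supertype splits into two concrete deletes
        System.out.println( executeAll( List.of( () -> 3, () -> 5 ) ) ); // 8
    }
}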
private NonSelectQueryPlan buildUpdateQueryPlan() {
final SqmUpdateStatement<R> sqmUpdate = (SqmUpdateStatement<R>) getSqmStatement();

View File

@ -201,6 +201,7 @@ public class MatchingIdSelectionHelper {
final MultiTableSqmMutationConverter sqmConverter = new MultiTableSqmMutationConverter(
entityDescriptor,
sqmMutationStatement,
sqmMutationStatement.getTarget(),
domainParameterXref,
executionContext.getQueryOptions(),

View File

@ -21,6 +21,7 @@ import org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter;
import org.hibernate.query.sqm.sql.internal.DomainResultProducer;
import org.hibernate.query.sqm.sql.internal.SqlAstProcessingStateImpl;
import org.hibernate.query.sqm.sql.internal.SqlAstQueryPartProcessingStateImpl;
import org.hibernate.query.sqm.tree.SqmStatement;
import org.hibernate.query.sqm.tree.expression.SqmParameter;
import org.hibernate.query.sqm.tree.from.SqmRoot;
import org.hibernate.query.sqm.tree.predicate.SqmWhereClause;
@ -63,6 +64,7 @@ public class MultiTableSqmMutationConverter extends BaseSqmToSqlAstConverter<Sta
public MultiTableSqmMutationConverter(
EntityMappingType mutatingEntityDescriptor,
SqmStatement<?> statement,
SqmRoot<?> sqmRoot,
DomainParameterXref domainParameterXref,
QueryOptions queryOptions,
@ -71,6 +73,7 @@ public class MultiTableSqmMutationConverter extends BaseSqmToSqlAstConverter<Sta
SqlAstCreationContext creationContext) {
this(
mutatingEntityDescriptor,
statement,
sqmRoot,
sqmRoot.getExplicitAlias(),
domainParameterXref,
@ -83,6 +86,7 @@ public class MultiTableSqmMutationConverter extends BaseSqmToSqlAstConverter<Sta
public MultiTableSqmMutationConverter(
EntityMappingType mutatingEntityDescriptor,
SqmStatement<?> statement,
SqmRoot<?> sqmRoot,
String sourceAlias,
DomainParameterXref domainParameterXref,
@ -90,7 +94,7 @@ public class MultiTableSqmMutationConverter extends BaseSqmToSqlAstConverter<Sta
LoadQueryInfluencers loadQueryInfluencers,
QueryParameterBindings domainParameterBindings,
SqlAstCreationContext creationContext) {
super( creationContext, null, queryOptions, loadQueryInfluencers, domainParameterXref, domainParameterBindings );
super( creationContext, statement, queryOptions, loadQueryInfluencers, domainParameterXref, domainParameterBindings );
this.mutatingEntityDescriptor = mutatingEntityDescriptor;
final SqlAstProcessingStateImpl rootProcessingState = new SqlAstProcessingStateImpl(
@ -114,6 +118,11 @@ public class MultiTableSqmMutationConverter extends BaseSqmToSqlAstConverter<Sta
getFromClauseAccess().registerTableGroup( sqmRoot.getNavigablePath(), mutatingTableGroup );
}
@Override
public void pruneTableGroupJoins() {
super.pruneTableGroupJoins();
}
@SuppressWarnings("unused")
public EntityMappingType getMutatingEntityDescriptor() {
return mutatingEntityDescriptor;
@ -139,7 +148,7 @@ public class MultiTableSqmMutationConverter extends BaseSqmToSqlAstConverter<Sta
this.parameterResolutionConsumer = parameterResolutionConsumer;
for ( SqmAssignment assignment : setClause.getAssignments() ) {
visitAssignment( assignment, assignmentConsumer );
assignmentConsumer.accept( visitAssignment( assignment ) );
}
}
@ -147,16 +156,6 @@ public class MultiTableSqmMutationConverter extends BaseSqmToSqlAstConverter<Sta
throw new UnsupportedOperationException();
}
private void visitAssignment(
SqmAssignment sqmAssignment,
Consumer<Assignment> assignmentConsumer) {
final Assignable assignable = (Assignable) sqmAssignment.getTargetPath().accept( this );
final Expression value = (Expression) sqmAssignment.getValue().accept( this );
assignmentConsumer.accept( new Assignment( assignable, value ) );
}
@Override
public Assignment visitAssignment(SqmAssignment sqmAssignment) {
return new Assignment(

View File

@ -104,7 +104,7 @@ public class SqmMutationStrategyHelper {
else {
// element-collection or many-to-many - delete the collection-table row
final TableReference tableReference = new TableReference( separateCollectionTable, null, true, sessionFactory );
final TableReference tableReference = new TableReference( separateCollectionTable, DeleteStatement.DEFAULT_ALIAS, true, sessionFactory );
final DeleteStatement sqlAstDelete = new DeleteStatement(
tableReference,

View File

@ -117,6 +117,7 @@ public abstract class AbstractCteMutationHandler extends AbstractMutationHandler
final MultiTableSqmMutationConverter sqmConverter = new MultiTableSqmMutationConverter(
entityDescriptor,
sqmMutationStatement,
sqmMutationStatement.getTarget(),
explicitDmlTargetAlias,
domainParameterXref,
@ -140,6 +141,7 @@ public abstract class AbstractCteMutationHandler extends AbstractMutationHandler
columnReference -> {},
(sqmParam, mappingType, jdbcParameters) -> paramTypeResolutions.put( sqmParam, mappingType )
);
sqmConverter.pruneTableGroupJoins();
final CteStatement idSelectCte = new CteStatement(
BaseSqmToSqlAstConverter.createCteTable( getCteTable(), factory ),
@ -165,7 +167,7 @@ public abstract class AbstractCteMutationHandler extends AbstractMutationHandler
.getSqlAstTranslatorFactory()
.buildSelectTranslator( factory, statement );
final Expression count = createCountStart( factory, sqmConverter );
final Expression count = createCountStar( factory, sqmConverter );
domainResults.add(
new BasicResult<>(
0,
@ -215,7 +217,7 @@ public abstract class AbstractCteMutationHandler extends AbstractMutationHandler
return ( (Number) list.get( 0 ) ).intValue();
}
private Expression createCountStart(
private Expression createCountStar(
SessionFactoryImplementor factory,
MultiTableSqmMutationConverter sqmConverter) {
final SqmExpression<?> arg = new SqmStar( factory.getNodeBuilder() );

View File

@ -160,7 +160,7 @@ public final class ExecuteWithIdTableHelper {
public static QuerySpec createIdTableSelectQuerySpec(
IdTable idTable,
Function<SharedSessionContractImplementor,String> sessionUidAccess,
Function<SharedSessionContractImplementor, String> sessionUidAccess,
EntityMappingType entityDescriptor,
ExecutionContext executionContext) {
return createIdTableSelectQuerySpec( idTable, null, sessionUidAccess, entityDescriptor, executionContext );
@ -169,14 +169,14 @@ public final class ExecuteWithIdTableHelper {
public static QuerySpec createIdTableSelectQuerySpec(
IdTable idTable,
ModelPart fkModelPart,
Function<SharedSessionContractImplementor,String> sessionUidAccess,
Function<SharedSessionContractImplementor, String> sessionUidAccess,
EntityMappingType entityDescriptor,
ExecutionContext executionContext) {
final QuerySpec querySpec = new QuerySpec( false );
final TableReference idTableReference = new TableReference(
idTable.getTableExpression(),
null,
IdTable.DEFAULT_ALIAS,
true,
executionContext.getSession().getFactory()
);

View File

@ -7,23 +7,33 @@
package org.hibernate.query.sqm.mutation.internal.idtable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.function.Function;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Exportable;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Contributable;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Selectable;
import org.hibernate.mapping.Value;
import org.hibernate.metamodel.mapping.EntityIdentifierMapping;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.persister.entity.Joinable;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
/**
* @author Steve Ebersole
*/
public class IdTable implements Exportable, Contributable {
public static final String DEFAULT_ALIAS = "idtable_";
private final EntityMappingType entityDescriptor;
private final String qualifiedTableName;
@ -34,27 +44,38 @@ public class IdTable implements Exportable, Contributable {
public IdTable(
EntityMappingType entityDescriptor,
Function<String,String> idTableNameAdjuster,
Dialect dialect) {
Function<String, String> idTableNameAdjuster,
Dialect dialect,
RuntimeModelCreationContext runtimeModelCreationContext) {
this.entityDescriptor = entityDescriptor;
this.qualifiedTableName = idTableNameAdjuster.apply(
// The table name might be a sub-query, which is inappropriate for an id table name
entityDescriptor.getEntityPersister().getSynchronizedQuerySpaces()[0]
);
// The table name might be a sub-query, which is inappropriate for an id table name
final String originalTableName = entityDescriptor.getEntityPersister().getSynchronizedQuerySpaces()[0];
if ( Identifier.isQuoted( originalTableName ) ) {
this.qualifiedTableName = dialect.quote( idTableNameAdjuster.apply( Identifier.unQuote( originalTableName ) ) );
}
else {
this.qualifiedTableName = idTableNameAdjuster.apply( originalTableName );
}
final PersistentClass entityBinding = runtimeModelCreationContext.getBootModel()
.getEntityBinding( entityDescriptor.getEntityName() );
final Iterator<Column> itr = entityBinding.getTable().getPrimaryKey().getColumnIterator();
final Iterator<JdbcMapping> jdbcMappings = entityDescriptor.getIdentifierMapping().getJdbcMappings().iterator();
while ( itr.hasNext() ) {
final Column column = itr.next();
final JdbcMapping jdbcMapping = jdbcMappings.next();
columns.add(
new IdTableColumn(
this,
column.getText( dialect ),
jdbcMapping,
column.getSqlType( dialect, runtimeModelCreationContext.getMetadata() )
)
);
}
entityDescriptor.getIdentifierMapping().forEachSelectable(
(columnIndex, selection) -> columns.add(
new IdTableColumn(
this,
selection.getSelectionExpression(),
selection.getJdbcMapping(),
dialect.getTypeName(
selection.getJdbcMapping().getJdbcTypeDescriptor()
)
)
)
);
entityDescriptor.visitSubTypeAttributeMappings(
attribute -> {
if ( attribute instanceof PluralAttributeMapping ) {
@ -64,17 +85,27 @@ public class IdTable implements Exportable, Contributable {
// Ensure that the FK target columns are available
final ModelPart fkTarget = pluralAttribute.getKeyDescriptor().getTargetPart();
if ( !( fkTarget instanceof EntityIdentifierMapping ) ) {
final Value value = entityBinding.getSubclassProperty( pluralAttribute.getAttributeName() )
.getValue();
final Iterator<Selectable> columnIterator = ( (Collection) value ).getKey()
.getColumnIterator();
fkTarget.forEachSelectable(
(columnIndex, selection) -> columns.add(
new IdTableColumn(
this,
selection.getSelectionExpression(),
selection.getJdbcMapping(),
dialect.getTypeName(
selection.getJdbcMapping().getJdbcTypeDescriptor()
(columnIndex, selection) -> {
final Selectable selectable = columnIterator.next();
if ( selectable instanceof Column ) {
columns.add(
new IdTableColumn(
this,
selectable.getText( dialect ),
selection.getJdbcMapping(),
( (Column) selectable ).getSqlType(
dialect,
runtimeModelCreationContext.getMetadata()
)
)
)
)
);
}
}
);
}
}

View File

@ -12,7 +12,6 @@ import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
@ -23,6 +22,7 @@ import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.FilterHelper;
import org.hibernate.internal.util.MutableBoolean;
import org.hibernate.internal.util.MutableInteger;
import org.hibernate.metamodel.mapping.EntityIdentifierMapping;
import org.hibernate.metamodel.mapping.EntityMappingType;
@ -51,7 +51,7 @@ import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableReference;
import org.hibernate.sql.ast.tree.from.UnionTableGroup;
import org.hibernate.sql.ast.tree.from.UnionTableReference;
import org.hibernate.sql.ast.tree.predicate.FilterPredicate;
import org.hibernate.sql.ast.tree.predicate.InSubQueryPredicate;
import org.hibernate.sql.ast.tree.predicate.Predicate;
@ -107,9 +107,9 @@ public class RestrictedDeleteExecutionDelegate implements TableBasedDeleteHandle
this.idTableExporterAccess = idTableExporterAccess;
this.sessionUidAccess = sessionUidAccess;
this.sessionFactory = sessionFactory;
converter = new MultiTableSqmMutationConverter(
this.converter = new MultiTableSqmMutationConverter(
entityDescriptor,
sqmDelete,
sqmDelete.getTarget(),
domainParameterXref,
queryOptions,
@ -149,12 +149,12 @@ public class RestrictedDeleteExecutionDelegate implements TableBasedDeleteHandle
// 2) we also inspect each ColumnReference that is part of the where-clause to see which
// table it comes from. if all of the referenced columns (if any at all) are from the root table
// we can perform all of the deletes without using an id-table
final AtomicBoolean needsIdTableWrapper = new AtomicBoolean( false );
final MutableBoolean needsIdTableWrapper = new MutableBoolean( false );
Predicate predicate = converter.visitWhereClause(
sqmDelete.getWhereClause(),
columnReference -> {
if ( ! hierarchyRootTableReference.getIdentificationVariable().equals( columnReference.getQualifier() ) ) {
needsIdTableWrapper.set( true );
needsIdTableWrapper.setValue( true );
}
},
(sqmParameter, mappingType, jdbcParameters) -> {
@ -172,11 +172,17 @@ public class RestrictedDeleteExecutionDelegate implements TableBasedDeleteHandle
deletingTableGroup
);
if ( filterPredicate != null ) {
needsIdTableWrapper.set( true );
needsIdTableWrapper.setValue( true );
predicate = SqlAstTreeHelper.combinePredicates( predicate, filterPredicate );
}
converter.pruneTableGroupJoins();
boolean needsIdTable = needsIdTableWrapper.get();
// We need an id table if we want to delete from an intermediate table to avoid FK violations
// The intermediate table has a FK to the root table, so we can't delete from the root table first
// Deleting from the intermediate table first also isn't possible,
// because that is the source for deletion in other tables, hence we need an id table
final boolean needsIdTable = needsIdTableWrapper.getValue()
|| entityDescriptor != entityDescriptor.getRootEntityDescriptor();
final SqmJdbcExecutionContextAdapter executionContextAdapter = SqmJdbcExecutionContextAdapter.omittingLockingAndPaging( executionContext );
@ -208,17 +214,9 @@ public class RestrictedDeleteExecutionDelegate implements TableBasedDeleteHandle
Map<SqmParameter, MappingModelExpressable> paramTypeResolutions,
SqlExpressionResolver sqlExpressionResolver,
ExecutionContext executionContext) {
final EntityPersister rootEntityPersister;
final String rootEntityName = entityDescriptor.getEntityPersister().getRootEntityName();
if ( rootEntityName.equals( entityDescriptor.getEntityName() ) ) {
rootEntityPersister = entityDescriptor.getEntityPersister();
}
else {
rootEntityPersister = sessionFactory.getDomainModel().findEntityDescriptor( rootEntityName );
}
final MutableInteger rows = new MutableInteger();
assert entityDescriptor == entityDescriptor.getRootEntityDescriptor();
final EntityPersister rootEntityPersister = entityDescriptor.getEntityPersister();
final String rootTableName = ( (Joinable) rootEntityPersister ).getTableName();
final TableReference rootTableReference = tableGroup.resolveTableReference(
tableGroup.getNavigablePath(),
@ -247,48 +245,114 @@ public class RestrictedDeleteExecutionDelegate implements TableBasedDeleteHandle
executionContext.getSession()
);
entityDescriptor.visitConstraintOrderedTables(
(tableExpression, tableKeyColumnVisitationSupplier) -> {
if ( tableExpression.equals( rootTableName ) ) {
rows.set(
deleteFromRootTableWithoutIdTable(
rootTableReference,
suppliedPredicate,
jdbcParameterBindings,
executionContext
)
);
SqmMutationStrategyHelper.cleanUpCollectionTables(
entityDescriptor,
(tableReference, attributeMapping) -> {
// No need for a predicate if there is no supplied predicate i.e. this is a full cleanup
if ( suppliedPredicate == null ) {
return null;
}
final ForeignKeyDescriptor fkDescriptor = attributeMapping.getKeyDescriptor();
final QuerySpec idSelectFkSubQuery;
// todo (6.0): based on the location of the attribute mapping, we could prune the table group of the subquery
if ( fkDescriptor.getTargetPart() instanceof EntityIdentifierMapping ) {
idSelectFkSubQuery = matchingIdSubQuerySpec;
}
else {
rows.set(
rows.get() + deleteFromNonRootTableWithoutIdTable(
resolveUnionTableReference( tableGroup, tableExpression ),
idSelectFkSubQuery = ExecuteWithoutIdTableHelper.createIdMatchingSubQuerySpec(
tableGroup.getNavigablePath(),
rootTableReference,
suppliedPredicate,
rootEntityPersister,
sqlExpressionResolver,
sessionFactory
);
}
return new InSubQueryPredicate(
MappingModelHelper.buildColumnReferenceExpression(
fkDescriptor,
null,
sessionFactory
),
idSelectFkSubQuery,
false
);
},
jdbcParameterBindings,
executionContext
);
if ( rootTableReference instanceof UnionTableReference ) {
final MutableInteger rows = new MutableInteger();
entityDescriptor.visitConstraintOrderedTables(
(tableExpression, tableKeyColumnVisitationSupplier) -> {
final TableReference tableReference = new TableReference(
tableExpression,
tableGroup.getPrimaryTableReference().getIdentificationVariable(),
false,
sessionFactory
);
final QuerySpec idMatchingSubQuerySpec;
// No need for a predicate if there is no supplied predicate i.e. this is a full cleanup
if ( suppliedPredicate == null ) {
idMatchingSubQuerySpec = null;
}
else {
idMatchingSubQuerySpec = matchingIdSubQuerySpec;
}
rows.plus(
deleteFromNonRootTableWithoutIdTable(
tableReference,
tableKeyColumnVisitationSupplier,
sqlExpressionResolver,
tableGroup,
matchingIdSubQuerySpec,
idMatchingSubQuerySpec,
jdbcParameterBindings,
executionContext
)
);
}
}
);
return rows.get();
}
private TableReference resolveUnionTableReference(TableGroup tableGroup, String tableExpression) {
if ( tableGroup instanceof UnionTableGroup ) {
return new TableReference(
tableExpression,
tableGroup.getPrimaryTableReference().getIdentificationVariable(),
false,
sessionFactory
);
return rows.get();
}
else {
return tableGroup.getTableReference( tableGroup.getNavigablePath(), tableExpression, true, true );
entityDescriptor.visitConstraintOrderedTables(
(tableExpression, tableKeyColumnVisitationSupplier) -> {
if ( !tableExpression.equals( rootTableName ) ) {
final TableReference tableReference = tableGroup.getTableReference(
tableGroup.getNavigablePath(),
tableExpression,
true,
true
);
final QuerySpec idMatchingSubQuerySpec;
// No need for a predicate if there is no supplied predicate i.e. this is a full cleanup
if ( suppliedPredicate == null ) {
idMatchingSubQuerySpec = null;
}
else {
idMatchingSubQuerySpec = matchingIdSubQuerySpec;
}
deleteFromNonRootTableWithoutIdTable(
tableReference,
tableKeyColumnVisitationSupplier,
sqlExpressionResolver,
tableGroup,
idMatchingSubQuerySpec,
jdbcParameterBindings,
executionContext
);
}
}
);
return deleteFromRootTableWithoutIdTable(
rootTableReference,
suppliedPredicate,
jdbcParameterBindings,
executionContext
);
}
}
@ -304,7 +368,7 @@ public class RestrictedDeleteExecutionDelegate implements TableBasedDeleteHandle
);
}
private int deleteFromNonRootTableWithoutIdTable(
private int deleteFromNonRootTableWithoutIdTable(
TableReference targetTableReference,
Supplier<Consumer<SelectableConsumer>> tableKeyColumnVisitationSupplier,
SqlExpressionResolver sqlExpressionResolver,
@ -315,50 +379,62 @@ public class RestrictedDeleteExecutionDelegate implements TableBasedDeleteHandle
assert targetTableReference != null;
log.tracef( "deleteFromNonRootTable - %s", targetTableReference.getTableExpression() );
/*
* delete from sub_table
* where sub_id in (
* select root_id from root_table
* where {predicate}
* )
*/
/*
* Create the `sub_id` reference as the LHS of the in-subquery predicate
*/
final List<ColumnReference> deletingTableColumnRefs = new ArrayList<>();
tableKeyColumnVisitationSupplier.get().accept(
(columnIndex, selection) -> {
assert targetTableReference.getTableReference( selection.getContainingTableExpression() ) != null;
final Expression expression = sqlExpressionResolver.resolveSqlExpression(
SqlExpressionResolver.createColumnReferenceKey( targetTableReference, selection.getSelectionExpression() ),
sqlAstProcessingState -> new ColumnReference(
targetTableReference,
selection,
sessionFactory
)
);
deletingTableColumnRefs.add( (ColumnReference) expression );
}
final TableReference deleteTableReference = new TableReference(
targetTableReference.getTableExpression(),
DeleteStatement.DEFAULT_ALIAS,
true,
sessionFactory
);
final Expression deletingTableColumnRefsExpression;
if ( deletingTableColumnRefs.size() == 1 ) {
deletingTableColumnRefsExpression = deletingTableColumnRefs.get( 0 );
final Predicate tableDeletePredicate;
if ( matchingIdSubQuerySpec == null ) {
tableDeletePredicate = null;
}
else {
deletingTableColumnRefsExpression = new SqlTuple( deletingTableColumnRefs, entityDescriptor.getIdentifierMapping() );
/*
* delete from sub_table
* where sub_id in (
* select root_id from root_table
* where {predicate}
* )
*/
/*
* Create the `sub_id` reference as the LHS of the in-subquery predicate
*/
final List<ColumnReference> deletingTableColumnRefs = new ArrayList<>();
tableKeyColumnVisitationSupplier.get().accept(
(columnIndex, selection) -> {
assert deleteTableReference.getTableReference( selection.getContainingTableExpression() ) != null;
final Expression expression = sqlExpressionResolver.resolveSqlExpression(
SqlExpressionResolver.createColumnReferenceKey( deleteTableReference, selection.getSelectionExpression() ),
sqlAstProcessingState -> new ColumnReference(
deleteTableReference,
selection,
sessionFactory
)
);
deletingTableColumnRefs.add( (ColumnReference) expression );
}
);
final Expression deletingTableColumnRefsExpression;
if ( deletingTableColumnRefs.size() == 1 ) {
deletingTableColumnRefsExpression = deletingTableColumnRefs.get( 0 );
}
else {
deletingTableColumnRefsExpression = new SqlTuple( deletingTableColumnRefs, entityDescriptor.getIdentifierMapping() );
}
tableDeletePredicate = new InSubQueryPredicate(
deletingTableColumnRefsExpression,
matchingIdSubQuerySpec,
false
);
}
final InSubQueryPredicate idMatchPredicate = new InSubQueryPredicate(
deletingTableColumnRefsExpression,
matchingIdSubQuerySpec,
false
);
final DeleteStatement sqlAstDelete = new DeleteStatement( targetTableReference, idMatchPredicate );
final DeleteStatement sqlAstDelete = new DeleteStatement( deleteTableReference, tableDeletePredicate );
final int rows = executeSqlDelete(
sqlAstDelete,
jdbcParameterBindings,
@ -510,6 +586,12 @@ public class RestrictedDeleteExecutionDelegate implements TableBasedDeleteHandle
final SessionFactoryImplementor factory = executionContext.getSession().getFactory();
final TableKeyExpressionCollector keyColumnCollector = new TableKeyExpressionCollector( entityDescriptor );
final TableReference targetTable = new TableReference(
tableExpression,
DeleteStatement.DEFAULT_ALIAS,
true,
factory
);
tableKeyColumnVisitationSupplier.get().accept(
(columnIndex, selection) -> {
@ -520,7 +602,7 @@ public class RestrictedDeleteExecutionDelegate implements TableBasedDeleteHandle
keyColumnCollector.apply(
new ColumnReference(
(String) null,
targetTable,
selection,
factory
)
@ -535,10 +617,7 @@ public class RestrictedDeleteExecutionDelegate implements TableBasedDeleteHandle
);
executeSqlDelete(
new DeleteStatement(
new TableReference( tableExpression, null, true, factory ),
predicate
),
new DeleteStatement( targetTable, predicate ),
JdbcParameterBindings.NO_BINDINGS,
executionContext
);
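As context for the no-id-table branch above: it is taken when the delete targets the hierarchy root and the restriction only touches root-table columns. A minimal sketch with invented names (Person as the root of a joined hierarchy, Employee as a subclass, "name" mapped to the root table, session an open Hibernate Session):

// Hypothetical joined-inheritance hierarchy rooted at Person; the predicate only references root-table columns.
int deleted = session.createQuery( "delete from Person p where p.name like :pattern" )
		.setParameter( "pattern", "A%" )
		.executeUpdate();
// Conceptually, each non-root table is cleaned up first with a statement of the shape
//   delete from EMPLOYEE where id in ( select id from PERSON where name like ? )
// and the root table is deleted from last, as in deleteFromRootTableWithoutIdTable above.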

View File

@ -126,6 +126,7 @@ public class TableBasedUpdateHandler
final MultiTableSqmMutationConverter converterDelegate = new MultiTableSqmMutationConverter(
entityDescriptor,
getSqmDeleteOrUpdateStatement(),
getSqmDeleteOrUpdateStatement().getTarget(),
domainParameterXref,
executionContext.getQueryOptions(),
@ -203,6 +204,7 @@ public class TableBasedUpdateHandler
if ( filterPredicate != null ) {
predicate = SqlAstTreeHelper.combinePredicates( predicate, filterPredicate );
}
converterDelegate.pruneTableGroupJoins();
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// cross-reference the TableReference by alias. The TableGroup already

View File

@ -58,7 +58,7 @@ public class TempIdTableExporter implements IdTableExporter {
buffer.append( column.getColumnName() ).append( ' ' );
final int sqlTypeCode = column.getJdbcMapping().getJdbcTypeDescriptor().getDefaultSqlTypeCode();
final String databaseTypeName = databaseTypeNameResolver.apply( sqlTypeCode );
final String databaseTypeName = column.getSqlTypeDefinition();
buffer.append( " " ).append( databaseTypeName ).append( " " );

View File

@ -162,7 +162,7 @@ public class InlineDeleteHandler implements DeleteHandler {
DomainQueryExecutionContext executionContext) {
final TableReference targetTableReference = new TableReference(
targetTableExpression,
null,
DeleteStatement.DEFAULT_ALIAS,
false,
sessionFactory
);

View File

@ -15,18 +15,35 @@ import org.hibernate.query.sqm.tree.domain.SqmPath;
* @author Steve Ebersole
*/
public class SqmCreationHelper {
/**
* This is a special alias used for implicit joins within the FROM clause.
* Passing this alias causes no unique alias to be generated for the path;
* a <code>null</code> alias is used instead.
*
* The effect is that, for a query like
* `... exists ( from alias.intermediate.attribute where alias.intermediate.otherAttribute is not null )`,
* the path in the FROM clause and the one in the WHERE clause share the same table group.
*/
public static final String IMPLICIT_ALIAS = "{implicit}";
public static NavigablePath buildRootNavigablePath(String base, String alias) {
// Make sure we always create a unique alias, otherwise we might use a wrong table group for the same join
return alias == null
? new NavigablePath( base, Long.toString( System.nanoTime() ) )
: new NavigablePath( base, alias );
return new NavigablePath( base, determineAlias( alias ) );
}
public static NavigablePath buildSubNavigablePath(NavigablePath lhs, String base, String alias) {
return lhs.append( base, determineAlias( alias ) );
}
private static String determineAlias(String alias) {
// Make sure we always create a unique alias, otherwise we might use a wrong table group for the same join
return alias == null
? lhs.append( base, Long.toString( System.nanoTime() ) )
: lhs.append( base, alias );
if ( alias == null ) {
return Long.toString( System.nanoTime() );
}
else if ( alias == IMPLICIT_ALIAS ) {
return null;
}
return alias;
}
public static NavigablePath buildSubNavigablePath(SqmPath<?> lhs, String subNavigable, String alias) {
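To make the alias handling above concrete, a small illustrative sketch of what the three cases handled by determineAlias produce; the entity and attribute names are invented:

// Illustrative only; "com.acme.Order", "customer" and "items" are invented names.
// An explicit alias is kept as-is, a null alias is replaced with a unique nano-time based one,
// and the IMPLICIT_ALIAS marker becomes a null alias so repeated implicit joins of the same
// path resolve to the same table group.
NavigablePath root = SqmCreationHelper.buildRootNavigablePath( "com.acme.Order", "o" );
NavigablePath implicitJoin = SqmCreationHelper.buildSubNavigablePath( root, "customer", SqmCreationHelper.IMPLICIT_ALIAS );
NavigablePath uniquelyAliased = SqmCreationHelper.buildSubNavigablePath( root, "items", null );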

View File

@ -82,6 +82,7 @@ import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.metamodel.model.domain.PluralPersistentAttribute;
import org.hibernate.metamodel.model.domain.internal.CompositeSqmPathSource;
import org.hibernate.query.criteria.JpaPath;
import org.hibernate.query.internal.QueryHelper;
import org.hibernate.query.sqm.function.SelfRenderingFunctionSqlAstExpression;
import org.hibernate.query.sqm.produce.function.internal.PatternRenderer;
import org.hibernate.query.sqm.tree.SqmJoinType;
@ -368,12 +369,13 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
private final EntityGraphTraversalState entityGraphTraversalState;
private int fetchDepth;
private String currentBagRole;
private boolean resolvingCircularFetch;
private ForeignKeyDescriptor.Nature currentlyResolvingForeignKeySide;
private SqmQueryPart<?> currentSqmQueryPart;
private Map<String, FilterPredicate> collectionFilterPredicates;
private OrderByFragmentConsumer orderByFragmentConsumer;
private List<Map.Entry<OrderByFragment, TableGroup>> orderByFragments;
private final SqlAliasBaseManager sqlAliasBaseManager = new SqlAliasBaseManager();
private final Stack<SqlAstProcessingState> processingStateStack = new StandardStack<>();
@ -1134,6 +1136,9 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
for ( SqmDynamicInstantiationArgument<?> sqmArgument : sqmDynamicInstantiation.getArguments() ) {
final SqmSelectableNode<?> selectableNode = sqmArgument.getSelectableNode();
if ( selectableNode instanceof SqmPath<?> ) {
prepareForSelection( (SqmPath<?>) selectableNode );
}
final DomainResultProducer<?> argumentResultProducer = (DomainResultProducer<?>) selectableNode.accept( this );
dynamicInstantiation.addArgument( sqmArgument.getAlias(), argumentResultProducer, this );
}
@ -1423,10 +1428,6 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
pushProcessingState( processingState );
try {
if ( topLevel ) {
orderByFragmentConsumer = new StandardOrderByFragmentConsumer();
}
// we want to visit the from-clause first
visitFromClause( sqmQuerySpec.getFromClause() );
@ -1451,12 +1452,16 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
visitOrderByOffsetAndFetch( sqmQuerySpec, sqlQuerySpec );
if ( topLevel && statement instanceof SqmSelectStatement<?> ) {
orderByFragmentConsumer.visitFragments(
(orderByFragment, tableGroup) -> {
orderByFragment.apply( sqlQuerySpec, tableGroup, this );
}
);
orderByFragmentConsumer = null;
if ( orderByFragments != null ) {
orderByFragments.forEach(
entry -> entry.getKey().apply(
sqlQuerySpec,
entry.getValue(),
this
)
);
orderByFragments = null;
}
applyCollectionFilterPredicates( sqlQuerySpec );
}
@ -1500,33 +1505,6 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
return getFromClauseAccess().getTableGroup( navigablePath );
}
private interface OrderByFragmentConsumer {
void accept(OrderByFragment orderByFragment, TableGroup tableGroup);
void visitFragments(BiConsumer<OrderByFragment,TableGroup> consumer);
}
private static class StandardOrderByFragmentConsumer implements OrderByFragmentConsumer {
private Map<OrderByFragment, TableGroup> fragments;
@Override
public void accept(OrderByFragment orderByFragment, TableGroup tableGroup) {
if ( fragments == null ) {
fragments = new LinkedHashMap<>();
}
fragments.put( orderByFragment, tableGroup );
}
@Override
public void visitFragments(BiConsumer<OrderByFragment, TableGroup> consumer) {
if ( fragments == null || fragments.isEmpty() ) {
return;
}
fragments.forEach( consumer );
}
}
protected void applyCollectionFilterPredicates(QuerySpec sqlQuerySpec) {
final List<TableGroup> roots = sqlQuerySpec.getFromClause().getRoots();
if ( roots != null && roots.size() == 1 ) {
@ -2410,8 +2388,7 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
implicitJoinChecker = BaseSqmToSqlAstConverter::verifyManipulationImplicitJoin;
}
final FromClauseIndex fromClauseIndex = fromClauseIndexStack.getCurrent();
final boolean useInnerJoin = currentClauseStack.getCurrent() == Clause.SELECT;
prepareReusablePath( fromClauseIndex, sqmPath, useInnerJoin, implicitJoinChecker );
prepareReusablePath( fromClauseIndex, sqmPath, false, implicitJoinChecker );
return supplier.get();
}
@ -2437,6 +2414,7 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
);
if ( parentPath instanceof SqmTreatedPath<?, ?> ) {
fromClauseIndex.register( (SqmPath<?>) parentPath, parentTableGroup );
return parentTableGroup;
}
final TableGroup newTableGroup = createTableGroup( parentTableGroup, (SqmPath<?>) parentPath, useInnerJoin );
if ( newTableGroup != null ) {
@ -2974,6 +2952,14 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
this,
creationContext
);
final FilterPredicate filterPredicate = FilterHelper.createFilterPredicate(
getLoadQueryInfluencers(),
(Joinable) pluralAttributeMapping.getCollectionDescriptor(),
tableGroup
);
if ( filterPredicate != null ) {
subQuerySpec.applyPredicate( filterPredicate );
}
getFromClauseAccess().registerTableGroup( pluralPath.getNavigablePath(), tableGroup );
registerPluralTableGroupParts( tableGroup );
@ -3109,6 +3095,14 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
this,
creationContext
);
final FilterPredicate filterPredicate = FilterHelper.createFilterPredicate(
getLoadQueryInfluencers(),
(Joinable) pluralAttributeMapping.getCollectionDescriptor(),
tableGroup
);
if ( filterPredicate != null ) {
subQuerySpec.applyPredicate( filterPredicate );
}
getFromClauseAccess().registerTableGroup( pluralPartPath.getNavigablePath(), tableGroup );
registerPluralTableGroupParts( tableGroup );
@ -3687,6 +3681,10 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
log.debugf( "Determining mapping-model type for generalized SqmExpression : %s", sqmExpression );
final SqmExpressable<?> nodeType = sqmExpression.getNodeType();
if ( nodeType == null ) {
// We can't determine the type of the expression
return null;
}
final MappingModelExpressable valueMapping = domainModel.resolveMappingExpressable(
nodeType,
this::findTableGroupByPath
@ -4040,20 +4038,19 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
}
private BasicValuedMapping getExpressionType(SqmBinaryArithmetic<?> expression) {
final SqmExpressable<?> leftHandOperandType = expression.getLeftHandOperand().getNodeType();
if ( leftHandOperandType instanceof BasicValuedMapping ) {
return (BasicValuedMapping) leftHandOperandType;
final SqmExpressable<?> sqmExpressable = QueryHelper.highestPrecedenceType(
expression.getLeftHandOperand().getNodeType(),
expression.getRightHandOperand().getNodeType()
);
if ( sqmExpressable instanceof BasicValuedMapping ) {
return (BasicValuedMapping) sqmExpressable;
}
else {
final SqmExpressable<?> rightHandOperandType = expression.getRightHandOperand().getNodeType();
if ( rightHandOperandType instanceof BasicValuedMapping ) {
return (BasicValuedMapping) rightHandOperandType;
}
else if ( sqmExpressable != null ) {
return getTypeConfiguration().getBasicTypeForJavaType(
leftHandOperandType.getExpressableJavaTypeDescriptor().getJavaTypeClass()
sqmExpressable.getExpressableJavaTypeDescriptor().getJavaTypeClass()
);
}
return JavaObjectType.INSTANCE;
}
private Expression toSqlExpression(Object value) {
@ -4664,33 +4661,6 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
);
}
// @Override
// public Object visitPluralAttributeElementBinding(PluralAttributeElementBinding binding) {
// final TableGroup resolvedTableGroup = fromClauseIndex.findResolvedTableGroup( binding.getFromElement() );
//
// return getCurrentDomainReferenceExpressionBuilder().buildPluralAttributeElementReferenceExpression(
// binding,
// resolvedTableGroup,
// PersisterHelper.convert( binding.getNavigablePath() )
// );
// }
//
// @Override
// public ColumnReference visitExplicitColumnReference(SqmColumnReference sqmColumnReference) {
// final TableGroup tableGroup = fromClauseIndex.findTableGroup(
// sqmColumnReference.getSqmFromBase().getNavigablePath()
// );
//
// final ColumnReference columnReference = tableGroup.locateColumnReferenceByName( sqmColumnReference.getColumnName() );
//
// if ( columnReference == null ) {
// throw new HibernateException( "Could not resolve ColumnReference" );
// }
//
// return columnReference;
// }
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Predicates
@ -4763,6 +4733,14 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
this,
creationContext
);
final FilterPredicate filterPredicate = FilterHelper.createFilterPredicate(
getLoadQueryInfluencers(),
(Joinable) pluralAttributeMapping.getCollectionDescriptor(),
tableGroup
);
if ( filterPredicate != null ) {
subQuerySpec.applyPredicate( filterPredicate );
}
getFromClauseAccess().registerTableGroup( pluralPath.getNavigablePath(), tableGroup );
registerPluralTableGroupParts( tableGroup );
@ -4837,8 +4815,11 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
finally {
inferrableTypeAccessStack.pop();
}
return new ComparisonPredicate( lhs, predicate.getSqmOperator(), rhs, getBooleanType() );
ComparisonOperator sqmOperator = predicate.getSqmOperator();
if ( predicate.isNegated() ) {
sqmOperator = sqmOperator.negated();
}
return new ComparisonPredicate( lhs, sqmOperator, rhs, getBooleanType() );
}
@Override
@ -4906,11 +4887,7 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
new SqlSelectionImpl( 1, 0, jdbcLiteral )
);
final ExistsPredicate existsPredicate = new ExistsPredicate( subQuerySpec, getBooleanType() );
if ( predicate.isNegated() ) {
return existsPredicate;
}
return new NegatedPredicate( existsPredicate );
return new ExistsPredicate( subQuerySpec, !predicate.isNegated(), getBooleanType() );
}
finally {
popProcessingStateStack();
@ -5081,19 +5058,22 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
SqmParameter<?> sqmParameter,
QueryParameterImplementor<?> domainParam,
QueryParameterBinding<?> domainParamBinding) {
final Iterator<?> iterator = domainParamBinding.getBindValues().iterator();
final InListPredicate inListPredicate = new InListPredicate(
(Expression) sqmPredicate.getTestExpression().accept( this ),
sqmPredicate.isNegated(),
getBooleanType()
);
if ( !iterator.hasNext() ) {
return inListPredicate;
}
inferrableTypeAccessStack.push(
() -> determineValueMapping( sqmPredicate.getTestExpression() )
);
try {
final Iterator<?> iterator = domainParamBinding.getBindValues().iterator();
inListPredicate.addExpression( consumeSingleSqmParameter( sqmParameter ) );
iterator.next();
while ( iterator.hasNext() ) {
@ -5145,7 +5125,11 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
@Override
public Object visitExistsPredicate(SqmExistsPredicate predicate) {
return new ExistsPredicate( (QueryPart) predicate.getExpression().accept( this ), getBooleanType() );
return new ExistsPredicate(
(QueryPart) predicate.getExpression().accept( this ),
predicate.isNegated(),
getBooleanType()
);
}
@Override
@ -5174,165 +5158,192 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
// .getOrMakeJavaDescriptor( namedClass );
}
@Override
public List<Fetch> visitFetches(FetchParent fetchParent) {
final List<Fetch> fetches = CollectionHelper.arrayList( fetchParent.getReferencedMappingType().getNumberOfFetchables() );
final List<String> bagRoles = new ArrayList<>();
public void addFetch(List<Fetch> fetches, FetchParent fetchParent, Fetchable fetchable, Boolean isKeyFetchable) {
final NavigablePath resolvedNavigablePath = fetchParent.resolveNavigablePath( fetchable );
final BiConsumer<Fetchable, Boolean> fetchableBiConsumer = (fetchable, isKeyFetchable) -> {
final NavigablePath resolvedNavigablePath = fetchParent.resolveNavigablePath( fetchable );
final String alias;
FetchTiming fetchTiming = fetchable.getMappedFetchOptions().getTiming();
boolean joined = false;
final String alias;
FetchTiming fetchTiming = fetchable.getMappedFetchOptions().getTiming();
boolean joined = false;
EntityGraphTraversalState.TraversalResult traversalResult = null;
final FromClauseIndex fromClauseIndex = getFromClauseIndex();
final SqmAttributeJoin<?, ?> fetchedJoin = fromClauseIndex.findFetchedJoinByPath( resolvedNavigablePath );
boolean explicitFetch = false;
EntityGraphTraversalState.TraversalResult traversalResult = null;
final FromClauseIndex fromClauseIndex = getFromClauseIndex();
final SqmAttributeJoin<?, ?> fetchedJoin = fromClauseIndex.findFetchedJoinByPath( resolvedNavigablePath );
boolean explicitFetch = false;
final NavigablePath fetchablePath;
if ( fetchedJoin != null ) {
fetchablePath = fetchedJoin.getNavigablePath();
// there was an explicit fetch in the SQM
// there should be a TableGroupJoin registered for this `fetchablePath` already
assert fromClauseIndex.getTableGroup( fetchedJoin.getNavigablePath() ) != null;
final NavigablePath fetchablePath;
if ( fetchedJoin != null ) {
fetchablePath = fetchedJoin.getNavigablePath();
// there was an explicit fetch in the SQM
// there should be a TableGroupJoin registered for this `fetchablePath` already
assert fromClauseIndex.getTableGroup( fetchedJoin.getNavigablePath() ) != null;
if ( fetchedJoin.isFetched() ) {
fetchTiming = FetchTiming.IMMEDIATE;
}
joined = true;
alias = fetchedJoin.getExplicitAlias();
explicitFetch = true;
if ( fetchedJoin.isFetched() ) {
fetchTiming = FetchTiming.IMMEDIATE;
}
else {
fetchablePath = resolvedNavigablePath;
// there was not an explicit fetch in the SQM
alias = null;
joined = true;
alias = fetchedJoin.getExplicitAlias();
explicitFetch = true;
}
else {
fetchablePath = resolvedNavigablePath;
// there was not an explicit fetch in the SQM
alias = null;
if ( !( fetchable instanceof CollectionPart ) ) {
if ( entityGraphTraversalState != null ) {
traversalResult = entityGraphTraversalState.traverse( fetchParent, fetchable, isKeyFetchable );
fetchTiming = traversalResult.getFetchTiming();
joined = traversalResult.isJoined();
explicitFetch = true;
}
else if ( getLoadQueryInfluencers().hasEnabledFetchProfiles() ) {
// There is no point in checking the fetch profile if it can't affect this fetchable
if ( fetchTiming != FetchTiming.IMMEDIATE || fetchable.incrementFetchDepth() ) {
final String fetchableRole = fetchable.getNavigableRole().getFullPath();
if ( !( fetchable instanceof CollectionPart ) ) {
if ( entityGraphTraversalState != null ) {
traversalResult = entityGraphTraversalState.traverse(
fetchParent,
fetchable,
isKeyFetchable
);
fetchTiming = traversalResult.getFetchTiming();
joined = traversalResult.isJoined();
explicitFetch = true;
}
else if ( getLoadQueryInfluencers().hasEnabledFetchProfiles() ) {
// There is no point in checking the fetch profile if it can't affect this fetchable
if ( fetchTiming != FetchTiming.IMMEDIATE || fetchable.incrementFetchDepth() ) {
final String fetchableRole = fetchable.getNavigableRole().getFullPath();
for ( String enabledFetchProfileName : getLoadQueryInfluencers().getEnabledFetchProfileNames() ) {
final FetchProfile enabledFetchProfile = getCreationContext().getSessionFactory()
.getFetchProfile( enabledFetchProfileName );
final org.hibernate.engine.profile.Fetch profileFetch = enabledFetchProfile.getFetchByRole(
fetchableRole );
for ( String enabledFetchProfileName : getLoadQueryInfluencers()
.getEnabledFetchProfileNames() ) {
final FetchProfile enabledFetchProfile = getCreationContext()
.getSessionFactory()
.getFetchProfile( enabledFetchProfileName );
final org.hibernate.engine.profile.Fetch profileFetch = enabledFetchProfile.getFetchByRole(
fetchableRole );
if ( profileFetch != null ) {
fetchTiming = FetchTiming.IMMEDIATE;
joined = joined || profileFetch.getStyle() == org.hibernate.engine.profile.Fetch.Style.JOIN;
explicitFetch = true;
if ( profileFetch != null ) {
fetchTiming = FetchTiming.IMMEDIATE;
joined = joined || profileFetch.getStyle() == org.hibernate.engine.profile.Fetch.Style.JOIN;
explicitFetch = true;
if ( currentBagRole != null && fetchable instanceof PluralAttributeMapping ) {
final CollectionClassification collectionClassification = ( (PluralAttributeMapping) fetchable ).getMappedType()
.getCollectionSemantics()
.getCollectionClassification();
if ( collectionClassification == CollectionClassification.BAG ) {
// To avoid a MultipleBagFetchException due to fetch profiles in a circular model,
// we skip join fetching in case we encounter an existing bag role
joined = false;
}
}
}
}
}
}
}
final TableGroup existingJoinedGroup = fromClauseIndex.findTableGroup( fetchablePath );
if ( existingJoinedGroup != null ) {
// we can use this to trigger the fetch from the joined group.
final TableGroup existingJoinedGroup = fromClauseIndex.findTableGroup( fetchablePath );
if ( existingJoinedGroup != null ) {
// we can use this to trigger the fetch from the joined group.
// todo (6.0) : do we want to do this though?
// On the positive side it would allow EntityGraph to use the existing TableGroup. But that ties in
// to the discussion above regarding how to handle eager and EntityGraph (JOIN versus SELECT).
// Can be problematic if the existing one is restricted
//fetchTiming = FetchTiming.IMMEDIATE;
}
// todo (6.0) : do we want to do this though?
// On the positive side it would allow EntityGraph to use the existing TableGroup. But that ties in
// to the discussion above regarding how to handle eager and EntityGraph (JOIN versus SELECT).
// Can be problematic if the existing one is restricted
//fetchTiming = FetchTiming.IMMEDIATE;
}
// lastly, account for any app-defined max-fetch-depth
final Integer maxDepth = getCreationContext().getMaximumFetchDepth();
if ( maxDepth != null ) {
if ( fetchDepth >= maxDepth ) {
joined = false;
}
}
if ( joined && fetchable instanceof TableGroupJoinProducer ) {
TableGroupJoinProducer tableGroupJoinProducer = (TableGroupJoinProducer) fetchable;
fromClauseIndex.resolveTableGroup(
fetchablePath,
np -> {
// generate the join
final TableGroup lhs = fromClauseIndex.getTableGroup( fetchParent.getNavigablePath() );
final TableGroupJoin tableGroupJoin = ( (TableGroupJoinProducer) fetchable ).createTableGroupJoin(
fetchablePath,
lhs,
alias,
tableGroupJoinProducer.getDefaultSqlAstJoinType( lhs ),
true,
false,
this
);
lhs.addTableGroupJoin( tableGroupJoin );
return tableGroupJoin.getJoinedGroup();
}
);
// lastly, account for any app-defined max-fetch-depth
final Integer maxDepth = getCreationContext().getMaximumFetchDepth();
if ( maxDepth != null ) {
if ( fetchDepth >= maxDepth ) {
joined = false;
}
}
final boolean incrementFetchDepth = fetchable.incrementFetchDepth();
try {
if ( incrementFetchDepth ) {
fetchDepth++;
}
// There is no need to check for circular fetches if this is an explicit fetch
if ( !explicitFetch && !isResolvingCircularFetch() ) {
final Fetch biDirectionalFetch = fetchable.resolveCircularFetch(
fetchablePath,
fetchParent,
fetchTiming,
this
);
if ( biDirectionalFetch != null ) {
fetches.add( biDirectionalFetch );
return;
}
}
final Fetch fetch = buildFetch( fetchablePath, fetchParent, fetchable, fetchTiming, joined, alias );
if ( fetch != null ) {
if ( fetch.getTiming() == FetchTiming.IMMEDIATE && fetchable instanceof PluralAttributeMapping ) {
final PluralAttributeMapping pluralAttributeMapping = (PluralAttributeMapping) fetchable;
final CollectionClassification collectionClassification = pluralAttributeMapping.getMappedType()
.getCollectionSemantics()
.getCollectionClassification();
if ( collectionClassification == CollectionClassification.BAG ) {
bagRoles.add( fetchable.getNavigableRole().getNavigableName() );
if ( joined && fetchable instanceof TableGroupJoinProducer ) {
TableGroupJoinProducer tableGroupJoinProducer = (TableGroupJoinProducer) fetchable;
fromClauseIndex.resolveTableGroup(
fetchablePath,
np -> {
// generate the join
final TableGroup lhs = fromClauseIndex.getTableGroup( fetchParent.getNavigablePath() );
final TableGroupJoin tableGroupJoin = ( (TableGroupJoinProducer) fetchable ).createTableGroupJoin(
fetchablePath,
lhs,
alias,
tableGroupJoinProducer.getDefaultSqlAstJoinType( lhs ),
true,
false,
BaseSqmToSqlAstConverter.this
);
lhs.addTableGroupJoin( tableGroupJoin );
return tableGroupJoin.getJoinedGroup();
}
}
);
}
}
fetches.add( fetch );
final boolean incrementFetchDepth = fetchable.incrementFetchDepth();
try {
if ( incrementFetchDepth ) {
fetchDepth++;
}
// There is no need to check for circular fetches if this is an explicit fetch
if ( !explicitFetch && !isResolvingCircularFetch() ) {
final Fetch biDirectionalFetch = fetchable.resolveCircularFetch(
fetchablePath,
fetchParent,
fetchTiming,
this
);
if ( biDirectionalFetch != null ) {
fetches.add( biDirectionalFetch );
return;
}
}
finally {
if ( incrementFetchDepth ) {
fetchDepth--;
}
if ( entityGraphTraversalState != null && traversalResult != null ) {
entityGraphTraversalState.backtrack( traversalResult.getPreviousContext() );
final Fetch fetch = buildFetch(
fetchablePath,
fetchParent,
fetchable,
fetchTiming,
joined,
alias
);
if ( fetch != null ) {
if ( fetch.getTiming() == FetchTiming.IMMEDIATE && fetchable instanceof PluralAttributeMapping ) {
final PluralAttributeMapping pluralAttributeMapping = (PluralAttributeMapping) fetchable;
final CollectionClassification collectionClassification = pluralAttributeMapping.getMappedType()
.getCollectionSemantics()
.getCollectionClassification();
if ( collectionClassification == CollectionClassification.BAG ) {
if ( currentBagRole != null ) {
throw new MultipleBagFetchException(
Arrays.asList(
currentBagRole,
fetchable.getNavigableRole().getNavigableName()
)
);
}
currentBagRole = fetchable.getNavigableRole().getNavigableName();
}
}
fetches.add( fetch );
}
};
}
finally {
if ( incrementFetchDepth ) {
fetchDepth--;
}
if ( entityGraphTraversalState != null && traversalResult != null ) {
entityGraphTraversalState.backtrack( traversalResult.getPreviousContext() );
}
}
}
@Override
public List<Fetch> visitFetches(FetchParent fetchParent) {
final List<Fetch> fetches = CollectionHelper.arrayList( fetchParent.getReferencedMappingType().getNumberOfFetchables() );
// todo (6.0) : determine how to best handle TREAT
// fetchParent.getReferencedMappingContainer().visitKeyFetchables( fetchableBiConsumer, treatTargetType );
// fetchParent.getReferencedMappingContainer().visitFetchables( fetchableBiConsumer, treatTargetType );
fetchParent.getReferencedMappingContainer().visitKeyFetchables( fetchable -> fetchableBiConsumer.accept( fetchable, true ), null );
fetchParent.getReferencedMappingContainer().visitFetchables( fetchable -> fetchableBiConsumer.accept( fetchable, false ), null );
if ( bagRoles.size() > 1 ) {
throw new MultipleBagFetchException( bagRoles );
}
fetchParent.getReferencedMappingContainer().visitKeyFetchables( fetchable -> addFetch( fetches, fetchParent, fetchable, true ), null );
fetchParent.getReferencedMappingContainer().visitFetchables( fetchable -> addFetch( fetches, fetchParent, fetchable, false ), null );
return fetches;
}
@ -5400,16 +5411,9 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
}
}
if ( orderByFragmentConsumer != null ) {
if ( currentQuerySpec().isRoot() ) {
assert tableGroup.getModelPart() == pluralAttributeMapping;
if ( pluralAttributeMapping.getOrderByFragment() != null ) {
orderByFragmentConsumer.accept( pluralAttributeMapping.getOrderByFragment(), tableGroup );
}
if ( pluralAttributeMapping.getManyToManyOrderByFragment() != null ) {
orderByFragmentConsumer.accept( pluralAttributeMapping.getManyToManyOrderByFragment(), tableGroup );
}
applyOrdering( tableGroup, pluralAttributeMapping );
}
}
@ -5428,6 +5432,23 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
}
}
private void applyOrdering(TableGroup tableGroup, PluralAttributeMapping pluralAttributeMapping) {
if ( pluralAttributeMapping.getOrderByFragment() != null ) {
applyOrdering( tableGroup, pluralAttributeMapping.getOrderByFragment() );
}
if ( pluralAttributeMapping.getManyToManyOrderByFragment() != null ) {
applyOrdering( tableGroup, pluralAttributeMapping.getManyToManyOrderByFragment() );
}
}
private void applyOrdering(TableGroup tableGroup, OrderByFragment orderByFragment) {
if ( orderByFragments == null ) {
orderByFragments = new ArrayList<>();
}
orderByFragments.add( new AbstractMap.SimpleEntry<>( orderByFragment, tableGroup ) );
}
@Override
public boolean isResolvingCircularFetch() {
return resolvingCircularFetch;
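The reworked addFetch above fails fast on a second bag fetch: as soon as an immediately-fetched bag is encountered while currentBagRole is already set, a MultipleBagFetchException is thrown instead of collecting all bag roles first. A hypothetical mapping and query that would trigger this, with invented names:

import java.util.List;

import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.OneToMany;

// Two java.util.List associations without an order column are classified as bags.
// Book and Award are other hypothetical entities.
@Entity
public class Author {
	@Id
	Long id;
	@OneToMany
	List<Book> books;
	@OneToMany
	List<Award> awards;
}
// A query such as
//   select a from Author a join fetch a.books join fetch a.awards
// now raises MultipleBagFetchException as soon as the second bag fetch is processed.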

View File

@ -7,7 +7,9 @@
package org.hibernate.query.sqm.sql.internal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.BasicEntityIdentifierMapping;
@ -31,6 +33,7 @@ import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.SqlTupleContainer;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableReference;
import org.hibernate.sql.ast.tree.update.Assignable;
import org.hibernate.sql.results.graph.DomainResultCreationState;
import static org.hibernate.sql.ast.spi.SqlExpressionResolver.createColumnReferenceKey;
@ -38,7 +41,8 @@ import static org.hibernate.sql.ast.spi.SqlExpressionResolver.createColumnRefere
/**
* @author Koen Aers
*/
public class EntityValuedPathInterpretation<T> extends AbstractSqmPathInterpretation<T> implements SqlTupleContainer {
public class EntityValuedPathInterpretation<T> extends AbstractSqmPathInterpretation<T> implements SqlTupleContainer,
Assignable {
public static <T> EntityValuedPathInterpretation<T> from(
SqmEntityValuedSimplePath<T> sqmPath,
@ -232,12 +236,34 @@ public class EntityValuedPathInterpretation<T> extends AbstractSqmPathInterpreta
sqlExpression.accept( sqlTreeWalker );
}
@Override
public List<ColumnReference> getColumnReferences() {
if ( sqlExpression instanceof SqlTuple ) {
//noinspection unchecked
return (List<ColumnReference>) ( (SqlTuple) sqlExpression ).getExpressions();
}
return Collections.singletonList( (ColumnReference) sqlExpression );
}
@Override
public void visitColumnReferences(Consumer<ColumnReference> columnReferenceConsumer) {
if ( sqlExpression instanceof SqlTuple ) {
for ( Expression e : ( (SqlTuple) sqlExpression ).getExpressions() ) {
columnReferenceConsumer.accept( (ColumnReference) e );
}
}
else {
columnReferenceConsumer.accept( (ColumnReference) sqlExpression );
}
}
@Override
public SqlTuple getSqlTuple() {
return sqlExpression instanceof SqlTuple
? (SqlTuple) sqlExpression
: null;
}
@Override
public void applySqlSelections(DomainResultCreationState creationState) {
creationState.getSqlAstCreationState().getSqlExpressionResolver().resolveSqlSelection(
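Implementing Assignable here exposes the entity-valued path's key column references, which is what an assignment target needs in the SQL AST. A hedged sketch, with invented entity and attribute names, of the kind of bulk update this is meant to support:

// Hypothetical model: Order has a to-one association "customer"; session and customer are assumed variables.
// The association's foreign-key column(s) would be resolved through getColumnReferences()
// when the path appears as an assignment target.
session.createQuery( "update Order o set o.customer = :customer where o.customer is null" )
		.setParameter( "customer", customer )
		.executeUpdate();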

View File

@ -53,7 +53,7 @@ public abstract class AbstractSqmAttributeJoin<O,T>
lhs,
SqmCreationHelper.buildSubNavigablePath( lhs, joinedNavigable.getName(), alias ),
joinedNavigable,
alias,
alias == SqmCreationHelper.IMPLICIT_ALIAS ? null : alias,
joinType,
fetched,
nodeBuilder

View File

@ -24,6 +24,7 @@ import jakarta.persistence.metamodel.PluralAttribute;
import jakarta.persistence.metamodel.SetAttribute;
import jakarta.persistence.metamodel.SingularAttribute;
import org.hibernate.metamodel.mapping.ModelPartContainer;
import org.hibernate.metamodel.model.domain.BagPersistentAttribute;
import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.metamodel.model.domain.ListPersistentAttribute;
@ -44,6 +45,7 @@ import org.hibernate.query.hql.spi.SqmCreationState;
import org.hibernate.query.sqm.spi.SqmCreationHelper;
import org.hibernate.query.sqm.tree.SqmJoinType;
import org.hibernate.query.sqm.tree.from.SqmAttributeJoin;
import org.hibernate.query.sqm.tree.from.SqmCrossJoin;
import org.hibernate.query.sqm.tree.from.SqmEntityJoin;
import org.hibernate.query.sqm.tree.from.SqmFrom;
import org.hibernate.query.sqm.tree.from.SqmJoin;
@ -129,18 +131,84 @@ public abstract class AbstractSqmFrom<O,T> extends AbstractSqmPath<T> implements
String name,
boolean isTerminal,
SqmCreationState creationState) {
final NavigablePath subNavPath = getNavigablePath().append( name );
return creationState.getProcessingStateStack().getCurrent().getPathRegistry().resolvePath(
subNavPath,
snp -> {
final SqmPathSource<?> subSource = getReferencedPathSource().findSubPathSource( name );
if ( subSource == null ) {
throw UnknownPathException.unknownSubPath( this, name );
// Try to resolve an existing attribute join without ON clause
SqmPath<?> resolvedPath = null;
ModelPartContainer modelPartContainer = null;
for ( SqmJoin<?, ?> sqmJoin : getSqmJoins() ) {
if ( sqmJoin instanceof SqmAttributeJoin<?, ?>
&& name.equals( sqmJoin.getReferencedPathSource().getPathName() ) ) {
final SqmAttributeJoin<?, ?> attributeJoin = (SqmAttributeJoin<?, ?>) sqmJoin;
if ( attributeJoin.getOn() == null ) {
// todo (6.0): to match the expectation of the JPA spec I think we also have to check
// that the join type is INNER or the default join type for the attribute,
// but as far as I understand, in 5.x we expect to ignore this behavior
// if ( attributeJoin.getSqmJoinType() != SqmJoinType.INNER ) {
// if ( attributeJoin.getAttribute().isCollection() ) {
// continue;
// }
// if ( modelPartContainer == null ) {
// modelPartContainer = findModelPartContainer( attributeJoin, creationState );
// }
// final TableGroupJoinProducer joinProducer = (TableGroupJoinProducer) modelPartContainer.findSubPart(
// name,
// null
// );
// if ( attributeJoin.getSqmJoinType().getCorrespondingSqlJoinType() != joinProducer.getDefaultSqlAstJoinType( null ) ) {
// continue;
// }
// }
resolvedPath = sqmJoin;
if ( attributeJoin.isFetched() ) {
break;
}
return subSource.createSqmPath( this, getReferencedPathSource().getIntermediatePathSource( subSource ) );
}
);
}
}
if ( resolvedPath != null ) {
return resolvedPath;
}
final SqmPath<?> sqmPath = get( name );
creationState.getProcessingStateStack().getCurrent().getPathRegistry().register( sqmPath );
return sqmPath;
}
private ModelPartContainer findModelPartContainer(SqmAttributeJoin<?, ?> attributeJoin, SqmCreationState creationState) {
final SqmFrom<?, ?> lhs = attributeJoin.getLhs();
if ( lhs instanceof SqmAttributeJoin<?, ?> ) {
final SqmAttributeJoin<?, ?> lhsAttributeJoin = (SqmAttributeJoin<?, ?>) lhs;
if ( lhsAttributeJoin.getReferencedPathSource() instanceof EntityDomainType<?> ) {
final String entityName = ( (EntityDomainType<?>) lhsAttributeJoin.getReferencedPathSource() ).getHibernateEntityName();
return (ModelPartContainer) creationState.getCreationContext().getQueryEngine()
.getTypeConfiguration()
.getSessionFactory()
.getMetamodel()
.entityPersister( entityName )
.findSubPart( attributeJoin.getAttribute().getName(), null );
}
else {
return (ModelPartContainer) findModelPartContainer( lhsAttributeJoin, creationState )
.findSubPart( attributeJoin.getAttribute().getName(), null );
}
}
else {
final String entityName;
if ( lhs instanceof SqmRoot<?> ) {
entityName = ( (SqmRoot<?>) lhs ).getEntityName();
}
else if ( lhs instanceof SqmEntityJoin<?> ) {
entityName = ( (SqmEntityJoin<?>) lhs ).getEntityName();
}
else {
assert lhs instanceof SqmCrossJoin<?>;
entityName = ( (SqmCrossJoin<?>) lhs ).getEntityName();
}
return (ModelPartContainer) creationState.getCreationContext().getQueryEngine()
.getTypeConfiguration()
.getSessionFactory()
.getMetamodel()
.entityPersister( entityName )
.findSubPart( attributeJoin.getAttribute().getName(), null );
}
}
@Override
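The join-reuse logic above aligns implicit de-referencing with 5.x: a dotted path over an attribute that is already explicitly joined without an ON clause resolves to that existing join. An invented example of a query that benefits (session is an open Hibernate Session):

// Illustrative only: the explicit join "o.customer c" has no ON clause, so the implicit
// path "o.customer.name" in the WHERE clause resolves to the same join rather than a second one.
List<?> customers = session.createQuery(
		"select c from Order o join o.customer c where o.customer.name like :name" )
		.setParameter( "name", "A%" )
		.getResultList();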

Some files were not shown because too many files have changed in this diff.