HHH-18661 Add unnest() set-returning function and enable XML/JSON based array support on more databases

This commit is contained in:
Christian Beikov 2024-10-09 14:15:14 +02:00
parent 9bb5e64521
commit 25ddb64a4c
225 changed files with 9086 additions and 1030 deletions

View File

@ -434,6 +434,8 @@ use master
go
create login $SYBASE_USER with password $SYBASE_PASSWORD
go
exec sp_configure 'enable xml', 1
go
exec sp_dboption $SYBASE_DB, 'abort tran on log full', true
go
exec sp_dboption $SYBASE_DB, 'allow nulls by default', true

View File

@ -8,6 +8,7 @@
:array-example-dir-hql: {core-project-dir}/src/test/java/org/hibernate/orm/test/function/array
:json-example-dir-hql: {core-project-dir}/src/test/java/org/hibernate/orm/test/function/json
:xml-example-dir-hql: {core-project-dir}/src/test/java/org/hibernate/orm/test/function/xml
:srf-example-dir-hql: {core-project-dir}/src/test/java/org/hibernate/orm/test/function/srf
:extrasdir: extras
This chapter describes Hibernate Query Language (HQL) and Jakarta Persistence Query Language (JPQL).
@ -1197,32 +1198,33 @@ The following functions deal with SQL array types, which are not supported on ev
|===
| Function | Purpose
| `array()` | Creates an array based on the passed arguments
| `array_list()` | Like `array`, but returns the result as `List<?>`
| `array_agg()` | Aggregates row values into an array
| `array_position()` | Determines the position of an element in an array
| `array_positions()` | Determines all positions of an element in an array
| `array_positions_list()` | Like `array_positions`, but returns the result as `List<Integer>`
| `array_length()` | Determines the length of an array
| `array_concat()` | Concatenates arrays with each other in order
| `array_prepend()` | Prepends element to array
| `array_append()` | Appends element to array
| `array_contains()` | Whether an array contains an element
| `array_contains_nullable()` | Whether an array contains an element, supporting `null` element
| `array_includes()` | Whether an array contains another array
| `array_includes_nullable()` | Whether an array contains another array, supporting `null` elements
| `array_intersects()` | Whether an array holds at least one element of another array
| `array_intersects_nullable()` | Whether an array holds at least one element of another array, supporting `null` elements
| `array_get()` | Accesses the element of an array by index
| `array_set()` | Creates array copy with given element at given index
| `array_remove()` | Creates array copy with given element removed
| `array_remove_index()` | Creates array copy with the element at the given index removed
| `array_slice()` | Creates a sub-array based on a lower and upper index
| `array_replace()` | Creates array copy replacing a given element with another
| `array_trim()` | Creates array copy trimming the last _N_ elements
| `array_fill()` | Creates array filled with the same element _N_ times
| `array_fill_list()` | Like `array_fill`, but returns the result as `List<?>`
| `array_to_string()` | String representation of array
| <<hql-array-constructor-functions,`array()`>> | Creates an array based on the passed arguments
| <<hql-array-constructor-functions,`array_list()`>> | Like `array`, but returns the result as `List<?>`
| <<hql-array-aggregate-functions,`array_agg()`>> | Aggregates row values into an array
| <<hql-array-position-functions,`array_position()`>> | Determines the position of an element in an array
| <<hql-array-positions-functions,`array_positions()`>> | Determines all positions of an element in an array
| <<hql-array-positions-functions,`array_positions_list()`>> | Like `array_positions`, but returns the result as `List<Integer>`
| <<hql-array-length-functions,`array_length()`>> | Determines the length of an array
| <<hql-array-concat-functions,`array_concat()`>> | Concatenates arrays with each other in order
| <<hql-array-prepend-functions,`array_prepend()`>> | Prepends element to array
| <<hql-array-append-functions,`array_append()`>> | Appends element to array
| <<hql-array-contains-functions,`array_contains()`>> | Whether an array contains an element
| <<hql-array-contains-functions,`array_contains_nullable()`>> | Whether an array contains an element, supporting `null` element
| <<hql-array-includes-example,`array_includes()`>> | Whether an array contains another array
| <<hql-array-includes-example,`array_includes_nullable()`>> | Whether an array contains another array, supporting `null` elements
| <<hql-array-intersects-functions,`array_intersects()`>> | Whether an array holds at least one element of another array
| <<hql-array-intersects-functions,`array_intersects_nullable()`>> | Whether an array holds at least one element of another array, supporting `null` elements
| <<hql-array-get-functions,`array_get()`>> | Accesses the element of an array by index
| <<hql-array-set-functions,`array_set()`>> | Creates array copy with given element at given index
| <<hql-array-remove-functions,`array_remove()`>> | Creates array copy with given element removed
| <<hql-array-remove-index-functions,`array_remove_index()`>> | Creates array copy with the element at the given index removed
| <<hql-array-slice-functions,`array_slice()`>> | Creates a sub-array based on a lower and upper index
| <<hql-array-replace-functions,`array_replace()`>> | Creates array copy replacing a given element with another
| <<hql-array-trim-functions,`array_trim()`>> | Creates array copy trimming the last _N_ elements
| <<hql-array-fill-functions,`array_fill()`>> | Creates array filled with the same element _N_ times
| <<hql-array-fill-functions,`array_fill_list()`>> | Like `array_fill`, but returns the result as `List<?>`
| <<hql-array-to-string-functions,`array_to_string()`>> | String representation of array
| <<hql-array-unnest,`unnest()`>> | Turns an array into rows
|===
[[hql-array-constructor-functions]]
@ -1637,6 +1639,32 @@ include::{array-example-dir-hql}/ArrayToStringTest.java[tags=hql-array-to-string
----
====
[[hql-array-unnest]]
===== `unnest()`
A <<hql-from-set-returning-functions,set-returning function>>, which turns the single array argument into rows.
Returns no rows if the array argument is `null` or an empty array.
The `index()` function can be used to access the 1-based array index of an array element.
[[hql-array-unnest-struct-example]]
====
[source, java, indent=0]
----
include::{array-example-dir-hql}/ArrayUnnestStructTest.java[tags=hql-array-unnest-aggregate-with-ordinality-example]
----
====
The `lateral` keyword is mandatory if the argument refers to a from node item of the same query level.
Basic plural attributes can also be joined directly, which is syntactic sugar for `lateral unnest(...)`.
[[hql-array-unnest-example]]
====
[source, java, indent=0]
----
include::{array-example-dir-hql}/ArrayUnnestTest.java[tags=hql-array-unnest-example]
----
====
[[hql-functions-json]]
==== Functions for dealing with JSON
@ -2916,6 +2944,48 @@ The CTE name can be used for a `from` clause root or a `join`, similar to entity
Refer to the <<hql-with-cte,with clause>> chapter for details about CTEs.
[[hql-from-set-returning-functions]]
==== Set-returning functions in `from` clause
A set-returning function is a function that produces rows instead of a single scalar value
and is exclusively used in the `from` clause, either as root node or join target.
The `index()` function can be used to access the 1-based index of a returned row.
The following set-returning functions are available on many platforms:
|===
| Function | Purpose
| <<hql-array-unnest,`unnest()`>> | Turns an array into rows
//| `generate_series()` | Creates a series of values as rows
|===
To use set-returning functions defined in the database, it is required to register them in a `FunctionContributor`:
[[hql-from-set-returning-functions-contributor-example]]
====
[source, java, indent=0]
----
include::{srf-example-dir-hql}/CustomSetReturningFunctionTest.java[tags=hql-set-returning-function-contributor-example]
----
====
After that, the function can be used in the `from` clause:
[[hql-from-set-returning-functions-custom-example]]
====
[source, java, indent=0]
----
include::{srf-example-dir-hql}/CustomSetReturningFunctionTest.java[tags=hql-set-returning-function-custom-example]
----
====
NOTE: The `index()` function represents the idea of the `with ordinality` SQL syntax,
which is not supported on some databases for user-defined functions.
Hibernate ORM tries to emulate this feature by wrapping invocations as lateral subqueries and using `row_number()`,
which may lead to worse performance.
[[hql-join]]
=== Declaring joined entities
@ -3131,6 +3201,17 @@ Most databases support some flavor of `join lateral`, and Hibernate emulates the
But emulation is neither very efficient, nor does it support all possible query shapes, so it's important to test on your target database.
====
[[hql-join-set-returning-function]]
==== Set-returning functions in joins
A `join` clause may contain a set-returning function, either:
- an uncorrelated set-returning function, which is almost the same as a <<hql-from-set-returning-functions,set-returning function in the `from` clause>>, except that it may have an `on` restriction, or
- a _lateral join_, which is a correlated set-returning function, and may refer to other roots declared earlier in the same `from` clause.
The `lateral` keyword just distinguishes the two cases.
A lateral join may be an inner or left outer join, but not a right join, nor a full join.
[[hql-implicit-join]]
==== Implicit association joins (path expressions)

View File

@ -18,6 +18,7 @@ import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.PessimisticLockException;
@ -92,7 +93,6 @@ import static org.hibernate.type.SqlTypes.GEOMETRY;
import static org.hibernate.type.SqlTypes.INET;
import static org.hibernate.type.SqlTypes.INTEGER;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.LONG32NVARCHAR;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.LONG32VARCHAR;
@ -263,11 +263,9 @@ public class CockroachLegacyDialect extends Dialect {
if ( getVersion().isSameOrAfter( 20 ) ) {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( INET, "inet", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "jsonb", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON_ARRAY, "jsonb", this ) );
}
else {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "json", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON_ARRAY, "json", this ) );
}
ddlTypeRegistry.addDescriptor( new NamedNativeEnumDdlTypeImpl( this ) );
ddlTypeRegistry.addDescriptor( new NamedNativeOrdinalEnumDdlTypeImpl( this ) );
@ -372,11 +370,11 @@ public class CockroachLegacyDialect extends Dialect {
if ( getVersion().isSameOrAfter( 20, 0 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getInetJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getJsonbJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getJsonbArrayJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PgJdbcHelper.getJsonbArrayJdbcType( serviceRegistry ) );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getJsonJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getJsonArrayJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PgJdbcHelper.getJsonArrayJdbcType( serviceRegistry ) );
}
}
else {
@ -384,11 +382,11 @@ public class CockroachLegacyDialect extends Dialect {
if ( getVersion().isSameOrAfter( 20, 0 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonArrayJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PostgreSQLCastingJsonArrayJdbcTypeConstructor.JSONB_INSTANCE );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSON_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonArrayJdbcType.JSON_INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PostgreSQLCastingJsonArrayJdbcTypeConstructor.JSON_INSTANCE );
}
}
}
@ -398,11 +396,11 @@ public class CockroachLegacyDialect extends Dialect {
if ( getVersion().isSameOrAfter( 20, 0 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonArrayJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PostgreSQLCastingJsonArrayJdbcTypeConstructor.JSONB_INSTANCE );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSON_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonArrayJdbcType.JSON_INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PostgreSQLCastingJsonArrayJdbcTypeConstructor.JSON_INSTANCE );
}
}
@ -424,6 +422,7 @@ public class CockroachLegacyDialect extends Dialect {
)
);
// Replace the standard array constructor
jdbcTypeRegistry.addTypeConstructor( PostgreSQLArrayJdbcTypeConstructor.INSTANCE );
}
@ -518,6 +517,8 @@ public class CockroachLegacyDialect extends Dialect {
functionFactory.jsonArrayAppend_postgresql( false );
functionFactory.jsonArrayInsert_postgresql();
functionFactory.unnest_postgresql();
// Postgres uses # instead of ^ for XOR
functionContributions.getFunctionRegistry().patternDescriptorBuilder( "bitxor", "(?1#?2)" )
.setExactArgumentCount( 2 )
@ -534,6 +535,11 @@ public class CockroachLegacyDialect extends Dialect {
functionContributions.getFunctionRegistry().registerAlternateKey( "truncate", "trunc" );
}
@Override
public @Nullable String getDefaultOrdinalityColumnName() {
return "ordinality";
}
@Override
public TimeZoneSupport getTimeZoneSupport() {
return TimeZoneSupport.NORMALIZE;

View File

@ -88,6 +88,7 @@ import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorDB
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorNoOpImpl;
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.JavaObjectType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.java.JavaType;
@ -455,6 +456,14 @@ public class DB2LegacyDialect extends Dialect {
functionFactory.xmlexists_db2_legacy();
}
functionFactory.xmlagg();
functionFactory.unnest_emulated();
}
@Override
public int getPreferredSqlTypeCodeForArray() {
// Even if DB2 11 supports JSON functions, it's not possible to unnest a JSON array to rows, so stick to XML
return SqlTypes.XML_ARRAY;
}
@Override

View File

@ -10,8 +10,11 @@ import java.util.function.Consumer;
import org.hibernate.LockMode;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.query.IllegalQueryOperationException;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.sql.ast.Clause;
@ -19,6 +22,7 @@ import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.MutationStatement;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.delete.DeleteStatement;
import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
@ -28,6 +32,8 @@ import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.from.TableGroup;
@ -253,6 +259,34 @@ public class DB2LegacySqlAstTranslator<T extends JdbcOperation> extends Abstract
inLateral = oldLateral;
}
@Override
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
if ( tableReference instanceof FunctionTableReference && tableReference.isLateral() ) {
// No need for a lateral keyword for functions
tableReference.accept( this );
}
else {
super.renderDerivedTableReference( tableReference );
}
}
@Override
public void renderNamedSetReturningFunction(String functionName, List<? extends SqlAstNode> sqlAstArguments, AnonymousTupleTableGroupProducer tupleType, String tableIdentifierVariable, SqlAstNodeRenderingMode argumentRenderingMode) {
final ModelPart ordinalitySubPart = tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
if ( ordinalitySubPart != null ) {
appendSql( "lateral (select t.*, row_number() over() " );
appendSql( ordinalitySubPart.asBasicValuedModelPart().getSelectionExpression() );
appendSql( " from table(" );
renderSimpleNamedFunction( functionName, sqlAstArguments, argumentRenderingMode );
append( ") t)" );
}
else {
appendSql( "table(" );
super.renderNamedSetReturningFunction( functionName, sqlAstArguments, tupleType, tableIdentifierVariable, argumentRenderingMode );
append( ')' );
}
}
@Override
public void visitSelectStatement(SelectStatement statement) {
if ( getQueryPartForRowNumbering() == statement.getQueryPart() && inLateral ) {

View File

@ -4,17 +4,13 @@
*/
package org.hibernate.community.dialect;
import org.hibernate.LockMode;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.exec.spi.JdbcOperation;
@ -58,28 +54,10 @@ public class DB2zLegacySqlAstTranslator<T extends JdbcOperation> extends DB2Lega
}
@Override
protected boolean renderPrimaryTableReference(TableGroup tableGroup, LockMode lockMode) {
if ( shouldInlineCte( tableGroup ) ) {
inlineCteTableGroup( tableGroup, lockMode );
return false;
}
final TableReference tableReference = tableGroup.getPrimaryTableReference();
if ( tableReference instanceof NamedTableReference ) {
return renderNamedTableReference( (NamedTableReference) tableReference, lockMode );
}
public void visitQueryPartTableReference(QueryPartTableReference tableReference) {
// DB2 z/OS we need the "table" qualifier for table valued functions or lateral sub-queries
append( "table " );
tableReference.accept( this );
return false;
}
@Override
public void visitFunctionTableReference(FunctionTableReference tableReference) {
// For the table qualifier we need parenthesis on DB2 z/OS
append( OPEN_PARENTHESIS );
tableReference.getFunctionExpression().accept( this );
append( CLOSE_PARENTHESIS );
renderDerivedTableReference( tableReference );
super.visitQueryPartTableReference( tableReference );
}
@Override

View File

@ -14,6 +14,7 @@ import java.util.Date;
import java.util.List;
import java.util.TimeZone;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.PessimisticLockException;
import org.hibernate.QueryTimeoutException;
import org.hibernate.boot.model.FunctionContributions;
@ -97,7 +98,6 @@ import static org.hibernate.type.SqlTypes.FLOAT;
import static org.hibernate.type.SqlTypes.GEOMETRY;
import static org.hibernate.type.SqlTypes.INTERVAL_SECOND;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.LONG32NVARCHAR;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.LONG32VARCHAR;
@ -265,7 +265,6 @@ public class H2LegacyDialect extends Dialect {
}
if ( getVersion().isSameOrAfter( 1, 4, 200 ) ) {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "json", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON_ARRAY, "json", this ) );
}
}
ddlTypeRegistry.addDescriptor( new NativeEnumDdlTypeImpl( this ) );
@ -296,7 +295,8 @@ public class H2LegacyDialect extends Dialect {
}
if ( getVersion().isSameOrAfter( 1, 4, 200 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( H2JsonJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( H2JsonArrayJdbcType.INSTANCE );
// Replace the standard array constructor
jdbcTypeRegistry.addTypeConstructor( H2JsonArrayJdbcTypeConstructor.INSTANCE );
}
jdbcTypeRegistry.addDescriptor( EnumJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptor( OrdinalEnumJdbcType.INSTANCE );
@ -427,6 +427,8 @@ public class H2LegacyDialect extends Dialect {
else {
functionFactory.listagg_groupConcat();
}
functionFactory.unnest_h2( getMaximumArraySize() );
}
/**
@ -439,6 +441,11 @@ public class H2LegacyDialect extends Dialect {
return 1000;
}
@Override
public @Nullable String getDefaultOrdinalityColumnName() {
return "nord";
}
@Override
public void augmentPhysicalTableTypes(List<String> tableTypesList) {
if ( getVersion().isSameOrAfter( 2 ) ) {

View File

@ -488,18 +488,28 @@ public class HANALegacyDialect extends Dialect {
typeConfiguration
);
if ( getVersion().isSameOrAfter(2, 0, 20) ) {
// Introduced in 2.0 SPS 02
if ( getVersion().isSameOrAfter( 2, 0 ) ) {
// Introduced in 2.0 SPS 00
functionFactory.jsonValue_no_passing();
functionFactory.jsonQuery_no_passing();
functionFactory.jsonExists_hana();
if ( getVersion().isSameOrAfter(2, 0, 40) ) {
// Introduced in 2.0 SPS 04
functionFactory.jsonObject_hana();
functionFactory.jsonArray_hana();
functionFactory.jsonArrayAgg_hana();
functionFactory.jsonObjectAgg_hana();
functionFactory.unnest_hana();
// functionFactory.json_table();
if ( getVersion().isSameOrAfter(2, 0, 20 ) ) {
if ( getVersion().isSameOrAfter( 2, 0, 40 ) ) {
// Introduced in 2.0 SPS 04
functionFactory.jsonObject_hana();
functionFactory.jsonArray_hana();
functionFactory.jsonArrayAgg_hana();
functionFactory.jsonObjectAgg_hana();
}
// functionFactory.xmltable();
}
// functionFactory.xmlextract();
}
}

View File

@ -9,17 +9,22 @@ import java.util.List;
import org.hibernate.MappingException;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.Stack;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.query.IllegalQueryOperationException;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
@ -34,6 +39,8 @@ import org.hibernate.sql.ast.tree.update.UpdateStatement;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.sql.model.internal.TableInsertStandard;
import static org.hibernate.dialect.SybaseASESqlAstTranslator.isLob;
/**
* An SQL AST translator for the Legacy HANA dialect.
*/
@ -192,15 +199,35 @@ public class HANALegacySqlAstTranslator<T extends JdbcOperation> extends Abstrac
}
@Override
protected SqlAstNodeRenderingMode getParameterRenderingMode() {
// HANA does not support parameters in lateral subqueries for some reason, so inline all the parameters in this case
return inLateral ? SqlAstNodeRenderingMode.INLINE_ALL_PARAMETERS : super.getParameterRenderingMode();
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
if ( tableReference instanceof FunctionTableReference && tableReference.isLateral() ) {
// No need for a lateral keyword for functions
tableReference.accept( this );
}
else {
super.renderDerivedTableReference( tableReference );
}
}
@Override
public void visitFunctionTableReference(FunctionTableReference tableReference) {
tableReference.getFunctionExpression().accept( this );
renderTableReferenceIdentificationVariable( tableReference );
public void renderNamedSetReturningFunction(String functionName, List<? extends SqlAstNode> sqlAstArguments, AnonymousTupleTableGroupProducer tupleType, String tableIdentifierVariable, SqlAstNodeRenderingMode argumentRenderingMode) {
final ModelPart ordinalitySubPart = tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
if ( ordinalitySubPart != null ) {
appendSql( "(select t.*, row_number() over() " );
appendSql( ordinalitySubPart.asBasicValuedModelPart().getSelectionExpression() );
appendSql( " from " );
renderSimpleNamedFunction( functionName, sqlAstArguments, argumentRenderingMode );
append( " t)" );
}
else {
super.renderNamedSetReturningFunction( functionName, sqlAstArguments, tupleType, tableIdentifierVariable, argumentRenderingMode );
}
}
@Override
protected SqlAstNodeRenderingMode getParameterRenderingMode() {
// HANA does not support parameters in lateral subqueries for some reason, so inline all the parameters in this case
return inLateral ? SqlAstNodeRenderingMode.INLINE_ALL_PARAMETERS : super.getParameterRenderingMode();
}
@Override
@ -212,7 +239,38 @@ public class HANALegacySqlAstTranslator<T extends JdbcOperation> extends Abstrac
@Override
protected void renderComparison(Expression lhs, ComparisonOperator operator, Expression rhs) {
// In SAP HANA, LOBs are not "comparable", so we have to use a like predicate for comparison
final boolean isLob = isLob( lhs.getExpressionType() );
if ( operator == ComparisonOperator.DISTINCT_FROM || operator == ComparisonOperator.NOT_DISTINCT_FROM ) {
if ( isLob ) {
switch ( operator ) {
case DISTINCT_FROM:
appendSql( "case when " );
lhs.accept( this );
appendSql( " like " );
rhs.accept( this );
appendSql( " or " );
lhs.accept( this );
appendSql( " is null and " );
rhs.accept( this );
appendSql( " is null then 0 else 1 end=1" );
return;
case NOT_DISTINCT_FROM:
appendSql( "case when " );
lhs.accept( this );
appendSql( " like " );
rhs.accept( this );
appendSql( " or " );
lhs.accept( this );
appendSql( " is null and " );
rhs.accept( this );
appendSql( " is null then 0 else 1 end=0" );
return;
default:
// Fall through
break;
}
}
// HANA does not support plain parameters in the select clause of the intersect emulation
withParameterRenderingMode(
SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER,
@ -220,7 +278,24 @@ public class HANALegacySqlAstTranslator<T extends JdbcOperation> extends Abstrac
);
}
else {
renderComparisonEmulateIntersect( lhs, operator, rhs );
if ( isLob ) {
switch ( operator ) {
case EQUAL:
lhs.accept( this );
appendSql( " like " );
rhs.accept( this );
return;
case NOT_EQUAL:
lhs.accept( this );
appendSql( " not like " );
rhs.accept( this );
return;
default:
// Fall through
break;
}
}
renderComparisonStandard( lhs, operator, rhs );
}
}

View File

@ -9,6 +9,7 @@ import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.sql.Types;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.JDBCException;
import org.hibernate.LockMode;
import org.hibernate.StaleObjectStateException;
@ -277,6 +278,8 @@ public class HSQLLegacyDialect extends Dialect {
functionFactory.jsonObjectAgg_h2();
}
functionFactory.unnest( "c1", "c2" );
//trim() requires parameters to be cast when used as trim character
functionContributions.getFunctionRegistry().register( "trim", new TrimFunction(
this,
@ -285,6 +288,11 @@ public class HSQLLegacyDialect extends Dialect {
) );
}
@Override
public @Nullable String getDefaultOrdinalityColumnName() {
return "c2";
}
@Override
public String currentTime() {
return "localtime";

View File

@ -22,6 +22,8 @@ import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.insert.ConflictClause;
import org.hibernate.sql.ast.tree.insert.InsertSelectStatement;
@ -72,6 +74,17 @@ public class HSQLLegacySqlAstTranslator<T extends JdbcOperation> extends Abstrac
}
}
@Override
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
if ( tableReference instanceof FunctionTableReference && tableReference.isLateral() ) {
// No need for a lateral keyword for functions
tableReference.accept( this );
}
else {
super.renderDerivedTableReference( tableReference );
}
}
@Override
protected void visitConflictClause(ConflictClause conflictClause) {
if ( conflictClause != null ) {

View File

@ -29,7 +29,7 @@ import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.JsonArrayJdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
@ -96,7 +96,12 @@ public class MariaDBLegacyDialect extends MySQLLegacyDialect {
commonFunctionFactory.jsonArrayAgg_mariadb();
commonFunctionFactory.jsonObjectAgg_mariadb();
commonFunctionFactory.jsonArrayAppend_mariadb();
if ( getVersion().isSameOrAfter( 10, 3, 3 ) ) {
if ( getVersion().isSameOrAfter( 10, 6 ) ) {
commonFunctionFactory.unnest_emulated();
}
commonFunctionFactory.inverseDistributionOrderedSetAggregates_windowEmulation();
functionContributions.getFunctionRegistry().patternDescriptorBuilder( "median", "median(?1) over ()" )
.setInvariantType( functionContributions.getTypeConfiguration().getBasicTypeRegistry().resolve( StandardBasicTypes.DOUBLE ) )
@ -145,7 +150,7 @@ public class MariaDBLegacyDialect extends MySQLLegacyDialect {
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
// Make sure we register the JSON type descriptor before calling super, because MariaDB does not need casting
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, JsonJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON_ARRAY, JsonArrayJdbcType.INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( JsonArrayJdbcTypeConstructor.INSTANCE );
super.contributeTypes( typeContributions, serviceRegistry );
if ( getVersion().isSameOrAfter( 10, 7 ) ) {

View File

@ -22,6 +22,7 @@ import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.insert.ConflictClause;
@ -277,6 +278,11 @@ public class MariaDBLegacySqlAstTranslator<T extends JdbcOperation> extends Abst
emulateQueryPartTableReferenceColumnAliasing( tableReference );
}
@Override
protected void renderDerivedTableReferenceIdentificationVariable(DerivedTableReference tableReference) {
	// Render only the plain identification variable (alias) for derived tables.
	// NOTE(review): presumably the inherited implementation would also emit a
	// column alias list, which this dialect does not support — confirm against
	// AbstractSqlAstTranslator.
	renderTableReferenceIdentificationVariable( tableReference );
}
@Override
public void visitOffsetFetchClause(QueryPart queryPart) {
if ( !isRowNumberingCurrentQueryPart() ) {

View File

@ -537,6 +537,11 @@ public class MySQLLegacyDialect extends Dialect {
return Types.BIT;
}
@Override
public int getPreferredSqlTypeCodeForArray() {
	// From MySQL 5.7 on, basic arrays are stored using the JSON type;
	// older versions fall back to the dialect default.
	if ( getMySQLVersion().isSameOrAfter( 5, 7 ) ) {
		return SqlTypes.JSON_ARRAY;
	}
	return super.getPreferredSqlTypeCodeForArray();
}
// @Override
// public int getDefaultDecimalPrecision() {
// //this is the maximum, but I guess it's too high
@ -667,6 +672,10 @@ public class MySQLLegacyDialect extends Dialect {
functionFactory.jsonMergepatch_mysql();
functionFactory.jsonArrayAppend_mysql();
functionFactory.jsonArrayInsert_mysql();
if ( getMySQLVersion().isSameOrAfter( 8 ) ) {
functionFactory.unnest_emulated();
}
}
}
@ -678,7 +687,7 @@ public class MySQLLegacyDialect extends Dialect {
if ( getMySQLVersion().isSameOrAfter( 5, 7 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, MySQLCastingJsonJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON_ARRAY, MySQLCastingJsonArrayJdbcType.INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( MySQLCastingJsonArrayJdbcTypeConstructor.INSTANCE );
}
// MySQL requires a custom binder for binding untyped nulls with the NULL type

View File

@ -23,6 +23,8 @@ import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.from.ValuesTableReference;
@ -269,6 +271,27 @@ public class MySQLLegacySqlAstTranslator<T extends JdbcOperation> extends Abstra
}
}
@Override
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
	// MySQL accepts a table function directly in the from clause, so the
	// LATERAL keyword is intentionally omitted for FunctionTableReferences.
	if ( tableReference instanceof FunctionTableReference && tableReference.isLateral() ) {
		// No need for a lateral keyword for functions
		tableReference.accept( this );
	}
	else {
		// All other derived tables (subqueries, values lists) use the default rendering
		super.renderDerivedTableReference( tableReference );
	}
}
@Override
protected void renderDerivedTableReferenceIdentificationVariable(DerivedTableReference tableReference) {
	// MySQL 8+ uses the default rendering for the derived table alias;
	// NOTE(review): older versions apparently cannot handle what the default
	// emits (presumably a column alias list), so only the plain alias is
	// rendered there — confirm against AbstractSqlAstTranslator.
	if ( getDialect().getVersion().isSameOrAfter( 8 ) ) {
		super.renderDerivedTableReferenceIdentificationVariable( tableReference );
	}
	else {
		renderTableReferenceIdentificationVariable( tableReference );
	}
}
@Override
public void visitOffsetFetchClause(QueryPart queryPart) {
if ( !isRowNumberingCurrentQueryPart() ) {

View File

@ -91,7 +91,6 @@ import org.hibernate.type.descriptor.jdbc.BlobJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.NullJdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectNullAsNullTypeJdbcType;
import org.hibernate.type.descriptor.jdbc.OracleJsonArrayBlobJdbcType;
import org.hibernate.type.descriptor.jdbc.OracleJsonBlobJdbcType;
import org.hibernate.type.descriptor.jdbc.SqlTypedJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
@ -123,7 +122,6 @@ import static org.hibernate.type.SqlTypes.FLOAT;
import static org.hibernate.type.SqlTypes.GEOMETRY;
import static org.hibernate.type.SqlTypes.INTEGER;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.NUMERIC;
import static org.hibernate.type.SqlTypes.NVARCHAR;
import static org.hibernate.type.SqlTypes.REAL;
@ -335,6 +333,8 @@ public class OracleLegacyDialect extends Dialect {
functionFactory.xmlquery_oracle();
functionFactory.xmlexists();
functionFactory.xmlagg();
functionFactory.unnest_oracle();
}
@Override
@ -745,11 +745,9 @@ public class OracleLegacyDialect extends Dialect {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOMETRY, "MDSYS.SDO_GEOMETRY", this ) );
if ( getVersion().isSameOrAfter( 21 ) ) {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "json", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON_ARRAY, "json", this ) );
}
else if ( getVersion().isSameOrAfter( 12 ) ) {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "blob", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON_ARRAY, "blob", this ) );
}
}
@ -919,11 +917,11 @@ public class OracleLegacyDialect extends Dialect {
if ( getVersion().isSameOrAfter( 21 ) ) {
typeContributions.contributeJdbcType( OracleJsonJdbcType.INSTANCE );
typeContributions.contributeJdbcType( OracleJsonArrayJdbcType.INSTANCE );
typeContributions.contributeJdbcTypeConstructor( OracleJsonArrayJdbcTypeConstructor.NATIVE_INSTANCE );
}
else {
typeContributions.contributeJdbcType( OracleJsonBlobJdbcType.INSTANCE );
typeContributions.contributeJdbcType( OracleJsonArrayBlobJdbcType.INSTANCE );
typeContributions.contributeJdbcTypeConstructor( OracleJsonArrayJdbcTypeConstructor.BLOB_INSTANCE );
}
}

View File

@ -10,18 +10,23 @@ import java.util.List;
import org.hibernate.dialect.OracleArrayJdbcType;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.Stack;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.EmbeddableValuedModelPart;
import org.hibernate.metamodel.mapping.EntityIdentifierMapping;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.query.IllegalQueryOperationException;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.query.sqm.FrameExclusion;
import org.hibernate.query.sqm.FrameKind;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteMaterialization;
import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
@ -33,6 +38,7 @@ import org.hibernate.sql.ast.tree.expression.Over;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.SqlTupleContainer;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.FromClause;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
@ -298,9 +304,42 @@ public class OracleLegacySqlAstTranslator<T extends JdbcOperation> extends Abstr
@Override
public void visitFunctionTableReference(FunctionTableReference tableReference) {
	// Oracle requires set-returning function calls in the from clause
	// to be wrapped in a table(...) collection expression
	append( "table(" );
	tableReference.getFunctionExpression().accept( this );
	append( CLOSE_PARENTHESIS );
	// Only render the alias here if the function expression itself did not
	if ( !tableReference.rendersIdentifierVariable() ) {
		renderDerivedTableReferenceIdentificationVariable( tableReference );
	}
}
@Override
public void renderNamedSetReturningFunction(String functionName, List<? extends SqlAstNode> sqlAstArguments, AnonymousTupleTableGroupProducer tupleType, String tableIdentifierVariable, SqlAstNodeRenderingMode argumentRenderingMode) {
	// When the tuple type exposes an index (ordinality) column, emulate it with
	// rownum over the table(...) result inside a lateral subquery, since the
	// plain table(...) construct provides no ordinality column itself.
	final ModelPart ordinalitySubPart = tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
	if ( ordinalitySubPart != null ) {
		appendSql( "lateral (select t.*, rownum " );
		appendSql( ordinalitySubPart.asBasicValuedModelPart().getSelectionExpression() );
		appendSql( " from table(" );
		renderSimpleNamedFunction( functionName, sqlAstArguments, argumentRenderingMode );
		append( ") t)" );
	}
	else {
		// No ordinality requested: just wrap the default rendering in table(...)
		appendSql( "table(" );
		super.renderNamedSetReturningFunction( functionName, sqlAstArguments, tupleType, tableIdentifierVariable, argumentRenderingMode );
		append( ')' );
	}
}
@Override
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
	// Function table references are already rendered via table(...), which
	// Oracle treats as implicitly lateral, so the LATERAL keyword is omitted.
	if ( tableReference instanceof FunctionTableReference && tableReference.isLateral() ) {
		// No need for a lateral keyword for functions
		tableReference.accept( this );
	}
	else {
		super.renderDerivedTableReference( tableReference );
	}
}
@Override
protected void renderDerivedTableReferenceIdentificationVariable(DerivedTableReference tableReference) {
	// Render only the plain identification variable (alias) for derived tables.
	// NOTE(review): presumably the inherited implementation would also emit a
	// column alias list, which Oracle does not accept here — confirm against
	// AbstractSqlAstTranslator.
	renderTableReferenceIdentificationVariable( tableReference );
}

View File

@ -17,6 +17,7 @@ import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.Length;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
@ -109,7 +110,6 @@ import static org.hibernate.type.SqlTypes.GEOGRAPHY;
import static org.hibernate.type.SqlTypes.GEOMETRY;
import static org.hibernate.type.SqlTypes.INET;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.LONG32NVARCHAR;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.LONG32VARCHAR;
@ -259,11 +259,9 @@ public class PostgreSQLLegacyDialect extends Dialect {
// Prefer jsonb if possible
if ( getVersion().isSameOrAfter( 9, 4 ) ) {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "jsonb", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON_ARRAY, "jsonb", this ) );
}
else {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "json", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON_ARRAY, "json", this ) );
}
}
ddlTypeRegistry.addDescriptor( new NamedNativeEnumDdlTypeImpl( this ) );
@ -706,6 +704,18 @@ public class PostgreSQLLegacyDialect extends Dialect {
);
functionContributions.getFunctionRegistry().registerAlternateKey( "truncate", "trunc" );
functionFactory.dateTrunc();
if ( getVersion().isSameOrAfter( 17 ) ) {
functionFactory.unnest( null, "ordinality" );
}
else {
functionFactory.unnest_postgresql();
}
}
@Override
public @Nullable String getDefaultOrdinalityColumnName() {
	// Column name exposed by PostgreSQL's "with ordinality" clause,
	// used by unnest() when an index/ordinality column is requested
	return "ordinality";
}
/**
@ -1460,21 +1470,21 @@ public class PostgreSQLLegacyDialect extends Dialect {
if ( getVersion().isSameOrAfter( 9, 4 ) ) {
if ( PgJdbcHelper.isUsable( serviceRegistry ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getJsonbJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getJsonbArrayJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PgJdbcHelper.getJsonbArrayJdbcType( serviceRegistry ) );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonArrayJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PostgreSQLCastingJsonArrayJdbcTypeConstructor.JSONB_INSTANCE );
}
}
else {
if ( PgJdbcHelper.isUsable( serviceRegistry ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getJsonJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getJsonArrayJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PgJdbcHelper.getJsonArrayJdbcType( serviceRegistry ) );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSON_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonArrayJdbcType.JSON_INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PostgreSQLCastingJsonArrayJdbcTypeConstructor.JSON_INSTANCE );
}
}
}
@ -1490,11 +1500,11 @@ public class PostgreSQLLegacyDialect extends Dialect {
if ( getVersion().isSameOrAfter( 9, 2 ) ) {
if ( getVersion().isSameOrAfter( 9, 4 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonArrayJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PostgreSQLCastingJsonArrayJdbcTypeConstructor.JSONB_INSTANCE );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSON_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonArrayJdbcType.JSON_INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PostgreSQLCastingJsonArrayJdbcTypeConstructor.JSON_INSTANCE );
}
}
}
@ -1513,6 +1523,7 @@ public class PostgreSQLLegacyDialect extends Dialect {
)
);
// Replace the standard array constructor
jdbcTypeRegistry.addTypeConstructor( PostgreSQLArrayJdbcTypeConstructor.INSTANCE );
}

View File

@ -16,6 +16,8 @@ import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.DmlTargetColumnQualifierSupport;
import org.hibernate.dialect.Replacer;
import org.hibernate.dialect.SQLServerCastingXmlArrayJdbcTypeConstructor;
import org.hibernate.dialect.SQLServerCastingXmlJdbcType;
import org.hibernate.dialect.TimeZoneSupport;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.CountFunction;
@ -74,7 +76,6 @@ import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.TimestampUtcAsJdbcTimestampJdbcType;
import org.hibernate.type.descriptor.jdbc.TinyIntAsSmallIntJdbcType;
import org.hibernate.type.descriptor.jdbc.UUIDJdbcType;
import org.hibernate.type.descriptor.jdbc.XmlJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
@ -244,6 +245,11 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "uniqueidentifier", this ) );
}
@Override
public int getPreferredSqlTypeCodeForArray() {
	// SQL Server has no native array type, so basic arrays are stored as XML
	return XML_ARRAY;
}
@Override
public JdbcType resolveSqlTypeDescriptor(
String columnTypeName,
@ -308,8 +314,9 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
Types.TINYINT,
TinyIntAsSmallIntJdbcType.INSTANCE
);
typeContributions.contributeJdbcType( XmlJdbcType.INSTANCE );
typeContributions.contributeJdbcType( SQLServerCastingXmlJdbcType.INSTANCE );
typeContributions.contributeJdbcType( UUIDJdbcType.INSTANCE );
typeContributions.contributeJdbcTypeConstructor( SQLServerCastingXmlArrayJdbcTypeConstructor.INSTANCE );
}
@Override
@ -421,6 +428,9 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
functionFactory.xmlquery_sqlserver();
functionFactory.xmlexists_sqlserver();
functionFactory.xmlagg_sqlserver();
functionFactory.unnest_sqlserver();
if ( getVersion().isSameOrAfter( 14 ) ) {
functionFactory.listagg_stringAggWithinGroup( "varchar(max)" );
functionFactory.jsonArrayAgg_sqlserver( getVersion().isSameOrAfter( 16 ) );

View File

@ -11,14 +11,19 @@ import org.hibernate.LockOptions;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.Stack;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.query.IllegalQueryOperationException;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstJoinType;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.delete.DeleteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
@ -26,10 +31,9 @@ import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableGroupJoin;
import org.hibernate.sql.ast.tree.from.TableReference;
import org.hibernate.sql.ast.tree.from.UnionTableReference;
import org.hibernate.sql.ast.tree.insert.ConflictClause;
import org.hibernate.sql.ast.tree.insert.InsertSelectStatement;
@ -180,17 +184,24 @@ public class SQLServerLegacySqlAstTranslator<T extends JdbcOperation> extends Ab
}
}
protected boolean renderPrimaryTableReference(TableGroup tableGroup, LockMode lockMode) {
if ( shouldInlineCte( tableGroup ) ) {
inlineCteTableGroup( tableGroup, lockMode );
return false;
}
final TableReference tableReference = tableGroup.getPrimaryTableReference();
if ( tableReference instanceof NamedTableReference ) {
return renderNamedTableReference( (NamedTableReference) tableReference, lockMode );
}
@Override
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
tableReference.accept( this );
return false;
}
@Override
public void renderNamedSetReturningFunction(String functionName, List<? extends SqlAstNode> sqlAstArguments, AnonymousTupleTableGroupProducer tupleType, String tableIdentifierVariable, SqlAstNodeRenderingMode argumentRenderingMode) {
	// When the tuple type exposes an index (ordinality) column, emulate it with
	// row_number() over a constant ordering in a wrapping subquery; the constant
	// "(select 1)" order-by preserves the function's row order without sorting.
	final ModelPart ordinalitySubPart = tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
	if ( ordinalitySubPart != null ) {
		appendSql( "(select t.*, row_number() over(order by (select 1)) " );
		appendSql( ordinalitySubPart.asBasicValuedModelPart().getSelectionExpression() );
		appendSql( " from " );
		renderSimpleNamedFunction( functionName, sqlAstArguments, argumentRenderingMode );
		append( " t)" );
	}
	else {
		// No ordinality requested: default rendering suffices
		super.renderNamedSetReturningFunction( functionName, sqlAstArguments, tupleType, tableIdentifierVariable, argumentRenderingMode );
	}
}
@Override

View File

@ -12,10 +12,12 @@ import java.sql.Types;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.QueryTimeoutException;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.SybaseDriverKind;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.pagination.LimitHandler;
import org.hibernate.dialect.pagination.TopLimitHandler;
import org.hibernate.engine.jdbc.Size;
@ -52,6 +54,7 @@ import static org.hibernate.type.SqlTypes.DATE;
import static org.hibernate.type.SqlTypes.TIME;
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.XML_ARRAY;
/**
* A {@linkplain Dialect SQL dialect} for Sybase Adaptive Server Enterprise 11.9 and above.
@ -140,6 +143,11 @@ public class SybaseASELegacyDialect extends SybaseLegacyDialect {
}
}
@Override
public int getPreferredSqlTypeCodeForArray() {
	// Sybase ASE has no native array type, so basic arrays are stored as XML
	return XML_ARRAY;
}
@Override
public int getMaxVarcharLength() {
// the maximum length of a VARCHAR or VARBINARY
@ -151,6 +159,15 @@ public class SybaseASELegacyDialect extends SybaseLegacyDialect {
return 16_384;
}
@Override
public void initializeFunctionRegistry(FunctionContributions functionContributions) {
	super.initializeFunctionRegistry( functionContributions );
	// Register the Sybase ASE specific emulation of the unnest() set-returning function
	new CommonFunctionFactory( functionContributions ).unnest_sybasease();
}
private static boolean isAnsiNull(DatabaseMetaData databaseMetaData) {
if ( databaseMetaData != null ) {
try (java.sql.Statement s = databaseMetaData.getConnection().createStatement() ) {

View File

@ -48,6 +48,8 @@ import org.hibernate.sql.ast.tree.select.SelectClause;
import org.hibernate.sql.ast.tree.update.UpdateStatement;
import org.hibernate.sql.exec.spi.JdbcOperation;
import static org.hibernate.dialect.SybaseASESqlAstTranslator.isLob;
/**
* A SQL AST translator for Sybase ASE.
*
@ -336,7 +338,7 @@ public class SybaseASELegacySqlAstTranslator<T extends JdbcOperation> extends Ab
append( '(' );
visitValuesListEmulateSelectUnion( tableReference.getValuesList() );
append( ')' );
renderDerivedTableReference( tableReference );
renderDerivedTableReferenceIdentificationVariable( tableReference );
}
@Override
@ -371,8 +373,56 @@ public class SybaseASELegacySqlAstTranslator<T extends JdbcOperation> extends Ab
@Override
protected void renderComparison(Expression lhs, ComparisonOperator operator, Expression rhs) {
// In Sybase ASE, XMLTYPE is not "comparable", so we have to cast the two parts to varchar for this purpose
final boolean isLob = isLob( lhs.getExpressionType() );
if ( isLob ) {
switch ( operator ) {
case EQUAL:
lhs.accept( this );
appendSql( " like " );
rhs.accept( this );
return;
case NOT_EQUAL:
lhs.accept( this );
appendSql( " not like " );
rhs.accept( this );
return;
default:
// Fall through
break;
}
}
// I think intersect is only supported in 16.0 SP3
if ( getDialect().isAnsiNullOn() ) {
if ( isLob ) {
switch ( operator ) {
case DISTINCT_FROM:
appendSql( "case when " );
lhs.accept( this );
appendSql( " like " );
rhs.accept( this );
appendSql( " or " );
lhs.accept( this );
appendSql( " is null and " );
rhs.accept( this );
appendSql( " is null then 0 else 1 end=1" );
return;
case NOT_DISTINCT_FROM:
appendSql( "case when " );
lhs.accept( this );
appendSql( " like " );
rhs.accept( this );
appendSql( " or " );
lhs.accept( this );
appendSql( " is null and " );
rhs.accept( this );
appendSql( " is null then 0 else 1 end=0" );
return;
default:
// Fall through
break;
}
}
if ( supportsDistinctFromPredicate() ) {
renderComparisonEmulateIntersect( lhs, operator, rhs );
}
@ -393,10 +443,20 @@ public class SybaseASELegacySqlAstTranslator<T extends JdbcOperation> extends Ab
lhs.accept( this );
switch ( operator ) {
case DISTINCT_FROM:
appendSql( "<>" );
if ( isLob ) {
appendSql( " not like " );
}
else {
appendSql( "<>" );
}
break;
case NOT_DISTINCT_FROM:
appendSql( '=' );
if ( isLob ) {
appendSql( " like " );
}
else {
appendSql( '=' );
}
break;
case LESS_THAN:
case GREATER_THAN:

View File

@ -204,7 +204,7 @@ public class SybaseLegacySqlAstTranslator<T extends JdbcOperation> extends Abstr
append( '(' );
visitValuesListEmulateSelectUnion( tableReference.getValuesList() );
append( ')' );
renderDerivedTableReference( tableReference );
renderDerivedTableReferenceIdentificationVariable( tableReference );
}
@Override

View File

@ -225,6 +225,7 @@ entityWithJoins
fromRoot
: entityName variable? # RootEntity
| LEFT_PAREN subquery RIGHT_PAREN variable? # RootSubquery
| setReturningFunction variable? # RootFunction
;
/**
@ -275,8 +276,9 @@ joinType
* The joined path, with an optional identification variable
*/
joinTarget
: path variable? #JoinPath
| LATERAL? LEFT_PAREN subquery RIGHT_PAREN variable? #JoinSubquery
: path variable? # JoinPath
| LATERAL? LEFT_PAREN subquery RIGHT_PAREN variable? # JoinSubquery
| LATERAL? setReturningFunction variable? # JoinFunction
;
/**
@ -1114,6 +1116,17 @@ function
| genericFunction
;
/**
 * A set returning function invocation, usable as a query root or join target.
 */
setReturningFunction
	: simpleSetReturningFunction
	;

/**
 * A simple set returning function invocation without special syntax.
 */
simpleSetReturningFunction
	: identifier LEFT_PAREN genericFunctionArguments? RIGHT_PAREN
	;
/**
* A syntax for calling user-defined or native database functions, required by JPQL
*/

View File

@ -85,8 +85,11 @@ import org.hibernate.type.descriptor.java.ByteArrayJavaType;
import org.hibernate.type.descriptor.java.CharacterArrayJavaType;
import org.hibernate.type.descriptor.java.spi.JavaTypeRegistry;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonArrayAsStringJdbcType;
import org.hibernate.type.descriptor.jdbc.JsonArrayJdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.JsonAsStringArrayJdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.JsonAsStringJdbcType;
import org.hibernate.type.descriptor.jdbc.XmlArrayJdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.XmlAsStringArrayJdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.XmlAsStringJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.DdlType;
@ -103,7 +106,6 @@ import org.jboss.jandex.Indexer;
import jakarta.persistence.AttributeConverter;
import static org.hibernate.internal.util.collections.CollectionHelper.mutableJoin;
import static org.hibernate.internal.util.config.ConfigurationHelper.getPreferredSqlTypeCodeForArray;
import static org.hibernate.internal.util.config.ConfigurationHelper.getPreferredSqlTypeCodeForDuration;
import static org.hibernate.internal.util.config.ConfigurationHelper.getPreferredSqlTypeCodeForInstant;
import static org.hibernate.internal.util.config.ConfigurationHelper.getPreferredSqlTypeCodeForUuid;
@ -763,9 +765,6 @@ public class MetadataBuildingProcess {
// add Dialect contributed types
final Dialect dialect = options.getServiceRegistry().requireService( JdbcServices.class ).getDialect();
dialect.contribute( typeContributions, options.getServiceRegistry() );
// Capture the dialect configured JdbcTypes so that we can detect if a TypeContributor overwrote them,
// which has precedence over the fallback and preferred type registrations
final JdbcType dialectArrayDescriptor = jdbcTypeRegistry.findDescriptor( SqlTypes.ARRAY );
// add TypeContributor contributed types.
for ( TypeContributor contributor : classLoaderService.loadJavaServices( TypeContributor.class ) ) {
@ -790,17 +789,23 @@ public class MetadataBuildingProcess {
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.UUID, SqlTypes.BINARY );
}
final int preferredSqlTypeCodeForArray = getPreferredSqlTypeCodeForArray( serviceRegistry );
if ( preferredSqlTypeCodeForArray != SqlTypes.ARRAY ) {
adaptToPreferredSqlTypeCode(
jdbcTypeRegistry,
dialectArrayDescriptor,
SqlTypes.ARRAY,
preferredSqlTypeCodeForArray
);
jdbcTypeRegistry.addDescriptorIfAbsent( JsonAsStringJdbcType.VARCHAR_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( XmlAsStringJdbcType.VARCHAR_INSTANCE );
if ( jdbcTypeRegistry.getConstructor( SqlTypes.JSON_ARRAY ) == null ) {
if ( jdbcTypeRegistry.getDescriptor( SqlTypes.JSON ).getDdlTypeCode() == SqlTypes.JSON ) {
jdbcTypeRegistry.addTypeConstructor( JsonArrayJdbcTypeConstructor.INSTANCE );
}
else {
jdbcTypeRegistry.addTypeConstructor( JsonAsStringArrayJdbcTypeConstructor.INSTANCE );
}
}
else {
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.ARRAY, SqlTypes.VARBINARY );
if ( jdbcTypeRegistry.getConstructor( SqlTypes.XML_ARRAY ) == null ) {
if ( jdbcTypeRegistry.getDescriptor( SqlTypes.SQLXML ).getDdlTypeCode() == SqlTypes.SQLXML ) {
jdbcTypeRegistry.addTypeConstructor( XmlArrayJdbcTypeConstructor.INSTANCE );
}
else {
jdbcTypeRegistry.addTypeConstructor( XmlAsStringArrayJdbcTypeConstructor.INSTANCE );
}
}
final int preferredSqlTypeCodeForDuration = getPreferredSqlTypeCodeForDuration( serviceRegistry );
@ -823,10 +828,6 @@ public class MetadataBuildingProcess {
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.POINT, SqlTypes.VARBINARY );
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.GEOGRAPHY, SqlTypes.GEOMETRY );
jdbcTypeRegistry.addDescriptorIfAbsent( JsonAsStringJdbcType.VARCHAR_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( JsonArrayAsStringJdbcType.VARCHAR_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( XmlAsStringJdbcType.VARCHAR_INSTANCE );
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.MATERIALIZED_BLOB, SqlTypes.BLOB );
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.MATERIALIZED_CLOB, SqlTypes.CLOB );
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.MATERIALIZED_NCLOB, SqlTypes.NCLOB );

View File

@ -19,6 +19,7 @@ import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.PessimisticLockException;
@ -95,7 +96,6 @@ import static org.hibernate.type.SqlTypes.GEOMETRY;
import static org.hibernate.type.SqlTypes.INET;
import static org.hibernate.type.SqlTypes.INTEGER;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.LONG32NVARCHAR;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.LONG32VARCHAR;
@ -260,7 +260,6 @@ public class CockroachDialect extends Dialect {
// Prefer jsonb if possible
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( INET, "inet", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "jsonb", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON_ARRAY, "jsonb", this ) );
ddlTypeRegistry.addDescriptor( new NamedNativeEnumDdlTypeImpl( this ) );
ddlTypeRegistry.addDescriptor( new NamedNativeOrdinalEnumDdlTypeImpl( this ) );
@ -356,13 +355,13 @@ public class CockroachDialect extends Dialect {
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getIntervalJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getInetJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getJsonbJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getJsonbArrayJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PgJdbcHelper.getJsonbArrayJdbcType( serviceRegistry ) );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonArrayJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PostgreSQLCastingJsonArrayJdbcTypeConstructor.JSONB_INSTANCE );
}
}
else {
@ -370,7 +369,7 @@ public class CockroachDialect extends Dialect {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonArrayJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PostgreSQLCastingJsonArrayJdbcTypeConstructor.JSONB_INSTANCE );
}
// Force Blob binding to byte[] for CockroachDB
@ -391,6 +390,7 @@ public class CockroachDialect extends Dialect {
)
);
// Replace the standard array constructor
jdbcTypeRegistry.addTypeConstructor( PostgreSQLArrayJdbcTypeConstructor.INSTANCE );
}
@ -485,6 +485,8 @@ public class CockroachDialect extends Dialect {
functionFactory.jsonArrayAppend_postgresql( false );
functionFactory.jsonArrayInsert_postgresql();
functionFactory.unnest_postgresql();
// Postgres uses # instead of ^ for XOR
functionContributions.getFunctionRegistry().patternDescriptorBuilder( "bitxor", "(?1#?2)" )
.setExactArgumentCount( 2 )
@ -498,6 +500,11 @@ public class CockroachDialect extends Dialect {
functionContributions.getFunctionRegistry().registerAlternateKey( "truncate", "trunc" );
}
@Override
public @Nullable String getDefaultOrdinalityColumnName() {
	// CockroachDB follows the PostgreSQL convention: "with ordinality" exposes the
	// 1-based row number under a column named "ordinality" by default.
	return "ordinality";
}
@Override
public TimeZoneSupport getTimeZoneSupport() {
return TimeZoneSupport.NORMALIZE;

View File

@ -78,6 +78,7 @@ import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorDB
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorNoOpImpl;
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.JavaObjectType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.java.JavaType;
@ -440,6 +441,14 @@ public class DB2Dialect extends Dialect {
functionFactory.xmlexists_db2_legacy();
}
functionFactory.xmlagg();
functionFactory.unnest_emulated();
}
@Override
public int getPreferredSqlTypeCodeForArray() {
	// Preferred serialization format for array-typed basic values on DB2.
	// Even if DB2 11 supports JSON functions, it's not possible to unnest a JSON array to rows, so stick to XML
	return SqlTypes.XML_ARRAY;
}
@Override

View File

@ -9,8 +9,11 @@ import java.util.function.Consumer;
import org.hibernate.LockMode;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.query.IllegalQueryOperationException;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.sql.ast.Clause;
@ -18,6 +21,7 @@ import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.MutationStatement;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.delete.DeleteStatement;
import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
@ -26,6 +30,8 @@ import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.from.TableGroup;
@ -252,6 +258,34 @@ public class DB2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAst
inLateral = oldLateral;
}
@Override
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
	// DB2 accepts a table function directly in the from clause without the
	// "lateral" keyword, so emit the function reference as-is in that case.
	final boolean lateralFunction =
			tableReference instanceof FunctionTableReference && tableReference.isLateral();
	if ( lateralFunction ) {
		tableReference.accept( this );
		return;
	}
	super.renderDerivedTableReference( tableReference );
}
@Override
public void renderNamedSetReturningFunction(String functionName, List<? extends SqlAstNode> sqlAstArguments, AnonymousTupleTableGroupProducer tupleType, String tableIdentifierVariable, SqlAstNodeRenderingMode argumentRenderingMode) {
	// Look up the synthetic "index" part of the tuple; when present, the query
	// requested an ordinality (row number) column for the set-returning function.
	final ModelPart ordinalitySubPart = tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
	if ( ordinalitySubPart != null ) {
		// Emulate "with ordinality" by wrapping the table function in a lateral
		// subquery that adds a row_number() over() column under the expected name.
		appendSql( "lateral (select t.*, row_number() over() " );
		appendSql( ordinalitySubPart.asBasicValuedModelPart().getSelectionExpression() );
		appendSql( " from table(" );
		renderSimpleNamedFunction( functionName, sqlAstArguments, argumentRenderingMode );
		append( ") t)" );
	}
	else {
		// No ordinality requested: just wrap the function in table(...) as DB2 requires.
		appendSql( "table(" );
		super.renderNamedSetReturningFunction( functionName, sqlAstArguments, tupleType, tableIdentifierVariable, argumentRenderingMode );
		append( ')' );
	}
}
@Override
public void visitSelectStatement(SelectStatement statement) {
if ( getQueryPartForRowNumbering() == statement.getQueryPart() && inLateral ) {

View File

@ -63,6 +63,11 @@ public class DB2StructJdbcType implements StructJdbcType {
return SqlTypes.SQLXML;
}
@Override
public int getDdlTypeCode() {
	// The DDL column type backing this struct mapping is XML on DB2.
	return SqlTypes.SQLXML;
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.STRUCT;

View File

@ -4,15 +4,11 @@
*/
package org.hibernate.dialect;
import org.hibernate.LockMode;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.exec.spi.JdbcOperation;
@ -51,28 +47,10 @@ public class DB2zSqlAstTranslator<T extends JdbcOperation> extends DB2SqlAstTran
}
@Override
protected boolean renderPrimaryTableReference(TableGroup tableGroup, LockMode lockMode) {
if ( shouldInlineCte( tableGroup ) ) {
inlineCteTableGroup( tableGroup, lockMode );
return false;
}
final TableReference tableReference = tableGroup.getPrimaryTableReference();
if ( tableReference instanceof NamedTableReference ) {
return renderNamedTableReference( (NamedTableReference) tableReference, lockMode );
}
public void visitQueryPartTableReference(QueryPartTableReference tableReference) {
// DB2 z/OS we need the "table" qualifier for table valued functions or lateral sub-queries
append( "table " );
tableReference.accept( this );
return false;
}
@Override
public void visitFunctionTableReference(FunctionTableReference tableReference) {
// For the table qualifier we need parenthesis on DB2 z/OS
append( OPEN_PARENTHESIS );
tableReference.getFunctionExpression().accept( this );
append( CLOSE_PARENTHESIS );
renderDerivedTableReference( tableReference );
super.visitQueryPartTableReference( tableReference );
}
@Override

View File

@ -1871,7 +1871,7 @@ public abstract class Dialect implements ConversionContext, TypeContributor, Fun
}
if ( supportsStandardArrays() ) {
jdbcTypeRegistry.addTypeConstructor( ArrayJdbcTypeConstructor.INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( ArrayJdbcTypeConstructor.INSTANCE );
}
if ( supportsMaterializedLobAccess() ) {
jdbcTypeRegistry.addDescriptor( SqlTypes.MATERIALIZED_BLOB, BlobJdbcType.MATERIALIZED );
@ -5290,6 +5290,14 @@ public abstract class Dialect implements ConversionContext, TypeContributor, Fun
return null;
}
/**
 * Returns the default name of the ordinality column for a set-returning function
 * if it supports that, otherwise returns {@code null}.
 *
 * @return the database's default ordinality column name, or {@code null}
 *         when the dialect has no such default
 */
public @Nullable String getDefaultOrdinalityColumnName() {
	// Base dialect: no ordinality column support by default; dialects override this.
	return null;
}
/**
* Pluggable strategy for determining the {@link Size} to use for
* columns of a given SQL type.

View File

@ -14,6 +14,7 @@ import java.util.Date;
import java.util.List;
import java.util.TimeZone;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.PessimisticLockException;
import org.hibernate.QueryTimeoutException;
import org.hibernate.boot.model.FunctionContributions;
@ -89,7 +90,6 @@ import static org.hibernate.type.SqlTypes.FLOAT;
import static org.hibernate.type.SqlTypes.GEOMETRY;
import static org.hibernate.type.SqlTypes.INTERVAL_SECOND;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.LONG32NVARCHAR;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.LONG32VARCHAR;
@ -229,7 +229,6 @@ public class H2Dialect extends Dialect {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOMETRY, "geometry", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( INTERVAL_SECOND, "interval second($p,$s)", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "json", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON_ARRAY, "json", this ) );
ddlTypeRegistry.addDescriptor( new NativeEnumDdlTypeImpl( this ) );
ddlTypeRegistry.addDescriptor( new NativeOrdinalEnumDdlTypeImpl( this ) );
}
@ -246,7 +245,8 @@ public class H2Dialect extends Dialect {
jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( H2DurationIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( H2JsonJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( H2JsonArrayJdbcType.INSTANCE );
// Replace the standard array constructor
jdbcTypeRegistry.addTypeConstructor( H2JsonArrayJdbcTypeConstructor.INSTANCE );
jdbcTypeRegistry.addDescriptor( EnumJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptor( OrdinalEnumJdbcType.INSTANCE );
}
@ -358,6 +358,8 @@ public class H2Dialect extends Dialect {
functionFactory.xmlforest_h2();
functionFactory.xmlconcat_h2();
functionFactory.xmlpi_h2();
functionFactory.unnest_h2( getMaximumArraySize() );
}
/**
@ -370,6 +372,11 @@ public class H2Dialect extends Dialect {
return 1000;
}
@Override
public @Nullable String getDefaultOrdinalityColumnName() {
	// H2's unnest ... with ordinality exposes the row number as column "nord".
	return "nord";
}
@Override
public void augmentPhysicalTableTypes(List<String> tableTypesList) {
tableTypesList.add( "BASE TABLE" );

View File

@ -13,18 +13,16 @@ import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonArrayJdbcType;
/**
* H2 requires binding JSON via {@code setBytes} methods.
*/
public class H2JsonArrayJdbcType extends JsonArrayJdbcType {
/**
* Singleton access
*/
public static final H2JsonArrayJdbcType INSTANCE = new H2JsonArrayJdbcType();
protected H2JsonArrayJdbcType() {
public H2JsonArrayJdbcType(JdbcType elementJdbcType) {
super( elementJdbcType );
}
@Override

View File

@ -0,0 +1,43 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeConstructor;
import org.hibernate.type.spi.TypeConfiguration;
/**
* Factory for {@link H2JsonArrayJdbcType}.
*/
public class H2JsonArrayJdbcTypeConstructor implements JdbcTypeConstructor {
	/**
	 * Singleton access
	 */
	public static final H2JsonArrayJdbcTypeConstructor INSTANCE = new H2JsonArrayJdbcTypeConstructor();
	/**
	 * Resolves the array type from a {@link BasicType} element by delegating to the
	 * {@link JdbcType}-based overload with the element's JDBC type.
	 */
	@Override
	public JdbcType resolveType(
			TypeConfiguration typeConfiguration,
			Dialect dialect,
			BasicType<?> elementType,
			ColumnTypeInformation columnTypeInformation) {
		return resolveType( typeConfiguration, dialect, elementType.getJdbcType(), columnTypeInformation );
	}
	/**
	 * Creates a new {@link H2JsonArrayJdbcType} wrapping the given element JDBC type.
	 */
	@Override
	public JdbcType resolveType(
			TypeConfiguration typeConfiguration,
			Dialect dialect,
			JdbcType elementType,
			ColumnTypeInformation columnTypeInformation) {
		return new H2JsonArrayJdbcType( elementType );
	}
	@Override
	public int getDefaultSqlTypeCode() {
		// This constructor produces JSON-array typed JdbcTypes.
		return SqlTypes.JSON_ARRAY;
	}
}

View File

@ -66,6 +66,7 @@ import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorHA
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.tool.schema.internal.StandardTableExporter;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
@ -413,6 +414,12 @@ public class HANADialect extends Dialect {
return 5000;
}
@Override
public int getPreferredSqlTypeCodeForArray() {
	// Prefer XML since JSON was only added later
	// HANA 2+ uses XML-serialized arrays; older versions fall back to the base default.
	return getVersion().isSameOrAfter( 2 ) ? SqlTypes.XML_ARRAY : super.getPreferredSqlTypeCodeForArray();
}
@Override
public void initializeFunctionRegistry(FunctionContributions functionContributions) {
super.initializeFunctionRegistry(functionContributions);
@ -490,15 +497,23 @@ public class HANADialect extends Dialect {
typeConfiguration
);
// Introduced in 2.0 SPS 02
functionFactory.jsonValue_no_passing();
functionFactory.jsonQuery_no_passing();
functionFactory.jsonExists_hana();
// Introduced in 2.0 SPS 04
functionFactory.jsonObject_hana();
functionFactory.jsonArray_hana();
functionFactory.jsonArrayAgg_hana();
functionFactory.jsonObjectAgg_hana();
// Introduced in 2.0 SPS 00
functionFactory.jsonValue_no_passing();
functionFactory.jsonQuery_no_passing();
functionFactory.jsonExists_hana();
functionFactory.unnest_hana();
// functionFactory.json_table();
// Introduced in 2.0 SPS 04
functionFactory.jsonObject_hana();
functionFactory.jsonArray_hana();
functionFactory.jsonArrayAgg_hana();
functionFactory.jsonObjectAgg_hana();
// functionFactory.xmltable();
// functionFactory.xmlextract();
}
@Override

View File

@ -9,17 +9,22 @@ import java.util.List;
import org.hibernate.MappingException;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.Stack;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.query.IllegalQueryOperationException;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
@ -34,6 +39,8 @@ import org.hibernate.sql.ast.tree.update.UpdateStatement;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.sql.model.internal.TableInsertStandard;
import static org.hibernate.dialect.SybaseASESqlAstTranslator.isLob;
/**
* An SQL AST translator for HANA.
*
@ -193,6 +200,32 @@ public class HANASqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
}
}
@Override
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
	// HANA accepts a table function directly in the from clause without the
	// "lateral" keyword, so render the function reference as-is in that case.
	final boolean lateralFunction =
			tableReference instanceof FunctionTableReference && tableReference.isLateral();
	if ( lateralFunction ) {
		tableReference.accept( this );
		return;
	}
	super.renderDerivedTableReference( tableReference );
}
@Override
public void renderNamedSetReturningFunction(String functionName, List<? extends SqlAstNode> sqlAstArguments, AnonymousTupleTableGroupProducer tupleType, String tableIdentifierVariable, SqlAstNodeRenderingMode argumentRenderingMode) {
	// Look up the synthetic "index" part of the tuple; when present, the query
	// requested an ordinality (row number) column for the set-returning function.
	final ModelPart ordinalitySubPart = tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
	if ( ordinalitySubPart != null ) {
		// Emulate "with ordinality" by wrapping the function in a subquery that
		// adds a row_number() over() column under the expected column name.
		appendSql( "(select t.*, row_number() over() " );
		appendSql( ordinalitySubPart.asBasicValuedModelPart().getSelectionExpression() );
		appendSql( " from " );
		renderSimpleNamedFunction( functionName, sqlAstArguments, argumentRenderingMode );
		append( " t)" );
	}
	else {
		super.renderNamedSetReturningFunction( functionName, sqlAstArguments, tupleType, tableIdentifierVariable, argumentRenderingMode );
	}
}
@Override
protected SqlAstNodeRenderingMode getParameterRenderingMode() {
// HANA does not support parameters in lateral subqueries for some reason, so inline all the parameters in this case
@ -200,8 +233,7 @@ public class HANASqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
}
@Override
public void visitFunctionTableReference(FunctionTableReference tableReference) {
tableReference.getFunctionExpression().accept( this );
protected void renderDerivedTableReferenceIdentificationVariable(DerivedTableReference tableReference) {
renderTableReferenceIdentificationVariable( tableReference );
}
@ -214,7 +246,38 @@ public class HANASqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
@Override
protected void renderComparison(Expression lhs, ComparisonOperator operator, Expression rhs) {
// In SAP HANA, LOBs are not "comparable", so we have to use a like predicate for comparison
final boolean isLob = isLob( lhs.getExpressionType() );
if ( operator == ComparisonOperator.DISTINCT_FROM || operator == ComparisonOperator.NOT_DISTINCT_FROM ) {
if ( isLob ) {
switch ( operator ) {
case DISTINCT_FROM:
appendSql( "case when " );
lhs.accept( this );
appendSql( " like " );
rhs.accept( this );
appendSql( " or " );
lhs.accept( this );
appendSql( " is null and " );
rhs.accept( this );
appendSql( " is null then 0 else 1 end=1" );
return;
case NOT_DISTINCT_FROM:
appendSql( "case when " );
lhs.accept( this );
appendSql( " like " );
rhs.accept( this );
appendSql( " or " );
lhs.accept( this );
appendSql( " is null and " );
rhs.accept( this );
appendSql( " is null then 0 else 1 end=0" );
return;
default:
// Fall through
break;
}
}
// HANA does not support plain parameters in the select clause of the intersect emulation
withParameterRenderingMode(
SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER,
@ -222,7 +285,24 @@ public class HANASqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
);
}
else {
renderComparisonEmulateIntersect( lhs, operator, rhs );
if ( isLob ) {
switch ( operator ) {
case EQUAL:
lhs.accept( this );
appendSql( " like " );
rhs.accept( this );
return;
case NOT_EQUAL:
lhs.accept( this );
appendSql( " not like " );
rhs.accept( this );
return;
default:
// Fall through
break;
}
}
renderComparisonStandard( lhs, operator, rhs );
}
}

View File

@ -8,6 +8,7 @@ import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.sql.Types;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.TrimFunction;
@ -212,6 +213,8 @@ public class HSQLDialect extends Dialect {
functionFactory.jsonObjectAgg_h2();
}
functionFactory.unnest( "c1", "c2" );
//trim() requires parameters to be cast when used as trim character
functionContributions.getFunctionRegistry().register( "trim", new TrimFunction(
this,
@ -220,6 +223,11 @@ public class HSQLDialect extends Dialect {
) );
}
@Override
public @Nullable String getDefaultOrdinalityColumnName() {
	// Matches the second column name used when registering unnest for HSQLDB,
	// i.e. functionFactory.unnest( "c1", "c2" ): "c2" carries the ordinality.
	return "c2";
}
@Override
public String currentTime() {
return "localtime";

View File

@ -22,6 +22,8 @@ import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.insert.ConflictClause;
import org.hibernate.sql.ast.tree.insert.InsertSelectStatement;
@ -72,6 +74,17 @@ public class HSQLSqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
}
}
@Override
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
	// HSQLDB accepts a table function directly in the from clause without the
	// "lateral" keyword, so render the function reference as-is in that case.
	final boolean lateralFunction =
			tableReference instanceof FunctionTableReference && tableReference.isLateral();
	if ( lateralFunction ) {
		tableReference.accept( this );
		return;
	}
	super.renderDerivedTableReference( tableReference );
}
@Override
protected void visitConflictClause(ConflictClause conflictClause) {
if ( conflictClause != null ) {

View File

@ -31,6 +31,8 @@ import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.hibernate.type.descriptor.java.EnumJavaType;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.JdbcDateJavaType;
import org.hibernate.type.descriptor.java.JdbcTimeJavaType;
@ -38,6 +40,9 @@ import org.hibernate.type.descriptor.java.JdbcTimestampJavaType;
import org.hibernate.type.descriptor.java.OffsetDateTimeJavaType;
import org.hibernate.type.descriptor.java.PrimitiveByteArrayJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonArrayJdbcType;
import static org.hibernate.dialect.StructHelper.getEmbeddedPart;
import static org.hibernate.dialect.StructHelper.instantiate;
@ -57,6 +62,43 @@ public class JsonHelper {
return sb.toString();
}
public static String arrayToString(MappingType elementMappingType, Object[] values, WrapperOptions options) {
	// Serialize the given values as a JSON array literal, rendering each element
	// according to the element mapping type.
	if ( values.length == 0 ) {
		return "[]";
	}
	final StringBuilder buffer = new StringBuilder();
	final JsonAppender appender = new JsonAppender( buffer );
	buffer.append( '[' );
	toString( elementMappingType, values[0], options, appender );
	for ( int i = 1; i < values.length; i++ ) {
		buffer.append( ',' );
		toString( elementMappingType, values[i], options, appender );
	}
	buffer.append( ']' );
	return buffer.toString();
}
public static String arrayToString(
		JavaType<?> elementJavaType,
		JdbcType elementJdbcType,
		Object[] values,
		WrapperOptions options) {
	// Serialize the given values as a JSON array literal, rendering each element
	// through its Java type and JDBC type pair.
	if ( values.length == 0 ) {
		return "[]";
	}
	final StringBuilder buffer = new StringBuilder();
	final JsonAppender appender = new JsonAppender( buffer );
	@SuppressWarnings("unchecked")
	final JavaType<Object> javaType = (JavaType<Object>) elementJavaType;
	buffer.append( '[' );
	convertedValueToString( javaType, elementJdbcType, values[0], options, appender );
	for ( int i = 1; i < values.length; i++ ) {
		buffer.append( ',' );
		convertedValueToString( javaType, elementJdbcType, values[i], options, appender );
	}
	buffer.append( ']' );
	return buffer.toString();
}
private static void toString(EmbeddableMappingType embeddableMappingType, Object value, WrapperOptions options, JsonAppender appender) {
toString( embeddableMappingType, options, appender, value, '{' );
appender.append( '}' );
@ -130,34 +172,45 @@ public class JsonHelper {
}
private static void convertedValueToString(
MappingType mappedType,
JavaType<Object> javaType,
JdbcType jdbcType,
Object value,
WrapperOptions options,
JsonAppender appender) {
if ( value == null ) {
appender.append( "null" );
}
else if ( mappedType instanceof EmbeddableMappingType ) {
toString( (EmbeddableMappingType) mappedType, value, options, appender );
}
else if ( mappedType instanceof BasicType<?> ) {
//noinspection unchecked
final BasicType<Object> basicType = (BasicType<Object>) mappedType;
convertedBasicValueToString( value, options, appender, basicType );
else if ( jdbcType instanceof AggregateJdbcType aggregateJdbcType ) {
toString( aggregateJdbcType.getEmbeddableMappingType(), value, options, appender );
}
else {
throw new UnsupportedOperationException( "Support for mapping type not yet implemented: " + mappedType.getClass().getName() );
convertedBasicValueToString( value, options, appender, javaType, jdbcType );
}
}
private static void convertedBasicValueToString(
Object value,
WrapperOptions options,
JsonAppender appender,
BasicType<Object> basicType) {
//noinspection unchecked
final JavaType<Object> javaType = (JavaType<Object>) basicType.getJdbcJavaType();
switch ( basicType.getJdbcType().getDefaultSqlTypeCode() ) {
convertedBasicValueToString(
value,
options,
appender,
(JavaType<Object>) basicType.getJdbcJavaType(),
basicType.getJdbcType()
);
}
private static void convertedBasicValueToString(
Object value,
WrapperOptions options,
JsonAppender appender,
JavaType<Object> javaType,
JdbcType jdbcType) {
switch ( jdbcType.getDefaultSqlTypeCode() ) {
case SqlTypes.TINYINT:
case SqlTypes.SMALLINT:
case SqlTypes.INTEGER:
@ -272,19 +325,21 @@ public class JsonHelper {
final int length = Array.getLength( value );
appender.append( '[' );
if ( length != 0 ) {
final BasicType<Object> elementType = ( (BasicPluralType<?, Object>) basicType ).getElementType();
//noinspection unchecked
final JavaType<Object> elementJavaType = ( (BasicPluralJavaType<Object>) javaType ).getElementJavaType();
final JdbcType elementJdbcType = ( (ArrayJdbcType) jdbcType ).getElementJdbcType();
Object arrayElement = Array.get( value, 0 );
convertedValueToString( elementType, arrayElement, options, appender );
convertedValueToString( elementJavaType, elementJdbcType, arrayElement, options, appender );
for ( int i = 1; i < length; i++ ) {
arrayElement = Array.get( value, i );
appender.append( ',' );
convertedValueToString( elementType, arrayElement, options, appender );
convertedValueToString( elementJavaType, elementJdbcType, arrayElement, options, appender );
}
}
appender.append( ']' );
break;
default:
throw new UnsupportedOperationException( "Unsupported JdbcType nested in JSON: " + basicType.getJdbcType() );
throw new UnsupportedOperationException( "Unsupported JdbcType nested in JSON: " + jdbcType );
}
}
@ -314,6 +369,39 @@ public class JsonHelper {
return (X) values;
}
/**
 * Parses a JSON array string into a value of the given plural Java type.
 * Returns {@code null} when the input string is {@code null}.
 */
public static <X> X arrayFromString(
		JavaType<X> javaType,
		JsonArrayJdbcType jsonArrayJdbcType,
		String string,
		WrapperOptions options) throws SQLException {
	if ( string == null ) {
		return null;
	}
	final JavaType<?> elementJavaType = ((BasicPluralJavaType<?>) javaType).getElementJavaType();
	// Prefer the JDBC-level Java class for element deserialization when it differs
	// from the domain element class, resolving its descriptor from the registry.
	final Class<?> preferredJavaTypeClass = jsonArrayJdbcType.getElementJdbcType().getPreferredJavaTypeClass( options );
	final JavaType<?> jdbcJavaType;
	if ( preferredJavaTypeClass == null || preferredJavaTypeClass == elementJavaType.getJavaTypeClass() ) {
		jdbcJavaType = elementJavaType;
	}
	else {
		jdbcJavaType = options.getSessionFactory().getTypeConfiguration().getJavaTypeRegistry()
				.resolveDescriptor( preferredJavaTypeClass );
	}
	// Collect parsed elements, then wrap the list into the requested array/collection type.
	final CustomArrayList arrayList = new CustomArrayList();
	final int i = fromArrayString(
			string,
			false,
			options,
			0,
			arrayList,
			elementJavaType,
			jdbcJavaType,
			jsonArrayJdbcType.getElementJdbcType()
	);
	// The parser returns the index just past the closing bracket of the array.
	assert string.charAt( i - 1 ) == ']';
	return javaType.wrap( arrayList, options );
}
private static int fromString(
EmbeddableMappingType embeddableMappingType,
String string,
@ -559,7 +647,30 @@ public class JsonHelper {
int begin,
CustomArrayList arrayList,
BasicType<?> elementType) throws SQLException {
return fromArrayString(
string,
returnEmbeddable,
options,
begin,
arrayList,
elementType.getMappedJavaType(),
elementType.getJdbcJavaType(),
elementType.getJdbcType()
);
}
private static int fromArrayString(
String string,
boolean returnEmbeddable,
WrapperOptions options,
int begin,
CustomArrayList arrayList,
JavaType<?> javaType,
JavaType<?> jdbcJavaType,
JdbcType jdbcType) throws SQLException {
if ( string.length() == begin + 2 ) {
return begin + 2;
}
boolean hasEscape = false;
assert string.charAt( begin ) == '[';
int start = begin + 1;
@ -586,7 +697,9 @@ public class JsonHelper {
s = State.VALUE_END;
arrayList.add(
fromString(
elementType,
javaType,
jdbcJavaType,
jdbcType,
string,
start,
i,
@ -693,7 +806,9 @@ public class JsonHelper {
string,
i,
arrayList.getUnderlyingArray(),
elementType,
javaType,
jdbcJavaType,
jdbcType,
elementIndex,
returnEmbeddable,
options
@ -728,6 +843,29 @@ public class JsonHelper {
int selectableIndex,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
return consumeLiteral(
string,
start,
values,
jdbcMapping.getMappedJavaType(),
jdbcMapping.getJdbcJavaType(),
jdbcMapping.getJdbcType(),
selectableIndex,
returnEmbeddable,
options
);
}
private static int consumeLiteral(
String string,
int start,
Object[] values,
JavaType<?> javaType,
JavaType<?> jdbcJavaType,
JdbcType jdbcType,
int selectableIndex,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
final char c = string.charAt( start );
switch ( c ) {
case 'n':
@ -750,7 +888,9 @@ public class JsonHelper {
start,
start + 1,
values,
jdbcMapping,
javaType,
jdbcJavaType,
jdbcType,
selectableIndex,
returnEmbeddable,
options
@ -762,14 +902,18 @@ public class JsonHelper {
start,
start + 1,
values,
jdbcMapping,
javaType,
jdbcJavaType,
jdbcType,
selectableIndex,
returnEmbeddable,
options
);
}
values[selectableIndex] = fromString(
jdbcMapping,
javaType,
jdbcJavaType,
jdbcType,
string,
start,
start + 1,
@ -806,7 +950,9 @@ public class JsonHelper {
start,
i,
values,
jdbcMapping,
javaType,
jdbcJavaType,
jdbcType,
selectableIndex,
returnEmbeddable,
options
@ -818,7 +964,9 @@ public class JsonHelper {
start,
i,
values,
jdbcMapping,
javaType,
jdbcJavaType,
jdbcType,
selectableIndex,
returnEmbeddable,
options
@ -836,7 +984,9 @@ public class JsonHelper {
break;
default:
values[selectableIndex] = fromString(
jdbcMapping,
javaType,
jdbcJavaType,
jdbcType,
string,
start,
i,
@ -856,7 +1006,9 @@ public class JsonHelper {
int start,
int dotIndex,
Object[] values,
JdbcMapping jdbcMapping,
JavaType<?> javaType,
JavaType<?> jdbcJavaType,
JdbcType jdbcType,
int selectableIndex,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
@ -870,7 +1022,9 @@ public class JsonHelper {
start,
i,
values,
jdbcMapping,
javaType,
jdbcJavaType,
jdbcType,
selectableIndex,
returnEmbeddable,
options
@ -888,7 +1042,9 @@ public class JsonHelper {
break;
default:
values[selectableIndex] = fromString(
jdbcMapping,
javaType,
jdbcJavaType,
jdbcType,
string,
start,
i,
@ -906,7 +1062,9 @@ public class JsonHelper {
int start,
int eIndex,
Object[] values,
JdbcMapping jdbcMapping,
JavaType<?> javaType,
JavaType<?> jdbcJavaType,
JdbcType jdbcType,
int selectableIndex,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
@ -933,7 +1091,9 @@ public class JsonHelper {
break;
default:
values[selectableIndex] = fromString(
jdbcMapping,
javaType,
jdbcJavaType,
jdbcType,
string,
start,
i,
@ -1001,10 +1161,35 @@ public class JsonHelper {
boolean hasEscape,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
return fromString(
jdbcMapping.getMappedJavaType(),
jdbcMapping.getJdbcJavaType(),
jdbcMapping.getJdbcType(),
string,
start,
end,
hasEscape,
returnEmbeddable,
options
);
}
private static Object fromString(
JavaType<?> javaType,
JavaType<?> jdbcJavaType,
JdbcType jdbcType,
String string,
int start,
int end,
boolean hasEscape,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
if ( hasEscape ) {
final String unescaped = unescape( string, start, end );
return fromString(
jdbcMapping,
javaType,
jdbcJavaType,
jdbcType,
unescaped,
0,
unescaped.length(),
@ -1012,22 +1197,33 @@ public class JsonHelper {
options
);
}
return fromString( jdbcMapping, string, start, end, returnEmbeddable, options );
return fromString(
javaType,
jdbcJavaType,
jdbcType,
string,
start,
end,
returnEmbeddable,
options
);
}
private static Object fromString(
JdbcMapping jdbcMapping,
JavaType<?> javaType,
JavaType<?> jdbcJavaType,
JdbcType jdbcType,
String string,
int start,
int end,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
switch ( jdbcType.getDefaultSqlTypeCode() ) {
case SqlTypes.BINARY:
case SqlTypes.VARBINARY:
case SqlTypes.LONGVARBINARY:
case SqlTypes.LONG32VARBINARY:
return jdbcMapping.getJdbcJavaType().wrap(
return jdbcJavaType.wrap(
PrimitiveByteArrayJavaType.INSTANCE.fromEncodedString(
string,
start,
@ -1036,7 +1232,7 @@ public class JsonHelper {
options
);
case SqlTypes.DATE:
return jdbcMapping.getJdbcJavaType().wrap(
return jdbcJavaType.wrap(
JdbcDateJavaType.INSTANCE.fromEncodedString(
string,
start,
@ -1047,7 +1243,7 @@ public class JsonHelper {
case SqlTypes.TIME:
case SqlTypes.TIME_WITH_TIMEZONE:
case SqlTypes.TIME_UTC:
return jdbcMapping.getJdbcJavaType().wrap(
return jdbcJavaType.wrap(
JdbcTimeJavaType.INSTANCE.fromEncodedString(
string,
start,
@ -1056,7 +1252,7 @@ public class JsonHelper {
options
);
case SqlTypes.TIMESTAMP:
return jdbcMapping.getJdbcJavaType().wrap(
return jdbcJavaType.wrap(
JdbcTimestampJavaType.INSTANCE.fromEncodedString(
string,
start,
@ -1066,7 +1262,7 @@ public class JsonHelper {
);
case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
case SqlTypes.TIMESTAMP_UTC:
return jdbcMapping.getJdbcJavaType().wrap(
return jdbcJavaType.wrap(
OffsetDateTimeJavaType.INSTANCE.fromEncodedString(
string,
start,
@ -1077,28 +1273,21 @@ public class JsonHelper {
case SqlTypes.TINYINT:
case SqlTypes.SMALLINT:
case SqlTypes.INTEGER:
if ( jdbcMapping.getValueConverter() == null ) {
Class<?> javaTypeClass = jdbcMapping.getJavaTypeDescriptor().getJavaTypeClass();
if ( javaTypeClass == Boolean.class ) {
// BooleanJavaType has this as an implicit conversion
return Integer.parseInt( string, start, end, 10 ) == 1;
}
if ( javaTypeClass.isEnum() ) {
return javaTypeClass.getEnumConstants()[Integer.parseInt( string, start, end, 10 )];
}
if ( jdbcJavaType.getJavaTypeClass() == Boolean.class ) {
return jdbcJavaType.wrap( Integer.parseInt( string, start, end, 10 ), options );
}
else if ( jdbcJavaType instanceof EnumJavaType<?> ) {
return jdbcJavaType.wrap( Integer.parseInt( string, start, end, 10 ), options );
}
case SqlTypes.CHAR:
case SqlTypes.NCHAR:
case SqlTypes.VARCHAR:
case SqlTypes.NVARCHAR:
if ( jdbcMapping.getValueConverter() == null
&& jdbcMapping.getJavaTypeDescriptor().getJavaTypeClass() == Boolean.class ) {
// BooleanJavaType has this as an implicit conversion
return end == start + 1 && string.charAt( start ) == 'Y';
if ( jdbcJavaType.getJavaTypeClass() == Boolean.class && end == start + 1 ) {
return jdbcJavaType.wrap( string.charAt( start ), options );
}
default:
if ( jdbcMapping.getJdbcType() instanceof AggregateJdbcType ) {
final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) jdbcMapping.getJdbcType();
if ( jdbcType instanceof AggregateJdbcType aggregateJdbcType ) {
final Object[] subValues = aggregateJdbcType.extractJdbcValues(
CharSequenceHelper.subSequence(
string,
@ -1119,7 +1308,7 @@ public class JsonHelper {
return subValues;
}
return jdbcMapping.getJdbcJavaType().fromEncodedString(
return jdbcJavaType.fromEncodedString(
string,
start,
end

View File

@ -30,7 +30,7 @@ import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.JsonArrayJdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
@ -99,6 +99,11 @@ public class MariaDBDialect extends MySQLDialect {
commonFunctionFactory.jsonArrayAgg_mariadb();
commonFunctionFactory.jsonObjectAgg_mariadb();
commonFunctionFactory.jsonArrayAppend_mariadb();
if ( getVersion().isSameOrAfter( 10, 6 ) ) {
commonFunctionFactory.unnest_emulated();
}
commonFunctionFactory.inverseDistributionOrderedSetAggregates_windowEmulation();
functionContributions.getFunctionRegistry().patternDescriptorBuilder( "median", "median(?1) over ()" )
.setInvariantType( functionContributions.getTypeConfiguration().getBasicTypeRegistry().resolve( StandardBasicTypes.DOUBLE ) )
@ -152,7 +157,7 @@ public class MariaDBDialect extends MySQLDialect {
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
// Make sure we register the JSON type descriptor before calling super, because MariaDB does not need casting
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, JsonJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON_ARRAY, JsonArrayJdbcType.INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( JsonArrayJdbcTypeConstructor.INSTANCE );
super.contributeTypes( typeContributions, serviceRegistry );
if ( getVersion().isSameOrAfter( 10, 7 ) ) {

View File

@ -21,6 +21,7 @@ import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.insert.ConflictClause;
@ -280,6 +281,11 @@ public class MariaDBSqlAstTranslator<T extends JdbcOperation> extends AbstractSq
emulateQueryPartTableReferenceColumnAliasing( tableReference );
}
@Override
protected void renderDerivedTableReferenceIdentificationVariable(DerivedTableReference tableReference) {
// Delegate to plain identification-variable rendering, bypassing any
// derived-table-specific alias handling of the base implementation
renderTableReferenceIdentificationVariable( tableReference );
}
@Override
public void visitOffsetFetchClause(QueryPart queryPart) {
if ( !isRowNumberingCurrentQueryPart() ) {

View File

@ -5,16 +5,17 @@
package org.hibernate.dialect;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonArrayJdbcType;
/**
* @author Christian Beikov
*/
public class MySQLCastingJsonArrayJdbcType extends JsonArrayJdbcType {
/**
* Singleton access
*/
public static final JsonArrayJdbcType INSTANCE = new MySQLCastingJsonArrayJdbcType();
public MySQLCastingJsonArrayJdbcType(JdbcType elementJdbcType) {
super( elementJdbcType );
}
@Override
public void appendWriteExpression(

View File

@ -0,0 +1,43 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeConstructor;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * A {@link JdbcTypeConstructor} that produces {@link MySQLCastingJsonArrayJdbcType}
 * instances for the {@link SqlTypes#JSON_ARRAY} type code, wrapping the given
 * element {@link JdbcType}.
 */
public class MySQLCastingJsonArrayJdbcTypeConstructor implements JdbcTypeConstructor {

	public static final MySQLCastingJsonArrayJdbcTypeConstructor INSTANCE = new MySQLCastingJsonArrayJdbcTypeConstructor();

	@Override
	public int getDefaultSqlTypeCode() {
		return SqlTypes.JSON_ARRAY;
	}

	@Override
	public JdbcType resolveType(
			TypeConfiguration typeConfiguration,
			Dialect dialect,
			BasicType<?> elementType,
			ColumnTypeInformation columnTypeInformation) {
		// Delegate to the JdbcType-based overload using the element's JDBC type
		final JdbcType elementJdbcType = elementType.getJdbcType();
		return resolveType( typeConfiguration, dialect, elementJdbcType, columnTypeInformation );
	}

	@Override
	public JdbcType resolveType(
			TypeConfiguration typeConfiguration,
			Dialect dialect,
			JdbcType elementType,
			ColumnTypeInformation columnTypeInformation) {
		// A fresh JdbcType is created per resolution, wrapping the element type
		return new MySQLCastingJsonArrayJdbcType( elementType );
	}
}

View File

@ -537,6 +537,11 @@ public class MySQLDialect extends Dialect {
return Types.BIT;
}
@Override
public int getPreferredSqlTypeCodeForArray() {
return SqlTypes.JSON_ARRAY;
}
// @Override
// public int getDefaultDecimalPrecision() {
// //this is the maximum, but I guess it's too high
@ -652,6 +657,10 @@ public class MySQLDialect extends Dialect {
functionFactory.jsonMergepatch_mysql();
functionFactory.jsonArrayAppend_mysql();
functionFactory.jsonArrayInsert_mysql();
if ( getMySQLVersion().isSameOrAfter( 8 ) ) {
functionFactory.unnest_emulated();
}
}
@Override
@ -661,7 +670,7 @@ public class MySQLDialect extends Dialect {
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, MySQLCastingJsonJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON_ARRAY, MySQLCastingJsonArrayJdbcType.INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( MySQLCastingJsonArrayJdbcTypeConstructor.INSTANCE );
// MySQL requires a custom binder for binding untyped nulls with the NULL type
typeContributions.contributeJdbcType( NullJdbcType.INSTANCE );

View File

@ -23,6 +23,8 @@ import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.from.ValuesTableReference;
@ -315,6 +317,17 @@ public class MySQLSqlAstTranslator<T extends JdbcOperation> extends AbstractSqlA
emulateValuesTableReferenceColumnAliasing( tableReference );
}
/**
 * Renders lateral {@link FunctionTableReference}s without the {@code lateral}
 * keyword, which functions do not need; all other derived table references
 * use the default rendering.
 */
@Override
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
if ( tableReference instanceof FunctionTableReference && tableReference.isLateral() ) {
// No need for a lateral keyword for functions
tableReference.accept( this );
}
else {
super.renderDerivedTableReference( tableReference );
}
}
@Override
public void visitOffsetFetchClause(QueryPart queryPart) {
if ( !isRowNumberingCurrentQueryPart() ) {

View File

@ -83,13 +83,10 @@ import org.hibernate.type.JavaObjectType;
import org.hibernate.type.NullType;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.java.PrimitiveByteArrayJavaType;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.BlobJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.NullJdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectJdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectNullAsNullTypeJdbcType;
import org.hibernate.type.descriptor.jdbc.OracleJsonArrayBlobJdbcType;
import org.hibernate.type.descriptor.jdbc.OracleJsonBlobJdbcType;
import org.hibernate.type.descriptor.jdbc.SqlTypedJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
@ -134,7 +131,6 @@ import static org.hibernate.type.SqlTypes.FLOAT;
import static org.hibernate.type.SqlTypes.GEOMETRY;
import static org.hibernate.type.SqlTypes.INTEGER;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.NUMERIC;
import static org.hibernate.type.SqlTypes.NVARCHAR;
import static org.hibernate.type.SqlTypes.REAL;
@ -425,6 +421,8 @@ public class OracleDialect extends Dialect {
functionFactory.xmlquery_oracle();
functionFactory.xmlexists();
functionFactory.xmlagg();
functionFactory.unnest_oracle();
}
@Override
@ -821,11 +819,9 @@ public class OracleDialect extends Dialect {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOMETRY, "MDSYS.SDO_GEOMETRY", this ) );
if ( getVersion().isSameOrAfter( 21 ) ) {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "json", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON_ARRAY, "json", this ) );
}
else {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "blob", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON_ARRAY, "blob", this ) );
}
ddlTypeRegistry.addDescriptor( new ArrayDdlTypeImpl( this, false ) );
@ -995,16 +991,16 @@ public class OracleDialect extends Dialect {
if ( getVersion().isSameOrAfter( 21 ) ) {
typeContributions.contributeJdbcType( OracleJsonJdbcType.INSTANCE );
typeContributions.contributeJdbcType( OracleJsonArrayJdbcType.INSTANCE );
typeContributions.contributeJdbcTypeConstructor( OracleJsonArrayJdbcTypeConstructor.NATIVE_INSTANCE );
}
else {
typeContributions.contributeJdbcType( OracleJsonBlobJdbcType.INSTANCE );
typeContributions.contributeJdbcType( OracleJsonArrayBlobJdbcType.INSTANCE );
typeContributions.contributeJdbcTypeConstructor( OracleJsonArrayJdbcTypeConstructor.BLOB_INSTANCE );
}
if ( OracleJdbcHelper.isUsable( serviceRegistry ) ) {
// Register a JdbcType to allow reading from native queries
typeContributions.contributeJdbcType( new ArrayJdbcType( ObjectJdbcType.INSTANCE ) );
// typeContributions.contributeJdbcType( new ArrayJdbcType( ObjectJdbcType.INSTANCE ) );
typeContributions.contributeJdbcTypeConstructor( getArrayJdbcTypeConstructor( serviceRegistry ) );
typeContributions.contributeJdbcTypeConstructor( getNestedTableJdbcTypeConstructor( serviceRegistry ) );
}

View File

@ -6,6 +6,7 @@ package org.hibernate.dialect;
import org.hibernate.type.descriptor.converter.spi.BasicValueConverter;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.OracleJsonArrayBlobJdbcType;
/**
@ -14,12 +15,9 @@ import org.hibernate.type.descriptor.jdbc.OracleJsonArrayBlobJdbcType;
* @author Christian Beikov
*/
public class OracleJsonArrayJdbcType extends OracleJsonArrayBlobJdbcType {
/**
* Singleton access
*/
public static final OracleJsonArrayJdbcType INSTANCE = new OracleJsonArrayJdbcType();
private OracleJsonArrayJdbcType() {
public OracleJsonArrayJdbcType(JdbcType elementJdbcType) {
super( elementJdbcType );
}
@Override

View File

@ -0,0 +1,51 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.OracleJsonArrayBlobJdbcType;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * A {@link JdbcTypeConstructor} for Oracle JSON array types. Depending on the
 * {@code nativeJson} flag it resolves either an {@link OracleJsonArrayJdbcType}
 * (native {@code json} storage) or an {@link OracleJsonArrayBlobJdbcType}
 * (BLOB-backed storage).
 */
public class OracleJsonArrayJdbcTypeConstructor implements JdbcTypeConstructor {

	public static final OracleJsonArrayJdbcTypeConstructor NATIVE_INSTANCE = new OracleJsonArrayJdbcTypeConstructor( true );
	public static final OracleJsonArrayJdbcTypeConstructor BLOB_INSTANCE = new OracleJsonArrayJdbcTypeConstructor( false );

	// true -> native json storage; false -> BLOB-backed storage
	private final boolean nativeJson;

	public OracleJsonArrayJdbcTypeConstructor(boolean nativeJson) {
		this.nativeJson = nativeJson;
	}

	@Override
	public int getDefaultSqlTypeCode() {
		return SqlTypes.JSON_ARRAY;
	}

	@Override
	public JdbcType resolveType(
			TypeConfiguration typeConfiguration,
			Dialect dialect,
			BasicType<?> elementType,
			ColumnTypeInformation columnTypeInformation) {
		// Delegate to the JdbcType-based overload using the element's JDBC type
		final JdbcType elementJdbcType = elementType.getJdbcType();
		return resolveType( typeConfiguration, dialect, elementJdbcType, columnTypeInformation );
	}

	@Override
	public JdbcType resolveType(
			TypeConfiguration typeConfiguration,
			Dialect dialect,
			JdbcType elementType,
			ColumnTypeInformation columnTypeInformation) {
		if ( nativeJson ) {
			return new OracleJsonArrayJdbcType( elementType );
		}
		return new OracleJsonArrayBlobJdbcType( elementType );
	}
}

View File

@ -8,17 +8,22 @@ import java.util.ArrayList;
import java.util.List;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.EmbeddableValuedModelPart;
import org.hibernate.metamodel.mapping.EntityIdentifierMapping;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.query.IllegalQueryOperationException;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.query.sqm.FrameExclusion;
import org.hibernate.query.sqm.FrameKind;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteMaterialization;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
@ -30,6 +35,7 @@ import org.hibernate.sql.ast.tree.expression.Over;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.SqlTupleContainer;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.FromClause;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
@ -254,9 +260,42 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends SqlAstTrans
/**
 * Renders a function table reference wrapped in Oracle's {@code table(...)}
 * collection expression, appending the identification variable afterwards
 * when the reference does not render it itself.
 */
@Override
public void visitFunctionTableReference(FunctionTableReference tableReference) {
append( "table(" );
tableReference.getFunctionExpression().accept( this );
append( CLOSE_PARENTHESIS );
if ( !tableReference.rendersIdentifierVariable() ) {
renderDerivedTableReferenceIdentificationVariable( tableReference );
}
}
/**
 * Renders a named set-returning function for Oracle by wrapping it in a
 * {@code table(...)} expression. When the tuple type exposes an ordinality
 * (index) sub-part, the ordinality column is emulated with {@code rownum}
 * inside a {@code lateral} subquery, since the plain {@code table(...)} form
 * provides no row numbering.
 */
@Override
public void renderNamedSetReturningFunction(String functionName, List<? extends SqlAstNode> sqlAstArguments, AnonymousTupleTableGroupProducer tupleType, String tableIdentifierVariable, SqlAstNodeRenderingMode argumentRenderingMode) {
final ModelPart ordinalitySubPart = tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
if ( ordinalitySubPart != null ) {
// Emulate with-ordinality: select all function columns plus rownum as the index column
appendSql( "lateral (select t.*, rownum " );
appendSql( ordinalitySubPart.asBasicValuedModelPart().getSelectionExpression() );
appendSql( " from table(" );
renderSimpleNamedFunction( functionName, sqlAstArguments, argumentRenderingMode );
append( ") t)" );
}
else {
// No ordinality requested; a plain table(...) wrapper suffices
appendSql( "table(" );
super.renderNamedSetReturningFunction( functionName, sqlAstArguments, tupleType, tableIdentifierVariable, argumentRenderingMode );
append( ')' );
}
}
/**
 * Renders lateral {@link FunctionTableReference}s without the {@code lateral}
 * keyword, which functions do not need; all other derived table references
 * use the default rendering.
 */
@Override
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
if ( tableReference instanceof FunctionTableReference && tableReference.isLateral() ) {
// No need for a lateral keyword for functions
tableReference.accept( this );
}
else {
super.renderDerivedTableReference( tableReference );
}
}
@Override
protected void renderDerivedTableReferenceIdentificationVariable(DerivedTableReference tableReference) {
// Delegate to plain identification-variable rendering, bypassing any
// derived-table-specific alias handling of the base implementation
renderTableReferenceIdentificationVariable( tableReference );
}

View File

@ -12,6 +12,7 @@ import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeConstructor;
/**
* The following class provides some convenience methods for accessing JdbcType instance,
@ -52,12 +53,12 @@ public final class PgJdbcHelper {
return createJdbcType( serviceRegistry, "org.hibernate.dialect.PostgreSQLJsonPGObjectJsonbType" );
}
public static JdbcType getJsonArrayJdbcType(ServiceRegistry serviceRegistry) {
return createJdbcType( serviceRegistry, "org.hibernate.dialect.PostgreSQLJsonArrayPGObjectJsonType" );
public static JdbcTypeConstructor getJsonArrayJdbcType(ServiceRegistry serviceRegistry) {
return createJdbcTypeConstructor( serviceRegistry, "org.hibernate.dialect.PostgreSQLJsonArrayPGObjectJsonJdbcTypeConstructor" );
}
public static JdbcType getJsonbArrayJdbcType(ServiceRegistry serviceRegistry) {
return createJdbcType( serviceRegistry, "org.hibernate.dialect.PostgreSQLJsonArrayPGObjectJsonbType" );
public static JdbcTypeConstructor getJsonbArrayJdbcType(ServiceRegistry serviceRegistry) {
return createJdbcTypeConstructor( serviceRegistry, "org.hibernate.dialect.PostgreSQLJsonArrayPGObjectJsonbJdbcTypeConstructor" );
}
public static JdbcType createJdbcType(ServiceRegistry serviceRegistry, String className) {
@ -74,4 +75,19 @@ public final class PgJdbcHelper {
throw new HibernateError( "Could not construct JdbcType", e );
}
}
/**
 * Instantiates the named {@link JdbcTypeConstructor} via its no-arg constructor,
 * loading the class through the registry's {@link ClassLoaderService}.
 *
 * @param serviceRegistry the registry from which the class loader service is obtained
 * @param className fully-qualified name of the {@code JdbcTypeConstructor} implementation
 * @throws HibernateError if the class has no empty constructor or cannot be instantiated
 */
public static JdbcTypeConstructor createJdbcTypeConstructor(ServiceRegistry serviceRegistry, String className) {
final ClassLoaderService classLoaderService = serviceRegistry.requireService( ClassLoaderService.class );
try {
final Class<?> clazz = classLoaderService.classForName( className );
final Constructor<?> constructor = clazz.getConstructor();
return (JdbcTypeConstructor) constructor.newInstance();
}
catch (NoSuchMethodException e) {
throw new HibernateError( "Class does not have an empty constructor", e );
}
catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
throw new HibernateError( "Could not construct JdbcTypeConstructor", e );
}
}
}

View File

@ -73,7 +73,9 @@ public class PostgreSQLArrayJdbcType extends ArrayJdbcType {
);
objects = new Object[domainObjects.length];
for ( int i = 0; i < domainObjects.length; i++ ) {
objects[i] = aggregateJdbcType.createJdbcValue( domainObjects[i], options );
if ( domainObjects[i] != null ) {
objects[i] = aggregateJdbcType.createJdbcValue( domainObjects[i], options );
}
}
}
else {

View File

@ -5,6 +5,7 @@
package org.hibernate.dialect;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonArrayJdbcType;
/**
@ -12,12 +13,10 @@ import org.hibernate.type.descriptor.jdbc.JsonArrayJdbcType;
*/
public class PostgreSQLCastingJsonArrayJdbcType extends JsonArrayJdbcType {
public static final PostgreSQLCastingJsonArrayJdbcType JSON_INSTANCE = new PostgreSQLCastingJsonArrayJdbcType( false );
public static final PostgreSQLCastingJsonArrayJdbcType JSONB_INSTANCE = new PostgreSQLCastingJsonArrayJdbcType( true );
private final boolean jsonb;
public PostgreSQLCastingJsonArrayJdbcType(boolean jsonb) {
public PostgreSQLCastingJsonArrayJdbcType(JdbcType elementJdbcType, boolean jsonb) {
super( elementJdbcType );
this.jsonb = jsonb;
}

View File

@ -0,0 +1,50 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeConstructor;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * A {@link JdbcTypeConstructor} that produces {@link PostgreSQLCastingJsonArrayJdbcType}
 * instances, targeting either the {@code jsonb} or the {@code json} datatype
 * depending on the {@code jsonb} flag.
 */
public class PostgreSQLCastingJsonArrayJdbcTypeConstructor implements JdbcTypeConstructor {

	public static final PostgreSQLCastingJsonArrayJdbcTypeConstructor JSONB_INSTANCE = new PostgreSQLCastingJsonArrayJdbcTypeConstructor( true );
	public static final PostgreSQLCastingJsonArrayJdbcTypeConstructor JSON_INSTANCE = new PostgreSQLCastingJsonArrayJdbcTypeConstructor( false );

	// true -> jsonb; false -> json
	private final boolean jsonb;

	public PostgreSQLCastingJsonArrayJdbcTypeConstructor(boolean jsonb) {
		this.jsonb = jsonb;
	}

	@Override
	public int getDefaultSqlTypeCode() {
		return SqlTypes.JSON_ARRAY;
	}

	@Override
	public JdbcType resolveType(
			TypeConfiguration typeConfiguration,
			Dialect dialect,
			BasicType<?> elementType,
			ColumnTypeInformation columnTypeInformation) {
		// Delegate to the JdbcType-based overload using the element's JDBC type
		final JdbcType elementJdbcType = elementType.getJdbcType();
		return resolveType( typeConfiguration, dialect, elementJdbcType, columnTypeInformation );
	}

	@Override
	public JdbcType resolveType(
			TypeConfiguration typeConfiguration,
			Dialect dialect,
			JdbcType elementType,
			ColumnTypeInformation columnTypeInformation) {
		return new PostgreSQLCastingJsonArrayJdbcType( elementType, jsonb );
	}
}

View File

@ -17,6 +17,7 @@ import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.Length;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
@ -111,7 +112,6 @@ import static org.hibernate.type.SqlTypes.GEOGRAPHY;
import static org.hibernate.type.SqlTypes.GEOMETRY;
import static org.hibernate.type.SqlTypes.INET;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.LONG32NVARCHAR;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.LONG32VARCHAR;
@ -261,7 +261,6 @@ public class PostgreSQLDialect extends Dialect {
// Prefer jsonb if possible
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "jsonb", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON_ARRAY, "jsonb", this ) );
ddlTypeRegistry.addDescriptor( new NamedNativeEnumDdlTypeImpl( this ) );
ddlTypeRegistry.addDescriptor( new NamedNativeOrdinalEnumDdlTypeImpl( this ) );
@ -665,6 +664,18 @@ public class PostgreSQLDialect extends Dialect {
);
functionContributions.getFunctionRegistry().registerAlternateKey( "truncate", "trunc" );
functionFactory.dateTrunc();
if ( getVersion().isSameOrAfter( 17 ) ) {
functionFactory.unnest( null, "ordinality" );
}
else {
functionFactory.unnest_postgresql();
}
}
@Override
public @Nullable String getDefaultOrdinalityColumnName() {
return "ordinality";
}
/**
@ -1415,14 +1426,14 @@ public class PostgreSQLDialect extends Dialect {
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getIntervalJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getStructJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getJsonbJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addDescriptorIfAbsent( PgJdbcHelper.getJsonbArrayJdbcType( serviceRegistry ) );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PgJdbcHelper.getJsonbArrayJdbcType( serviceRegistry ) );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLStructCastingJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonArrayJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PostgreSQLCastingJsonArrayJdbcTypeConstructor.JSONB_INSTANCE );
}
}
else {
@ -1430,7 +1441,7 @@ public class PostgreSQLDialect extends Dialect {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLStructCastingJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonArrayJdbcType.JSONB_INSTANCE );
jdbcTypeRegistry.addTypeConstructorIfAbsent( PostgreSQLCastingJsonArrayJdbcTypeConstructor.JSONB_INSTANCE );
}
// PostgreSQL requires a custom binder for binding untyped nulls as VARBINARY
@ -1450,6 +1461,7 @@ public class PostgreSQLDialect extends Dialect {
jdbcTypeRegistry.addDescriptor( PostgreSQLOrdinalEnumJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptor( PostgreSQLUUIDJdbcType.INSTANCE );
// Replace the standard array constructor
jdbcTypeRegistry.addTypeConstructor( PostgreSQLArrayJdbcTypeConstructor.INSTANCE );
}

View File

@ -0,0 +1,41 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeConstructor;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * Factory for {@link PostgreSQLJsonArrayPGObjectType} using the {@code json} datatype.
 */
public class PostgreSQLJsonArrayPGObjectJsonJdbcTypeConstructor implements JdbcTypeConstructor {
@Override
public JdbcType resolveType(
TypeConfiguration typeConfiguration,
Dialect dialect,
BasicType<?> elementType,
ColumnTypeInformation columnTypeInformation) {
// Delegate to the JdbcType-based overload using the element's JDBC type
return resolveType( typeConfiguration, dialect, elementType.getJdbcType(), columnTypeInformation );
}
@Override
public JdbcType resolveType(
TypeConfiguration typeConfiguration,
Dialect dialect,
JdbcType elementType,
ColumnTypeInformation columnTypeInformation) {
// false -> json (not jsonb)
return new PostgreSQLJsonArrayPGObjectType( elementType, false );
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.JSON_ARRAY;
}
}

View File

@ -1,14 +0,0 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect;
/**
* @author Christian Beikov
*/
public class PostgreSQLJsonArrayPGObjectJsonType extends AbstractPostgreSQLJsonArrayPGObjectType {
public PostgreSQLJsonArrayPGObjectJsonType() {
super( false );
}
}

View File

@ -0,0 +1,41 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeConstructor;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * Factory for {@link PostgreSQLJsonArrayPGObjectType} using the {@code jsonb} datatype.
 */
public class PostgreSQLJsonArrayPGObjectJsonbJdbcTypeConstructor implements JdbcTypeConstructor {
@Override
public JdbcType resolveType(
TypeConfiguration typeConfiguration,
Dialect dialect,
BasicType<?> elementType,
ColumnTypeInformation columnTypeInformation) {
// Delegate to the JdbcType-based overload using the element's JDBC type
return resolveType( typeConfiguration, dialect, elementType.getJdbcType(), columnTypeInformation );
}
@Override
public JdbcType resolveType(
TypeConfiguration typeConfiguration,
Dialect dialect,
JdbcType elementType,
ColumnTypeInformation columnTypeInformation) {
// true -> jsonb
return new PostgreSQLJsonArrayPGObjectType( elementType, true );
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.JSON_ARRAY;
}
}

View File

@ -1,14 +0,0 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect;
/**
* @author Christian Beikov
*/
public class PostgreSQLJsonArrayPGObjectJsonbType extends AbstractPostgreSQLJsonArrayPGObjectType {
public PostgreSQLJsonArrayPGObjectJsonbType() {
super( true );
}
}

View File

@ -16,6 +16,7 @@ import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonArrayJdbcType;
import org.postgresql.util.PGobject;
@ -23,10 +24,12 @@ import org.postgresql.util.PGobject;
/**
* @author Christian Beikov
*/
public abstract class AbstractPostgreSQLJsonArrayPGObjectType extends JsonArrayJdbcType {
public class PostgreSQLJsonArrayPGObjectType extends JsonArrayJdbcType {
private final boolean jsonb;
protected AbstractPostgreSQLJsonArrayPGObjectType(boolean jsonb) {
public PostgreSQLJsonArrayPGObjectType(JdbcType elementJdbcType, boolean jsonb) {
super( elementJdbcType );
this.jsonb = jsonb;
}
@ -41,7 +44,7 @@ public abstract class AbstractPostgreSQLJsonArrayPGObjectType extends JsonArrayJ
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
final String stringValue = ( (AbstractPostgreSQLJsonArrayPGObjectType) getJdbcType() ).toString(
final String stringValue = ( (PostgreSQLJsonArrayPGObjectType) getJdbcType() ).toString(
value,
getJavaType(),
options
@ -55,7 +58,7 @@ public abstract class AbstractPostgreSQLJsonArrayPGObjectType extends JsonArrayJ
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
final String stringValue = ( (AbstractPostgreSQLJsonArrayPGObjectType) getJdbcType() ).toString(
final String stringValue = ( (PostgreSQLJsonArrayPGObjectType) getJdbcType() ).toString(
value,
getJavaType(),
options
@ -91,7 +94,7 @@ public abstract class AbstractPostgreSQLJsonArrayPGObjectType extends JsonArrayJ
if ( object == null ) {
return null;
}
return ( (AbstractPostgreSQLJsonArrayPGObjectType) getJdbcType() ).fromString(
return ( (PostgreSQLJsonArrayPGObjectType) getJdbcType() ).fromString(
object.toString(),
getJavaType(),
options

View File

@ -0,0 +1,29 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.XmlArrayJdbcType;
/**
 * XML array type for SQL Server that wraps write expressions in an explicit
 * {@code cast(... as xml)} so the bound value is typed as XML.
 *
 * @author Christian Beikov
 */
public class SQLServerCastingXmlArrayJdbcType extends XmlArrayJdbcType {

	public SQLServerCastingXmlArrayJdbcType(JdbcType elementJdbcType) {
		super( elementJdbcType );
	}

	@Override
	public void appendWriteExpression(
			String writeExpression,
			SqlAppender appender,
			Dialect dialect) {
		// Emits: cast(<writeExpression> as xml)
		appender.append( "cast(" + writeExpression + " as xml)" );
	}
}

View File

@ -0,0 +1,43 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeConstructor;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * Factory for {@link SQLServerCastingXmlArrayJdbcType}.
 */
public class SQLServerCastingXmlArrayJdbcTypeConstructor implements JdbcTypeConstructor {

	public static final SQLServerCastingXmlArrayJdbcTypeConstructor INSTANCE = new SQLServerCastingXmlArrayJdbcTypeConstructor();

	@Override
	public int getDefaultSqlTypeCode() {
		return SqlTypes.XML_ARRAY;
	}

	@Override
	public JdbcType resolveType(
			TypeConfiguration typeConfiguration,
			Dialect dialect,
			BasicType<?> elementType,
			ColumnTypeInformation columnTypeInformation) {
		// Resolve via the element's JDBC type
		final JdbcType elementJdbcType = elementType.getJdbcType();
		return resolveType( typeConfiguration, dialect, elementJdbcType, columnTypeInformation );
	}

	@Override
	public JdbcType resolveType(
			TypeConfiguration typeConfiguration,
			Dialect dialect,
			JdbcType elementType,
			ColumnTypeInformation columnTypeInformation) {
		return new SQLServerCastingXmlArrayJdbcType( elementType );
	}
}

View File

@ -0,0 +1,43 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.XmlJdbcType;
/**
 * XML type for SQL Server that wraps write expressions in an explicit
 * {@code cast(... as xml)}.
 *
 * @author Christian Beikov
 */
public class SQLServerCastingXmlJdbcType extends XmlJdbcType {
	/**
	 * Singleton access
	 */
	public static final XmlJdbcType INSTANCE = new SQLServerCastingXmlJdbcType( null );

	public SQLServerCastingXmlJdbcType(EmbeddableMappingType embeddableMappingType) {
		super( embeddableMappingType );
	}

	@Override
	public AggregateJdbcType resolveAggregateJdbcType(
			EmbeddableMappingType mappingType,
			String sqlType,
			RuntimeModelCreationContext creationContext) {
		// Produce an instance bound to the embeddable mapping of the aggregate
		return new SQLServerCastingXmlJdbcType( mappingType );
	}

	@Override
	public void appendWriteExpression(
			String writeExpression,
			SqlAppender appender,
			Dialect dialect) {
		// Emits: cast(<writeExpression> as xml)
		appender.append( "cast(" + writeExpression + " as xml)" );
	}
}

View File

@ -82,7 +82,6 @@ import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.TimestampUtcAsJdbcTimestampJdbcType;
import org.hibernate.type.descriptor.jdbc.TinyIntAsSmallIntJdbcType;
import org.hibernate.type.descriptor.jdbc.UUIDJdbcType;
import org.hibernate.type.descriptor.jdbc.XmlJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
@ -116,6 +115,7 @@ import static org.hibernate.type.SqlTypes.TIME_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.UUID;
import static org.hibernate.type.SqlTypes.VARBINARY;
import static org.hibernate.type.SqlTypes.VARCHAR;
import static org.hibernate.type.SqlTypes.XML_ARRAY;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsDate;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTime;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMicros;
@ -267,6 +267,11 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "uniqueidentifier", this ) );
}
@Override
public int getPreferredSqlTypeCodeForArray() {
	// Prefer XML-document based storage for basic plural (array) types on this dialect
	return XML_ARRAY;
}
@Override
public JdbcType resolveSqlTypeDescriptor(
String columnTypeName,
@ -329,8 +334,9 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
Types.TINYINT,
TinyIntAsSmallIntJdbcType.INSTANCE
);
typeContributions.contributeJdbcType( XmlJdbcType.INSTANCE );
typeContributions.contributeJdbcType( SQLServerCastingXmlJdbcType.INSTANCE );
typeContributions.contributeJdbcType( UUIDJdbcType.INSTANCE );
typeContributions.contributeJdbcTypeConstructor( SQLServerCastingXmlArrayJdbcTypeConstructor.INSTANCE );
}
@Override
@ -439,6 +445,9 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
functionFactory.xmlquery_sqlserver();
functionFactory.xmlexists_sqlserver();
functionFactory.xmlagg_sqlserver();
functionFactory.unnest_sqlserver();
if ( getVersion().isSameOrAfter( 14 ) ) {
functionFactory.listagg_stringAggWithinGroup( "varchar(max)" );
functionFactory.jsonArrayAgg_sqlserver( getVersion().isSameOrAfter( 16 ) );

View File

@ -10,13 +10,18 @@ import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.Stack;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.query.IllegalQueryOperationException;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstJoinType;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.delete.DeleteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
@ -24,10 +29,9 @@ import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableGroupJoin;
import org.hibernate.sql.ast.tree.from.TableReference;
import org.hibernate.sql.ast.tree.from.UnionTableReference;
import org.hibernate.sql.ast.tree.insert.ConflictClause;
import org.hibernate.sql.ast.tree.insert.InsertSelectStatement;
@ -180,17 +184,24 @@ public class SQLServerSqlAstTranslator<T extends JdbcOperation> extends SqlAstTr
}
}
protected boolean renderPrimaryTableReference(TableGroup tableGroup, LockMode lockMode) {
if ( shouldInlineCte( tableGroup ) ) {
inlineCteTableGroup( tableGroup, lockMode );
return false;
}
final TableReference tableReference = tableGroup.getPrimaryTableReference();
if ( tableReference instanceof NamedTableReference ) {
return renderNamedTableReference( (NamedTableReference) tableReference, lockMode );
}
@Override
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
tableReference.accept( this );
return false;
}
@Override
public void renderNamedSetReturningFunction(String functionName, List<? extends SqlAstNode> sqlAstArguments, AnonymousTupleTableGroupProducer tupleType, String tableIdentifierVariable, SqlAstNodeRenderingMode argumentRenderingMode) {
final ModelPart ordinalitySubPart = tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
if ( ordinalitySubPart != null ) {
appendSql( "(select t.*, row_number() over(order by (select 1)) " );
appendSql( ordinalitySubPart.asBasicValuedModelPart().getSelectionExpression() );
appendSql( " from " );
renderSimpleNamedFunction( functionName, sqlAstArguments, argumentRenderingMode );
append( " t)" );
}
else {
super.renderNamedSetReturningFunction( functionName, sqlAstArguments, tupleType, tableIdentifierVariable, argumentRenderingMode );
}
}
@Override

View File

@ -6,7 +6,6 @@ package org.hibernate.dialect;
import java.util.List;
import org.hibernate.LockMode;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.sql.ast.Clause;
@ -18,9 +17,6 @@ import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableReference;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SelectClause;
@ -107,17 +103,8 @@ public class SpannerSqlAstTranslator<T extends JdbcOperation> extends AbstractSq
}
@Override
protected boolean renderPrimaryTableReference(TableGroup tableGroup, LockMode lockMode) {
if ( shouldInlineCte( tableGroup ) ) {
inlineCteTableGroup( tableGroup, lockMode );
return false;
}
final TableReference tableReference = tableGroup.getPrimaryTableReference();
if ( tableReference instanceof NamedTableReference ) {
return renderNamedTableReference( (NamedTableReference) tableReference, lockMode );
}
final DerivedTableReference derivedTableReference = (DerivedTableReference) tableReference;
final boolean correlated = derivedTableReference.isLateral();
protected void renderDerivedTableReference(DerivedTableReference tableReference) {
final boolean correlated = tableReference.isLateral();
final boolean oldCorrelated = this.correlated;
if ( correlated ) {
this.correlated = true;
@ -128,7 +115,6 @@ public class SpannerSqlAstTranslator<T extends JdbcOperation> extends AbstractSq
this.correlated = oldCorrelated;
appendSql( CLOSE_PARENTHESIS );
}
return false;
}
@Override

View File

@ -13,7 +13,9 @@ import org.hibernate.Length;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.QueryTimeoutException;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.pagination.LimitHandler;
import org.hibernate.dialect.pagination.TopLimitHandler;
import org.hibernate.engine.jdbc.Size;
@ -52,6 +54,7 @@ import static org.hibernate.type.SqlTypes.NCLOB;
import static org.hibernate.type.SqlTypes.TIME;
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.XML_ARRAY;
/**
* A {@linkplain Dialect SQL dialect} for Sybase Adaptive Server Enterprise 16 and above.
@ -157,6 +160,11 @@ public class SybaseASEDialect extends SybaseDialect {
}
}
@Override
public int getPreferredSqlTypeCodeForArray() {
	// Prefer XML-document based storage for basic plural (array) types on Sybase ASE
	return XML_ARRAY;
}
@Override
public int getMaxVarcharLength() {
// the maximum length of a VARCHAR or VARBINARY
@ -168,6 +176,15 @@ public class SybaseASEDialect extends SybaseDialect {
return 16_384;
}
@Override
public void initializeFunctionRegistry(FunctionContributions functionContributions) {
	super.initializeFunctionRegistry( functionContributions );
	// Register the Sybase ASE specific unnest() emulation on top of the inherited functions.
	// Local made final and argument spacing fixed to match the codebase convention.
	final CommonFunctionFactory functionFactory = new CommonFunctionFactory( functionContributions );
	functionFactory.unnest_sybasease();
}
@Override
public long getDefaultLobLength() {
return Length.LONG32;

View File

@ -11,6 +11,7 @@ import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.Stack;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.query.IllegalQueryOperationException;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.sql.ast.Clause;
@ -46,6 +47,7 @@ import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SelectClause;
import org.hibernate.sql.ast.tree.update.UpdateStatement;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.type.SqlTypes;
/**
* A SQL AST translator for Sybase ASE.
@ -320,7 +322,7 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
append( '(' );
visitValuesListEmulateSelectUnion( tableReference.getValuesList() );
append( ')' );
renderDerivedTableReference( tableReference );
renderDerivedTableReferenceIdentificationVariable( tableReference );
}
@Override
@ -355,8 +357,56 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
@Override
protected void renderComparison(Expression lhs, ComparisonOperator operator, Expression rhs) {
// In Sybase ASE, XMLTYPE is not "comparable", so we have to cast the two parts to varchar for this purpose
final boolean isLob = isLob( lhs.getExpressionType() );
if ( isLob ) {
switch ( operator ) {
case EQUAL:
lhs.accept( this );
appendSql( " like " );
rhs.accept( this );
return;
case NOT_EQUAL:
lhs.accept( this );
appendSql( " not like " );
rhs.accept( this );
return;
default:
// Fall through
break;
}
}
// I think intersect is only supported in 16.0 SP3
if ( getDialect().isAnsiNullOn() ) {
if ( isLob ) {
switch ( operator ) {
case DISTINCT_FROM:
appendSql( "case when " );
lhs.accept( this );
appendSql( " like " );
rhs.accept( this );
appendSql( " or " );
lhs.accept( this );
appendSql( " is null and " );
rhs.accept( this );
appendSql( " is null then 0 else 1 end=1" );
return;
case NOT_DISTINCT_FROM:
appendSql( "case when " );
lhs.accept( this );
appendSql( " like " );
rhs.accept( this );
appendSql( " or " );
lhs.accept( this );
appendSql( " is null and " );
rhs.accept( this );
appendSql( " is null then 0 else 1 end=0" );
return;
default:
// Fall through
break;
}
}
if ( supportsDistinctFromPredicate() ) {
renderComparisonEmulateIntersect( lhs, operator, rhs );
}
@ -377,10 +427,20 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
lhs.accept( this );
switch ( operator ) {
case DISTINCT_FROM:
appendSql( "<>" );
if ( isLob ) {
appendSql( " not like " );
}
else {
appendSql( "<>" );
}
break;
case NOT_DISTINCT_FROM:
appendSql( '=' );
if ( isLob ) {
appendSql( " like " );
}
else {
appendSql( '=' );
}
break;
case LESS_THAN:
case GREATER_THAN:
@ -416,6 +476,21 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
}
}
/**
 * Whether the given expression type maps to exactly one JDBC mapping whose DDL type
 * is a LOB-like type (long varchar/varbinary variants, CLOB, NCLOB or BLOB).
 */
public static boolean isLob(JdbcMappingContainer expressionType) {
	if ( expressionType == null || expressionType.getJdbcTypeCount() != 1 ) {
		return false;
	}
	switch ( expressionType.getSingleJdbcMapping().getJdbcType().getDdlTypeCode() ) {
		case SqlTypes.LONG32NVARCHAR:
		case SqlTypes.LONG32VARCHAR:
		case SqlTypes.LONGNVARCHAR:
		case SqlTypes.LONGVARCHAR:
		case SqlTypes.LONG32VARBINARY:
		case SqlTypes.LONGVARBINARY:
		case SqlTypes.CLOB:
		case SqlTypes.NCLOB:
		case SqlTypes.BLOB:
			return true;
		default:
			return false;
	}
}
@Override
protected boolean supportsIntersect() {
// At least the version that

View File

@ -204,7 +204,7 @@ public class SybaseSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
append( '(' );
visitValuesListEmulateSelectUnion( tableReference.getValuesList() );
append( ')' );
renderDerivedTableReference( tableReference );
renderDerivedTableReferenceIdentificationVariable( tableReference );
}
@Override

View File

@ -22,6 +22,7 @@ import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.from.ValuesTableReference;
@ -249,6 +250,11 @@ public class TiDBSqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
emulateQueryPartTableReferenceColumnAliasing( tableReference );
}
@Override
protected void renderDerivedTableReferenceIdentificationVariable(DerivedTableReference tableReference) {
	// TiDB: render only the plain identification variable for derived tables,
	// presumably because the default would also emit a column alias list — TODO confirm
	renderTableReferenceIdentificationVariable( tableReference );
}
@Override
public void visitOffsetFetchClause(QueryPart queryPart) {
if ( !isRowNumberingCurrentQueryPart() ) {

View File

@ -15,6 +15,8 @@ import java.util.Base64;
import java.util.List;
import org.hibernate.Internal;
import org.hibernate.engine.spi.LazySessionWrapperOptions;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.CharSequenceHelper;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
@ -24,6 +26,7 @@ import org.hibernate.metamodel.mapping.internal.EmbeddedAttributeMapping;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.hibernate.type.descriptor.java.IntegerJavaType;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.JdbcDateJavaType;
@ -32,6 +35,8 @@ import org.hibernate.type.descriptor.java.JdbcTimestampJavaType;
import org.hibernate.type.descriptor.java.OffsetDateTimeJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import static java.lang.Character.isLetter;
import static java.lang.Character.isLetterOrDigit;
import static org.hibernate.dialect.StructHelper.getEmbeddedPart;
import static org.hibernate.dialect.StructHelper.instantiate;
@ -486,7 +491,6 @@ public class XmlHelper {
WrapperOptions options,
XMLAppender sb) {
final Object[] array = embeddableMappingType.getValues( value );
final int numberOfAttributes = embeddableMappingType.getNumberOfAttributeMappings();
for ( int i = 0; i < array.length; i++ ) {
if ( array[i] == null ) {
continue;
@ -646,6 +650,28 @@ public class XmlHelper {
return selectableIndex;
}
/**
 * Checks whether the given string is usable as an XML element name:
 * non-empty, a valid start character, only valid name characters afterwards,
 * and not starting with the reserved (case-insensitive) {@code "xml"} prefix.
 */
public static boolean isValidXmlName(String name) {
	if ( name.isEmpty() ) {
		return false;
	}
	if ( !isValidXmlNameStart( name.charAt( 0 ) ) ) {
		return false;
	}
	if ( name.regionMatches( true, 0, "xml", 0, 3 ) ) {
		return false;
	}
	final int length = name.length();
	int i = 1;
	while ( i < length ) {
		if ( !isValidXmlNameChar( name.charAt( i ) ) ) {
			return false;
		}
		i++;
	}
	return true;
}

// First character of an XML name: underscore, colon or a letter
public static boolean isValidXmlNameStart(char c) {
	return c == '_' || c == ':' || isLetter( c );
}

// Subsequent characters additionally allow digits, '-' and '.'
public static boolean isValidXmlNameChar(char c) {
	return c == '_' || c == ':' || c == '-' || c == '.' || isLetterOrDigit( c );
}
private static class XMLAppender extends OutputStream implements SqlAppender {
private final static char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray();
@ -781,4 +807,47 @@ public class XmlHelper {
}
}
/**
 * Determines the root and element tag names that the configured XML format mapper
 * produces for the given plural type, by serializing a probe collection and parsing
 * the tag names out of the resulting XML string.
 *
 * @param pluralJavaType the plural (collection/array) Java type to probe
 * @param sessionFactory gives access to the configured XML format mapper
 * @return the discovered root tag and element tag names
 */
public static CollectionTags determineCollectionTags(BasicPluralJavaType<?> pluralJavaType, SessionFactoryImplementor sessionFactory) {
	//noinspection unchecked
	final JavaType<Object> javaType = (JavaType<Object>) pluralJavaType;
	// Lazily materializes session state only if the mapper needs it; must be cleaned up
	final LazySessionWrapperOptions lazySessionWrapperOptions = new LazySessionWrapperOptions( sessionFactory );
	// Produce the XML string for a collection with a null element to find out the root and element tag names
	final String nullElementXml;
	try {
		// assumes "{null}" parses to a single-element collection containing null — TODO confirm for all java types
		nullElementXml = sessionFactory.getSessionFactoryOptions().getXmlFormatMapper().toString(
				javaType.fromString( "{null}" ),
				javaType,
				lazySessionWrapperOptions
		);
	}
	finally {
		lazySessionWrapperOptions.cleanup();
	}
	// There must be an end tag for the root, so find that first
	final int rootCloseTagPosition = nullElementXml.lastIndexOf( '<' );
	assert nullElementXml.charAt( rootCloseTagPosition + 1 ) == '/';
	final int rootNameStart = rootCloseTagPosition + 2;
	final int rootCloseTagEnd = nullElementXml.indexOf( '>', rootCloseTagPosition );
	final String rootTag = nullElementXml.substring( rootNameStart, rootCloseTagEnd );
	// Then search for the open tag of the root and determine the start of the first item
	final int itemTagStart = nullElementXml.indexOf(
			'<',
			nullElementXml.indexOf( "<" + rootTag + ">" ) + rootTag.length() + 2
	);
	final int itemNameStart = itemTagStart + 1;
	// Scan until the first character that cannot be part of an XML name (e.g. '>', '/')
	int itemNameEnd = itemNameStart;
	for ( int i = itemNameStart + 1; i < nullElementXml.length(); i++ ) {
		if ( !isValidXmlNameChar( nullElementXml.charAt( i ) ) ) {
			itemNameEnd = i;
			break;
		}
	}
	final String elementNodeName = nullElementXml.substring( itemNameStart, itemNameEnd );
	return new CollectionTags( rootTag, elementNodeName );
}

// Pair of root tag name and per-element tag name used for XML collection serialization
public record CollectionTags(String rootName, String elementName) {}
}

View File

@ -13,6 +13,8 @@ import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.metamodel.mapping.internal.SqlTypedMappingImpl;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
@ -39,13 +41,51 @@ public interface AggregateSupport {
* @param aggregateColumn The type information for the aggregate column
* @param column The column within the aggregate type, for which to return the read expression
*/
String aggregateComponentCustomReadExpression(
default String aggregateComponentCustomReadExpression(
String template,
String placeholder,
String aggregateParentReadExpression,
String columnExpression,
AggregateColumn aggregateColumn,
Column column);
Column column) {
return aggregateComponentCustomReadExpression(
template,
placeholder,
aggregateParentReadExpression,
columnExpression,
aggregateColumn.getTypeCode(),
new SqlTypedMappingImpl(
column.getTypeName(),
column.getLength(),
column.getPrecision(),
column.getScale(),
column.getTemporalPrecision(),
column.getType()
)
);
}
/**
* Returns the custom read expression to use for {@code column}.
* Replaces the given {@code placeholder} in the given {@code template}
* by the custom read expression to use for {@code column}.
*
* @param template The custom read expression template of the column
* @param placeholder The placeholder to replace with the actual read expression
* @param aggregateParentReadExpression The expression to the aggregate column, which contains the column
* @param columnExpression The column within the aggregate type, for which to return the read expression
* @param aggregateColumnTypeCode The SQL type code of the aggregate column
* @param column The column within the aggregate type, for which to return the read expression
*
* @since 7.0
*/
String aggregateComponentCustomReadExpression(
String template,
String placeholder,
String aggregateParentReadExpression,
String columnExpression,
int aggregateColumnTypeCode,
SqlTypedMapping column);
/**
* Returns the assignment expression to use for {@code column},

View File

@ -12,6 +12,7 @@ import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.type.spi.TypeConfiguration;
public class AggregateSupportImpl implements AggregateSupport {
@ -19,13 +20,7 @@ public class AggregateSupportImpl implements AggregateSupport {
public static final AggregateSupport INSTANCE = new AggregateSupportImpl();
@Override
public String aggregateComponentCustomReadExpression(
String template,
String placeholder,
String aggregateParentReadExpression,
String columnExpression,
AggregateColumn aggregateColumn,
Column column) {
public String aggregateComponentCustomReadExpression(
		String template,
		String placeholder,
		String aggregateParentReadExpression,
		String columnExpression,
		int aggregateColumnTypeCode,
		SqlTypedMapping column) {
	// Base implementation: reading components out of an aggregate column is not supported
	// unless a dialect-specific AggregateSupport overrides this method.
	// Signature reformatted to one parameter per line, matching the surrounding code style.
	throw new UnsupportedOperationException( "Dialect does not support aggregateComponentCustomReadExpression: " + getClass().getName() );
}

View File

@ -26,6 +26,7 @@ import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.mapping.SqlExpressible;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
@ -47,13 +48,13 @@ public class DB2AggregateSupport extends AggregateSupportImpl {
String placeholder,
String aggregateParentReadExpression,
String columnExpression,
AggregateColumn aggregateColumn,
Column column) {
switch ( aggregateColumn.getTypeCode() ) {
int aggregateColumnTypeCode,
SqlTypedMapping column) {
switch ( aggregateColumnTypeCode ) {
case STRUCT:
return template.replace( placeholder, aggregateParentReadExpression + ".." + columnExpression );
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumn.getTypeCode() );
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnTypeCode );
}
@Override

View File

@ -24,6 +24,7 @@ import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
@ -113,9 +114,9 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
String placeholder,
String aggregateParentReadExpression,
String columnExpression,
AggregateColumn aggregateColumn,
Column column) {
switch ( aggregateColumn.getTypeCode() ) {
int aggregateColumnTypeCode,
SqlTypedMapping column) {
switch ( aggregateColumnTypeCode ) {
case JSON:
String jsonTypeName = "json";
switch ( jsonSupport ) {
@ -132,14 +133,14 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
else {
parentPartExpression = aggregateParentReadExpression + ",'$.";
}
switch ( column.getTypeCode() ) {
switch ( column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode() ) {
case BIT:
return template.replace(
placeholder,
"decode(json_value(" + parentPartExpression + columnExpression + "'),'true',1,'false',0,null)"
);
case BOOLEAN:
if ( column.getTypeName().toLowerCase( Locale.ROOT ).trim().startsWith( "number" ) ) {
if ( column.getColumnDefinition().toLowerCase( Locale.ROOT ).trim().startsWith( "number" ) ) {
return template.replace(
placeholder,
"decode(json_value(" + parentPartExpression + columnExpression + "'),'true',1,'false',0,null)"
@ -152,7 +153,7 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
case BIGINT:
return template.replace(
placeholder,
"json_value(" + parentPartExpression + columnExpression + "' returning " + column.getTypeName() + ')'
"json_value(" + parentPartExpression + columnExpression + "' returning " + column.getColumnDefinition() + ')'
);
case DATE:
return template.replace(
@ -189,10 +190,10 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
// We encode binary data as hex, so we have to decode here
return template.replace(
placeholder,
"(select * from json_table(" + aggregateParentReadExpression + ",'$' columns (" + columnExpression + " " + column.getTypeName() + " path '$." + columnExpression + "')))"
"(select * from json_table(" + aggregateParentReadExpression + ",'$' columns (" + columnExpression + " " + column.getColumnDefinition() + " path '$." + columnExpression + "')))"
);
case ARRAY:
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) column.getValue().getType();
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) column.getJdbcMapping();
final OracleArrayJdbcType jdbcType = (OracleArrayJdbcType) pluralType.getJdbcType();
switch ( jdbcType.getElementJdbcType().getDefaultSqlTypeCode() ) {
case BOOLEAN:
@ -211,7 +212,7 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
default:
return template.replace(
placeholder,
"json_value(" + parentPartExpression + columnExpression + "' returning " + column.getTypeName() + ')'
"json_value(" + parentPartExpression + columnExpression + "' returning " + column.getColumnDefinition() + ')'
);
}
case JSON:
@ -222,7 +223,7 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
default:
return template.replace(
placeholder,
"cast(json_value(" + parentPartExpression + columnExpression + "') as " + column.getTypeName() + ')'
"cast(json_value(" + parentPartExpression + columnExpression + "') as " + column.getColumnDefinition() + ')'
);
}
case NONE:
@ -233,7 +234,7 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
case STRUCT_TABLE:
return template.replace( placeholder, aggregateParentReadExpression + "." + columnExpression );
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumn.getTypeCode() );
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnTypeCode );
}
@Override

View File

@ -14,6 +14,7 @@ import org.hibernate.mapping.Column;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
@ -51,12 +52,12 @@ public class PostgreSQLAggregateSupport extends AggregateSupportImpl {
String placeholder,
String aggregateParentReadExpression,
String columnExpression,
AggregateColumn aggregateColumn,
Column column) {
switch ( aggregateColumn.getTypeCode() ) {
int aggregateColumnTypeCode,
SqlTypedMapping column) {
switch ( aggregateColumnTypeCode ) {
case JSON_ARRAY:
case JSON:
switch ( column.getTypeCode() ) {
switch ( column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode() ) {
case JSON:
return template.replace(
placeholder,
@ -71,7 +72,7 @@ public class PostgreSQLAggregateSupport extends AggregateSupportImpl {
"decode(" + aggregateParentReadExpression + "->>'" + columnExpression + "','hex')"
);
case ARRAY:
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) column.getValue().getType();
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) column.getJdbcMapping();
switch ( pluralType.getElementType().getJdbcType().getDefaultSqlTypeCode() ) {
case BOOLEAN:
case TINYINT:
@ -85,7 +86,7 @@ public class PostgreSQLAggregateSupport extends AggregateSupportImpl {
// because casting a jsonb[] to text[] will not omit the quotes of the jsonb text values
return template.replace(
placeholder,
"cast(array(select jsonb_array_elements(" + aggregateParentReadExpression + "->'" + columnExpression + "')) as " + column.getTypeName() + ')'
"cast(array(select jsonb_array_elements(" + aggregateParentReadExpression + "->'" + columnExpression + "')) as " + column.getColumnDefinition() + ')'
);
case BINARY:
case VARBINARY:
@ -98,13 +99,13 @@ public class PostgreSQLAggregateSupport extends AggregateSupportImpl {
default:
return template.replace(
placeholder,
"cast(array(select jsonb_array_elements_text(" + aggregateParentReadExpression + "->'" + columnExpression + "')) as " + column.getTypeName() + ')'
"cast(array(select jsonb_array_elements_text(" + aggregateParentReadExpression + "->'" + columnExpression + "')) as " + column.getColumnDefinition() + ')'
);
}
default:
return template.replace(
placeholder,
"cast(" + aggregateParentReadExpression + "->>'" + columnExpression + "' as " + column.getTypeName() + ')'
"cast(" + aggregateParentReadExpression + "->>'" + columnExpression + "' as " + column.getColumnDefinition() + ')'
);
}
case STRUCT:
@ -112,7 +113,7 @@ public class PostgreSQLAggregateSupport extends AggregateSupportImpl {
case STRUCT_TABLE:
return template.replace( placeholder, '(' + aggregateParentReadExpression + ")." + columnExpression );
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumn.getTypeCode() );
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnTypeCode );
}
private static String jsonCustomWriteExpression(String customWriteExpression, JdbcMapping jdbcMapping) {

View File

@ -7,75 +7,11 @@ package org.hibernate.dialect.function;
import java.util.Date;
import java.util.Arrays;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.function.array.ArrayAggFunction;
import org.hibernate.dialect.function.array.ArrayAndElementArgumentTypeResolver;
import org.hibernate.dialect.function.array.ArrayAndElementArgumentValidator;
import org.hibernate.dialect.function.array.ArrayArgumentValidator;
import org.hibernate.dialect.function.array.ArrayConcatElementFunction;
import org.hibernate.dialect.function.array.ArrayConcatFunction;
import org.hibernate.dialect.function.array.ArrayConstructorFunction;
import org.hibernate.dialect.function.array.ArrayContainsOperatorFunction;
import org.hibernate.dialect.function.array.ArrayContainsUnnestFunction;
import org.hibernate.dialect.function.array.ArrayIncludesOperatorFunction;
import org.hibernate.dialect.function.array.ArrayIncludesUnnestFunction;
import org.hibernate.dialect.function.array.ArrayIntersectsOperatorFunction;
import org.hibernate.dialect.function.array.ArrayIntersectsUnnestFunction;
import org.hibernate.dialect.function.array.ArrayGetUnnestFunction;
import org.hibernate.dialect.function.array.ArrayRemoveIndexUnnestFunction;
import org.hibernate.dialect.function.array.ArrayReplaceUnnestFunction;
import org.hibernate.dialect.function.array.ArraySetUnnestFunction;
import org.hibernate.dialect.function.array.ArraySliceUnnestFunction;
import org.hibernate.dialect.function.array.ArrayToStringFunction;
import org.hibernate.dialect.function.array.ArrayViaArgumentReturnTypeResolver;
import org.hibernate.dialect.function.array.CockroachArrayFillFunction;
import org.hibernate.dialect.function.array.ElementViaArrayArgumentReturnTypeResolver;
import org.hibernate.dialect.function.array.H2ArrayContainsFunction;
import org.hibernate.dialect.function.array.H2ArrayFillFunction;
import org.hibernate.dialect.function.array.H2ArrayIncludesFunction;
import org.hibernate.dialect.function.array.H2ArrayIntersectsFunction;
import org.hibernate.dialect.function.array.H2ArrayPositionFunction;
import org.hibernate.dialect.function.array.H2ArrayPositionsFunction;
import org.hibernate.dialect.function.array.H2ArrayRemoveFunction;
import org.hibernate.dialect.function.array.H2ArrayRemoveIndexFunction;
import org.hibernate.dialect.function.array.H2ArrayReplaceFunction;
import org.hibernate.dialect.function.array.H2ArraySetFunction;
import org.hibernate.dialect.function.array.H2ArrayToStringFunction;
import org.hibernate.dialect.function.array.HSQLArrayConstructorFunction;
import org.hibernate.dialect.function.array.HSQLArrayFillFunction;
import org.hibernate.dialect.function.array.HSQLArrayPositionFunction;
import org.hibernate.dialect.function.array.HSQLArrayPositionsFunction;
import org.hibernate.dialect.function.array.HSQLArrayRemoveFunction;
import org.hibernate.dialect.function.array.HSQLArraySetFunction;
import org.hibernate.dialect.function.array.HSQLArrayToStringFunction;
import org.hibernate.dialect.function.array.OracleArrayConcatElementFunction;
import org.hibernate.dialect.function.array.OracleArrayConcatFunction;
import org.hibernate.dialect.function.array.OracleArrayFillFunction;
import org.hibernate.dialect.function.array.OracleArrayIncludesFunction;
import org.hibernate.dialect.function.array.OracleArrayIntersectsFunction;
import org.hibernate.dialect.function.array.OracleArrayGetFunction;
import org.hibernate.dialect.function.array.OracleArrayLengthFunction;
import org.hibernate.dialect.function.array.OracleArrayPositionFunction;
import org.hibernate.dialect.function.array.OracleArrayPositionsFunction;
import org.hibernate.dialect.function.array.OracleArrayRemoveFunction;
import org.hibernate.dialect.function.array.OracleArrayRemoveIndexFunction;
import org.hibernate.dialect.function.array.OracleArrayReplaceFunction;
import org.hibernate.dialect.function.array.OracleArraySetFunction;
import org.hibernate.dialect.function.array.OracleArraySliceFunction;
import org.hibernate.dialect.function.array.OracleArrayToStringFunction;
import org.hibernate.dialect.function.array.OracleArrayTrimFunction;
import org.hibernate.dialect.function.array.PostgreSQLArrayConcatElementFunction;
import org.hibernate.dialect.function.array.PostgreSQLArrayConcatFunction;
import org.hibernate.dialect.function.array.PostgreSQLArrayFillFunction;
import org.hibernate.dialect.function.array.PostgreSQLArrayPositionFunction;
import org.hibernate.dialect.function.array.PostgreSQLArrayConstructorFunction;
import org.hibernate.dialect.function.array.OracleArrayAggEmulation;
import org.hibernate.dialect.function.array.OracleArrayConstructorFunction;
import org.hibernate.dialect.function.array.OracleArrayContainsFunction;
import org.hibernate.dialect.function.array.PostgreSQLArrayPositionsFunction;
import org.hibernate.dialect.function.array.PostgreSQLArrayTrimEmulation;
import org.hibernate.dialect.function.array.*;
import org.hibernate.dialect.function.json.CockroachDBJsonExistsFunction;
import org.hibernate.dialect.function.json.CockroachDBJsonQueryFunction;
import org.hibernate.dialect.function.json.CockroachDBJsonRemoveFunction;
@ -4294,4 +4230,61 @@ public class CommonFunctionFactory {
/**
 * Registers the SQL Server {@code xmlagg()} aggregate function.
 */
public void xmlagg_sqlserver() {
	final SQLServerXmlAggFunction xmlAggFunction = new SQLServerXmlAggFunction( typeConfiguration );
	functionRegistry.register( "xmlagg", xmlAggFunction );
}
/**
 * Registers the standard {@code unnest()} set-returning function.
 *
 * @param defaultBasicArrayElementColumnName the column name the database exposes for basic
 *        array elements, or {@code null} if the dialect has no fixed default
 * @param defaultIndexSelectionExpression the expression used to select the ordinality/index column
 */
public void unnest(@Nullable String defaultBasicArrayElementColumnName, String defaultIndexSelectionExpression) {
	final UnnestFunction unnestFunction =
			new UnnestFunction( defaultBasicArrayElementColumnName, defaultIndexSelectionExpression );
	functionRegistry.register( "unnest", unnestFunction );
}
/**
 * Registers the standard {@code unnest()} function for databases that don't support arrays natively.
 */
public void unnest_emulated() {
	// The concrete element/index names are irrelevant for the emulation, so any token works
	unnest( "v", "i" );
}
/**
 * Registers the H2 {@code unnest()} function.
 *
 * @param maxArraySize the maximum array cardinality supported by the H2 sequence-join emulation
 */
public void unnest_h2(int maxArraySize) {
	final H2UnnestFunction h2UnnestFunction = new H2UnnestFunction( maxArraySize );
	functionRegistry.register( "unnest", h2UnnestFunction );
}
/**
 * Registers the Oracle {@code unnest()} function.
 */
public void unnest_oracle() {
	final OracleUnnestFunction oracleUnnestFunction = new OracleUnnestFunction();
	functionRegistry.register( "unnest", oracleUnnestFunction );
}
/**
 * Registers the PostgreSQL {@code unnest()} function.
 */
public void unnest_postgresql() {
	final PostgreSQLUnnestFunction postgreSQLUnnestFunction = new PostgreSQLUnnestFunction();
	functionRegistry.register( "unnest", postgreSQLUnnestFunction );
}
/**
 * Registers the SQL Server {@code unnest()} function.
 */
public void unnest_sqlserver() {
	final SQLServerUnnestFunction sqlServerUnnestFunction = new SQLServerUnnestFunction();
	functionRegistry.register( "unnest", sqlServerUnnestFunction );
}
/**
 * Registers the Sybase ASE {@code unnest()} function.
 */
public void unnest_sybasease() {
	final SybaseASEUnnestFunction sybaseASEUnnestFunction = new SybaseASEUnnestFunction();
	functionRegistry.register( "unnest", sybaseASEUnnestFunction );
}
/**
 * Registers the SAP HANA {@code unnest()} function.
 */
public void unnest_hana() {
	final HANAUnnestFunction hanaUnnestFunction = new HANAUnnestFunction();
	functionRegistry.register( "unnest", hanaUnnestFunction );
}
}

View File

@ -0,0 +1,222 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect.function;
import java.util.ArrayList;
import java.util.List;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.metamodel.mapping.AttributeMapping;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.mapping.MappingType;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.metamodel.mapping.internal.SelectableMappingImpl;
import org.hibernate.query.derived.AnonymousTupleType;
import org.hibernate.query.sqm.SqmExpressible;
import org.hibernate.query.sqm.produce.function.SetReturningFunctionTypeResolver;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * Type resolver for the {@code unnest()} set-returning function.
 * <p>
 * Resolves the tuple type of the rows produced by unnesting a {@link BasicPluralType}
 * argument: either a single element column plus an index column, or — for aggregate
 * (struct-like) element types — one column per embeddable sub-attribute plus the index.
 *
 * @since 7.0
 */
public class UnnestSetReturningFunctionTypeResolver implements SetReturningFunctionTypeResolver {

	// Column name the database exposes for basic array elements,
	// or null to fall back to the table identifier variable
	protected final @Nullable String defaultBasicArrayColumnName;
	// Expression used to select the ordinality/index column
	protected final String defaultIndexSelectionExpression;

	public UnnestSetReturningFunctionTypeResolver(@Nullable String defaultBasicArrayColumnName, String defaultIndexSelectionExpression) {
		this.defaultBasicArrayColumnName = defaultBasicArrayColumnName;
		this.defaultIndexSelectionExpression = defaultIndexSelectionExpression;
	}

	/**
	 * Determines the anonymous tuple type of the unnested rows from the SQM argument.
	 *
	 * @throws IllegalArgumentException if the argument type cannot be determined
	 *         or is not a {@link BasicPluralType}
	 */
	@Override
	public AnonymousTupleType<?> resolveTupleType(List<? extends SqmTypedNode<?>> arguments, TypeConfiguration typeConfiguration) {
		final SqmTypedNode<?> arrayArgument = arguments.get( 0 );
		final SqmExpressible<?> expressible = arrayArgument.getExpressible();
		if ( expressible == null ) {
			throw new IllegalArgumentException( "Couldn't determine array type of argument to function 'unnest'" );
		}
		if ( !( expressible.getSqmType() instanceof BasicPluralType<?,?> pluralType ) ) {
			throw new IllegalArgumentException( "Argument passed to function 'unnest' is not a BasicPluralType. Found: " + expressible );
		}
		final BasicType<?> elementType = pluralType.getElementType();
		final SqmExpressible<?>[] componentTypes;
		final String[] componentNames;
		if ( elementType.getJdbcType() instanceof AggregateJdbcType aggregateJdbcType
				&& aggregateJdbcType.getEmbeddableMappingType() != null ) {
			// Aggregate element type: expose one component per embeddable attribute
			final EmbeddableMappingType embeddableMappingType = aggregateJdbcType.getEmbeddableMappingType();
			// determineComponentTypes() reserves one trailing slot for the index component
			componentTypes = determineComponentTypes( embeddableMappingType );
			componentNames = new String[componentTypes.length];
			final int numberOfAttributeMappings = embeddableMappingType.getNumberOfAttributeMappings();
			int index = 0;
			for ( int i = 0; i < numberOfAttributeMappings; i++ ) {
				final AttributeMapping attributeMapping = embeddableMappingType.getAttributeMapping( i );
				if ( attributeMapping.getMappedType() instanceof SqmExpressible<?> ) {
					componentNames[index++] = attributeMapping.getAttributeName();
				}
			}
			// The remaining slot holds the ordinality/index component
			assert index == componentNames.length - 1;
			componentTypes[index] = typeConfiguration.getBasicTypeForJavaType( Long.class );
			componentNames[index] = CollectionPart.Nature.INDEX.getName();
		}
		else {
			// Basic element type: a single element component plus the index component
			componentTypes = new SqmExpressible<?>[]{ elementType, typeConfiguration.getBasicTypeForJavaType( Long.class ) };
			componentNames = new String[]{ CollectionPart.Nature.ELEMENT.getName(), CollectionPart.Nature.INDEX.getName() };
		}
		return new AnonymousTupleType<>( componentTypes, componentNames );
	}

	/**
	 * Determines the selectable mappings of the unnested rows from the SQL AST argument.
	 *
	 * @param withOrdinality whether an additional ordinality/index column is requested
	 * @throws IllegalArgumentException if the argument type cannot be determined
	 *         or is not a {@link BasicPluralType}
	 */
	@Override
	public SelectableMapping[] resolveFunctionReturnType(
			List<? extends SqlAstNode> arguments,
			String tableIdentifierVariable,
			boolean withOrdinality,
			TypeConfiguration typeConfiguration) {
		final Expression expression = (Expression) arguments.get( 0 );
		final JdbcMappingContainer expressionType = expression.getExpressionType();
		if ( expressionType == null ) {
			throw new IllegalArgumentException( "Couldn't determine array type of argument to function 'unnest'" );
		}
		if ( !( expressionType.getSingleJdbcMapping() instanceof BasicPluralType<?,?> pluralType ) ) {
			throw new IllegalArgumentException( "Argument passed to function 'unnest' is not a BasicPluralType. Found: " + expressionType );
		}
		final SelectableMapping indexMapping = withOrdinality ? new SelectableMappingImpl(
				"",
				defaultIndexSelectionExpression,
				new SelectablePath( CollectionPart.Nature.INDEX.getName() ),
				null,
				null,
				null,
				null,
				null,
				null,
				null,
				false,
				false,
				false,
				false,
				false,
				false,
				typeConfiguration.getBasicTypeForJavaType( Long.class )
		) : null;
		final BasicType<?> elementType = pluralType.getElementType();
		final SelectableMapping[] returnType;
		if ( elementType.getJdbcType() instanceof AggregateJdbcType aggregateJdbcType
				&& aggregateJdbcType.getEmbeddableMappingType() != null ) {
			// Aggregate element type: one selectable per embeddable JDBC value
			final EmbeddableMappingType embeddableMappingType = aggregateJdbcType.getEmbeddableMappingType();
			final int jdbcValueCount = embeddableMappingType.getJdbcValueCount();
			returnType = new SelectableMapping[jdbcValueCount + (indexMapping == null ? 0 : 1)];
			for ( int i = 0; i < jdbcValueCount; i++ ) {
				final SelectableMapping selectableMapping = embeddableMappingType.getJdbcValueSelectable( i );
				final String selectableName = selectableMapping.getSelectableName();
				returnType[i] = new SelectableMappingImpl(
						selectableMapping.getContainingTableExpression(),
						selectableName,
						new SelectablePath( selectableName ),
						null,
						null,
						selectableMapping.getColumnDefinition(),
						selectableMapping.getLength(),
						selectableMapping.getPrecision(),
						selectableMapping.getScale(),
						selectableMapping.getTemporalPrecision(),
						selectableMapping.isLob(),
						true,
						false,
						false,
						false,
						selectableMapping.isFormula(),
						selectableMapping.getJdbcMapping()
				);
			}
			// Fix: the index mapping is appended once, after the loop. Previously this
			// assignment sat inside the loop, redundantly re-assigning the same slot on
			// every iteration and never assigning it at all when jdbcValueCount == 0,
			// which would have left a null element in the returned array.
			// This also matches the placement in H2UnnestFunction's resolver.
			if ( indexMapping != null ) {
				returnType[jdbcValueCount] = indexMapping;
			}
		}
		else {
			// Basic element type: single element selectable (plus optional index)
			final String elementSelectionExpression = defaultBasicArrayColumnName == null
					? tableIdentifierVariable
					: defaultBasicArrayColumnName;
			final SelectableMapping elementMapping;
			if ( expressionType instanceof SqlTypedMapping typedMapping ) {
				// Propagate the size/precision information of the typed array mapping
				elementMapping = new SelectableMappingImpl(
						"",
						elementSelectionExpression,
						new SelectablePath( CollectionPart.Nature.ELEMENT.getName() ),
						null,
						null,
						typedMapping.getColumnDefinition(),
						typedMapping.getLength(),
						typedMapping.getPrecision(),
						typedMapping.getScale(),
						typedMapping.getTemporalPrecision(),
						typedMapping.isLob(),
						true,
						false,
						false,
						false,
						false,
						elementType
				);
			}
			else {
				elementMapping = new SelectableMappingImpl(
						"",
						elementSelectionExpression,
						new SelectablePath( CollectionPart.Nature.ELEMENT.getName() ),
						null,
						null,
						null,
						null,
						null,
						null,
						null,
						false,
						true,
						false,
						false,
						false,
						false,
						elementType
				);
			}
			if ( indexMapping == null ) {
				returnType = new SelectableMapping[]{ elementMapping };
			}
			else {
				returnType = new SelectableMapping[] {elementMapping, indexMapping};
			}
		}
		return returnType;
	}

	/**
	 * Collects the {@link SqmExpressible} attribute types of the embeddable.
	 * <p>
	 * The returned array is intentionally one slot longer than the number of collected
	 * types ({@code Collection.toArray(T[])} null-fills the surplus); the caller fills
	 * that trailing slot with the ordinality/index type.
	 */
	private static SqmExpressible<?>[] determineComponentTypes(EmbeddableMappingType embeddableMappingType) {
		final int numberOfAttributeMappings = embeddableMappingType.getNumberOfAttributeMappings();
		final ArrayList<SqmExpressible<?>> expressibles = new ArrayList<>( numberOfAttributeMappings + 1 );
		for ( int i = 0; i < numberOfAttributeMappings; i++ ) {
			final AttributeMapping attributeMapping = embeddableMappingType.getAttributeMapping( i );
			final MappingType mappedType = attributeMapping.getMappedType();
			if ( mappedType instanceof SqmExpressible<?> ) {
				expressibles.add( (SqmExpressible<?>) mappedType );
			}
		}
		return expressibles.toArray( new SqmExpressible<?>[expressibles.size() + 1] );
	}
}

View File

@ -0,0 +1,338 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect.function.array;
import java.util.List;
import org.hibernate.dialect.function.UnnestSetReturningFunctionTypeResolver;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.NullnessUtil;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.metamodel.mapping.internal.SelectableMappingImpl;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.derived.AnonymousTupleType;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.query.sqm.function.SelfRenderingSqmSetReturningFunction;
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.Template;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.FromClauseAccess;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.SelfRenderingExpression;
import org.hibernate.sql.ast.tree.from.FunctionTableGroup;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableGroupJoin;
import org.hibernate.sql.ast.tree.predicate.ComparisonPredicate;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * H2 unnest function.
 * <p>
 * H2 does not support "lateral" i.e. the use of a from node within another,
 * but we can apply the same trick that we already applied everywhere else for H2,
 * which is to join a sequence table to emulate array element rows
 * and eliminate non-existing array elements by checking the index against array length.
 * Finally, we rewrite the selection expressions to access the array by joined sequence index.
 */
public class H2UnnestFunction extends UnnestFunction {

	// Upper bound of the system_range() sequence used to emulate array element rows
	private final int maximumArraySize;

	public H2UnnestFunction(int maximumArraySize) {
		super( new H2UnnestSetReturningFunctionTypeResolver() );
		this.maximumArraySize = maximumArraySize;
	}

	@Override
	protected <T> SelfRenderingSqmSetReturningFunction<T> generateSqmSetReturningFunctionExpression(
			List<? extends SqmTypedNode<?>> arguments,
			QueryEngine queryEngine) {
		//noinspection unchecked
		return new SelfRenderingSqmSetReturningFunction<>(
				this,
				this,
				arguments,
				getArgumentsValidator(),
				getSetReturningTypeResolver(),
				(AnonymousTupleType<T>) getSetReturningTypeResolver().resolveTupleType( arguments, queryEngine.getTypeConfiguration() ),
				queryEngine.getCriteriaBuilder(),
				getName()
		) {
			@Override
			public TableGroup convertToSqlAst(
					NavigablePath navigablePath,
					String identifierVariable,
					boolean lateral,
					boolean canUseInnerJoins,
					boolean withOrdinality,
					SqmToSqlAstConverter walker) {
				// Register a transformer that adds a join predicate "array_length(array) <= index"
				final FunctionTableGroup functionTableGroup = (FunctionTableGroup) super.convertToSqlAst(
						navigablePath,
						identifierVariable,
						lateral,
						canUseInnerJoins,
						withOrdinality,
						walker
				);
				//noinspection unchecked
				final List<SqlAstNode> sqlArguments = (List<SqlAstNode>) functionTableGroup.getPrimaryTableReference()
						.getFunctionExpression()
						.getArguments();
				// Can only do this transformation if the argument is a column reference
				final ColumnReference columnReference = ( (Expression) sqlArguments.get( 0 ) ).getColumnReference();
				if ( columnReference != null ) {
					final String tableQualifier = columnReference.getQualifier();
					// Find the table group which the unnest argument refers to
					final FromClauseAccess fromClauseAccess = walker.getFromClauseAccess();
					final TableGroup sourceTableGroup =
							fromClauseAccess.findTableGroupByIdentificationVariable( tableQualifier );
					if ( sourceTableGroup != null ) {
						// Register a query transformer to register a join predicate
						walker.registerQueryTransformer( (cteContainer, querySpec, converter) -> {
							// Locate the table group that joins this function table group,
							// so the bounding predicate can be attached to that join
							final TableGroup parentTableGroup = querySpec.getFromClause().queryTableGroups(
									tg -> tg.findTableGroupJoin( functionTableGroup ) == null ? null : tg
							);
							final TableGroupJoin join = parentTableGroup.findTableGroupJoin( functionTableGroup );
							final BasicType<Integer> integerType = walker.getCreationContext()
									.getSessionFactory()
									.getNodeBuilder()
									.getIntegerType();
							// LHS: coalesce(array_length(array),0) — treats a null array as empty
							final Expression lhs = new SelfRenderingExpression() {
								@Override
								public void renderToSql(
										SqlAppender sqlAppender,
										SqlAstTranslator<?> walker,
										SessionFactoryImplementor sessionFactory) {
									sqlAppender.append( "coalesce(array_length(" );
									columnReference.accept( walker );
									sqlAppender.append( "),0)" );
								}

								@Override
								public JdbcMappingContainer getExpressionType() {
									return integerType;
								}
							};
							// RHS: the sequence value produced by system_range()
							final Expression rhs = new ColumnReference(
									functionTableGroup.getPrimaryTableReference().getIdentificationVariable(),
									// The default column name for the system_range function
									"x",
									false,
									null,
									integerType
							);
							// Eliminate sequence rows beyond the actual array length
							join.applyPredicate( new ComparisonPredicate( lhs, ComparisonOperator.GREATER_THAN_OR_EQUAL, rhs ) );
							return querySpec;
						} );
					}
				}
				return functionTableGroup;
			}
		};
	}

	// The JSON-based rendering path delegates to the same system_range() emulation
	@Override
	protected void renderJsonTable(
			SqlAppender sqlAppender,
			Expression array,
			BasicPluralType<?, ?> pluralType,
			@Nullable SqlTypedMapping sqlTypedMapping,
			AnonymousTupleTableGroupProducer tupleType,
			String tableIdentifierVariable,
			SqlAstTranslator<?> walker) {
		renderUnnest( sqlAppender, array, pluralType, sqlTypedMapping, tupleType, tableIdentifierVariable, walker );
	}

	@Override
	protected void renderUnnest(
			SqlAppender sqlAppender,
			Expression array,
			BasicPluralType<?, ?> pluralType,
			@Nullable SqlTypedMapping sqlTypedMapping,
			AnonymousTupleTableGroupProducer tupleType,
			String tableIdentifierVariable,
			SqlAstTranslator<?> walker) {
		final ColumnReference columnReference = array.getColumnReference();
		if ( columnReference != null ) {
			// Column reference: emulate via a sequence table bounded by the configured maximum;
			// the join predicate registered in convertToSqlAst() trims excess rows
			sqlAppender.append( "system_range(1," );
			sqlAppender.append( Integer.toString( maximumArraySize ) );
			sqlAppender.append( ")" );
		}
		else {
			// Non-column argument (e.g. parameter/literal): fall back to the plain unnest rendering
			super.renderUnnest( sqlAppender, array, pluralType, sqlTypedMapping, tupleType, tableIdentifierVariable, walker );
		}
	}

	/**
	 * Return-type resolver for the H2 emulation: when the argument is a column reference,
	 * element values are read via {@code array_get(array, index)} against the joined
	 * {@code system_range()} index column instead of the native unnest output columns.
	 */
	private static class H2UnnestSetReturningFunctionTypeResolver extends UnnestSetReturningFunctionTypeResolver {

		public H2UnnestSetReturningFunctionTypeResolver() {
			// c1 is the default column name for the "unnest()" function
			super( "c1", "nord" );
		}

		@Override
		public SelectableMapping[] resolveFunctionReturnType(
				List<? extends SqlAstNode> arguments,
				String tableIdentifierVariable,
				boolean withOrdinality,
				TypeConfiguration typeConfiguration) {
			final Expression expression = (Expression) arguments.get( 0 );
			final JdbcMappingContainer expressionType = expression.getExpressionType();
			if ( expressionType == null ) {
				throw new IllegalArgumentException( "Couldn't determine array type of argument to function 'unnest'" );
			}
			if ( !( expressionType.getSingleJdbcMapping() instanceof BasicPluralType<?,?> pluralType ) ) {
				throw new IllegalArgumentException( "Argument passed to function 'unnest' is not a BasicPluralType. Found: " + expressionType );
			}
			// For column references the index comes from system_range()'s "x" column;
			// otherwise the inherited default index expression applies
			final SelectableMapping indexMapping = withOrdinality ? new SelectableMappingImpl(
					"",
					expression.getColumnReference() != null ? "x" : defaultIndexSelectionExpression,
					new SelectablePath( CollectionPart.Nature.INDEX.getName() ),
					null,
					null,
					null,
					null,
					null,
					null,
					null,
					false,
					false,
					false,
					false,
					false,
					false,
					typeConfiguration.getBasicTypeForJavaType( Long.class )
			) : null;
			final BasicType<?> elementType = pluralType.getElementType();
			final SelectableMapping[] returnType;
			if ( elementType.getJdbcType() instanceof AggregateJdbcType aggregateJdbcType
					&& aggregateJdbcType.getEmbeddableMappingType() != null ) {
				final ColumnReference arrayColumnReference = expression.getColumnReference();
				if ( arrayColumnReference == null ) {
					throw new IllegalArgumentException( "Argument passed to function 'unnest' is not a column reference, but an aggregate type, which is not yet supported." );
				}
				// For column references we render an emulation through system_range(),
				// so we need to render an array access to get to the element
				final String elementReadExpression = "array_get(" + arrayColumnReference.getExpressionText() + "," + Template.TEMPLATE + ".x)";
				final String arrayReadExpression = NullnessUtil.castNonNull( arrayColumnReference.getReadExpression() );
				final EmbeddableMappingType embeddableMappingType = aggregateJdbcType.getEmbeddableMappingType();
				final int jdbcValueCount = embeddableMappingType.getJdbcValueCount();
				returnType = new SelectableMapping[jdbcValueCount + (indexMapping == null ? 0 : 1)];
				for ( int i = 0; i < jdbcValueCount; i++ ) {
					final SelectableMapping selectableMapping = embeddableMappingType.getJdbcValueSelectable( i );
					// The array expression has to be replaced with the actual array_get read expression in this emulation
					final String customReadExpression = selectableMapping.getCustomReadExpression()
							.replace( arrayReadExpression, elementReadExpression );
					returnType[i] = new SelectableMappingImpl(
							selectableMapping.getContainingTableExpression(),
							selectableMapping.getSelectablePath().getSelectableName(),
							new SelectablePath( selectableMapping.getSelectablePath().getSelectableName() ),
							customReadExpression,
							selectableMapping.getCustomWriteExpression(),
							selectableMapping.getColumnDefinition(),
							selectableMapping.getLength(),
							selectableMapping.getPrecision(),
							selectableMapping.getScale(),
							selectableMapping.getTemporalPrecision(),
							selectableMapping.isLob(),
							true,
							false,
							false,
							false,
							selectableMapping.isFormula(),
							selectableMapping.getJdbcMapping()
					);
				}
				// Append the ordinality column after all element columns
				if ( indexMapping != null ) {
					returnType[jdbcValueCount] = indexMapping;
				}
			}
			else {
				final String elementSelectionExpression;
				final String elementReadExpression;
				final ColumnReference columnReference = expression.getColumnReference();
				if ( columnReference != null ) {
					// For column references we render an emulation through system_range(),
					// so we need to render an array access to get to the element
					elementSelectionExpression = columnReference.getColumnExpression();
					elementReadExpression = "array_get(" + columnReference.getExpressionText() + "," + Template.TEMPLATE + ".x)";
				}
				else {
					elementSelectionExpression = defaultBasicArrayColumnName;
					elementReadExpression = null;
				}
				final SelectableMapping elementMapping;
				if ( expressionType instanceof SqlTypedMapping typedMapping ) {
					// Propagate the size/precision information of the typed array mapping
					elementMapping = new SelectableMappingImpl(
							"",
							elementSelectionExpression,
							new SelectablePath( CollectionPart.Nature.ELEMENT.getName() ),
							elementReadExpression,
							null,
							typedMapping.getColumnDefinition(),
							typedMapping.getLength(),
							typedMapping.getPrecision(),
							typedMapping.getScale(),
							typedMapping.getTemporalPrecision(),
							typedMapping.isLob(),
							true,
							false,
							false,
							false,
							false,
							elementType
					);
				}
				else {
					elementMapping = new SelectableMappingImpl(
							"",
							elementSelectionExpression,
							new SelectablePath( CollectionPart.Nature.ELEMENT.getName() ),
							elementReadExpression,
							null,
							null,
							null,
							null,
							null,
							null,
							false,
							true,
							false,
							false,
							false,
							false,
							elementType
					);
				}
				if ( indexMapping == null ) {
					returnType = new SelectableMapping[]{ elementMapping };
				}
				else {
					returnType = new SelectableMapping[] {elementMapping, indexMapping};
				}
			}
			return returnType;
		}
	}
}

View File

@ -0,0 +1,533 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect.function.array;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.hibernate.QueryException;
import org.hibernate.dialect.XmlHelper;
import org.hibernate.dialect.function.json.ExpressionTypeHelper;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.EmbeddableValuedModelPart;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.EntityValuedModelPart;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.mapping.ModelPartContainer;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.metamodel.mapping.ValuedModelPart;
import org.hibernate.metamodel.mapping.internal.EmbeddedCollectionPart;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.derived.AnonymousTupleType;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.query.sqm.function.SelfRenderingSqmSetReturningFunction;
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.internal.ColumnQualifierCollectorSqlAstWalker;
import org.hibernate.sql.ast.spi.FromClauseAccess;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.cte.CteColumn;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.cte.CteTable;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.SelfRenderingExpression;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.from.FunctionTableGroup;
import org.hibernate.sql.ast.tree.from.StandardTableGroup;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableGroupJoin;
import org.hibernate.sql.ast.tree.from.TableGroupProducer;
import org.hibernate.sql.ast.tree.predicate.ComparisonPredicate;
import org.hibernate.sql.ast.tree.predicate.NullnessPredicate;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.results.internal.SqlSelectionImpl;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* HANA unnest function.
*/
public class HANAUnnestFunction extends UnnestFunction {

	public HANAUnnestFunction() {
		// "v" is the default column name for basic array elements, "i" the ordinality/index column
		super( "v", "i" );
	}

	/**
	 * Produces the SQM node for {@code unnest()}, overriding SQL AST conversion so that
	 * lateral-style correlation can be emulated on HANA (see the comment inside
	 * {@code convertToSqlAst} for the full explanation of the CTE-based trick).
	 */
	@Override
	protected <T> SelfRenderingSqmSetReturningFunction<T> generateSqmSetReturningFunctionExpression(
			List<? extends SqmTypedNode<?>> arguments,
			QueryEngine queryEngine) {
		//noinspection unchecked
		return new SelfRenderingSqmSetReturningFunction<>(
				this,
				this,
				arguments,
				getArgumentsValidator(),
				getSetReturningTypeResolver(),
				(AnonymousTupleType<T>) getSetReturningTypeResolver().resolveTupleType( arguments, queryEngine.getTypeConfiguration() ),
				queryEngine.getCriteriaBuilder(),
				getName()
		) {
			@Override
			public TableGroup convertToSqlAst(
					NavigablePath navigablePath,
					String identifierVariable,
					boolean lateral,
					boolean canUseInnerJoins,
					boolean withOrdinality,
					SqmToSqlAstConverter walker) {
				// SAP HANA only supports table column references i.e. `TABLE_NAME.COLUMN_NAME`
				// or constants as arguments to xmltable/json_table, so it's impossible to do lateral joins.
				// There is a nice trick we can apply to make this work though, which is to figure out
				// the table group an expression belongs to and render a special CTE returning xml/json that can be joined.
				// The xml/json of that CTE needs to be extended by table group primary key data,
				// so we can join it later.
				final FunctionTableGroup functionTableGroup = (FunctionTableGroup) super.convertToSqlAst(
						navigablePath,
						identifierVariable,
						lateral,
						canUseInnerJoins,
						withOrdinality,
						walker
				);
				//noinspection unchecked
				final List<SqlAstNode> sqlArguments = (List<SqlAstNode>) functionTableGroup.getPrimaryTableReference()
						.getFunctionExpression()
						.getArguments();
				// The array argument to unnest is always the first function argument
				final Expression argument = (Expression) sqlArguments.get( 0 );
				final Set<String> qualifiers = ColumnQualifierCollectorSqlAstWalker.determineColumnQualifiers( argument );
				// Can only do this transformation if the argument contains a single column reference qualifier
				if ( qualifiers.size() == 1 ) {
					final String tableQualifier = qualifiers.iterator().next();
					// Find the table group which the unnest argument refers to
					final FromClauseAccess fromClauseAccess = walker.getFromClauseAccess();
					final TableGroup sourceTableGroup =
							fromClauseAccess.findTableGroupByIdentificationVariable( tableQualifier );
					if ( sourceTableGroup != null ) {
						// Collect the primary key columns of the source table group;
						// they are embedded into the CTE payload for correlation
						final List<ColumnInfo> idColumns = new ArrayList<>();
						addIdColumns( sourceTableGroup.getModelPart(), idColumns );
						// Register a query transformer to register the CTE and rewrite the array argument
						walker.registerQueryTransformer( (cteContainer, querySpec, converter) -> {
							// Determine a CTE name that is available
							final String baseName = "_data";
							String cteName;
							int index = 0;
							do {
								cteName = baseName + ( index++ );
							} while ( cteContainer.getCteStatement( cteName ) != null );
							// Locate the table group that joins the unnest function table group,
							// so the join predicate correlating on the id columns can be attached
							final TableGroup parentTableGroup = querySpec.getFromClause().queryTableGroups(
									tg -> tg.findTableGroupJoin( functionTableGroup ) == null ? null : tg
							);
							final TableGroupJoin join = parentTableGroup.findTableGroupJoin( functionTableGroup );
							final Expression lhs = createExpression( tableQualifier, idColumns );
							final Expression rhs = createExpression(
									functionTableGroup.getPrimaryTableReference().getIdentificationVariable(),
									idColumns
							);
							join.applyPredicate( new ComparisonPredicate( lhs, ComparisonOperator.EQUAL, rhs ) );
							final String tableName = cteName;
							// The CTE exposes a single column "v" carrying the wrapped xml/json payload
							final List<CteColumn> cteColumns = List.of(
									new CteColumn( "v", argument.getExpressionType().getSingleJdbcMapping() )
							);
							final QuerySpec cteQuery = new QuerySpec( false );
							cteQuery.getFromClause().addRoot(
									new StandardTableGroup(
											true,
											sourceTableGroup.getNavigablePath(),
											(TableGroupProducer) sourceTableGroup.getModelPart(),
											false,
											null,
											sourceTableGroup.findTableReference( tableQualifier ),
											false,
											null,
											joinTableName -> false,
											(joinTableName, tg) -> null,
											null
									)
							);
							// Wrap the original xml/json value together with the id columns
							final Expression wrapperExpression;
							if ( ExpressionTypeHelper.isXml( argument ) ) {
								wrapperExpression = new XmlWrapperExpression( idColumns, tableQualifier, argument );
								// xmltable is allergic to null values and produces no result if one occurs,
								// so we must filter them out
								cteQuery.applyPredicate( new NullnessPredicate( argument, true ) );
							}
							else {
								wrapperExpression = new JsonWrapperExpression( idColumns, tableQualifier, argument );
							}
							cteQuery.getSelectClause().addSqlSelection( new SqlSelectionImpl( wrapperExpression ) );
							cteContainer.addCteStatement( new CteStatement(
									new CteTable( tableName, cteColumns ),
									new SelectStatement( cteQuery )
							) );
							// Rewrite the function argument to reference the CTE column instead
							sqlArguments.set( 0, new TableColumnReferenceExpression( argument, tableName, idColumns ) );
							return querySpec;
						} );
					}
				}
				return functionTableGroup;
			}

			/**
			 * Builds an expression over the given id columns qualified by {@code qualifier};
			 * a single column yields a plain column reference, multiple columns a SQL tuple.
			 */
			private Expression createExpression(String qualifier, List<ColumnInfo> idColumns) {
				if ( idColumns.size() == 1 ) {
					final ColumnInfo columnInfo = idColumns.get( 0 );
					return new ColumnReference( qualifier, columnInfo.name(), false, null, columnInfo.jdbcMapping() );
				}
				else {
					final ArrayList<Expression> expressions = new ArrayList<>( idColumns.size() );
					for ( ColumnInfo columnInfo : idColumns ) {
						expressions.add(
								new ColumnReference(
										qualifier,
										columnInfo.name(),
										false,
										null,
										columnInfo.jdbcMapping()
								)
						);
					}
					return new SqlTuple( expressions, null );
				}
			}

			// Dispatches to the appropriate overload based on the concrete model part kind
			private void addIdColumns(ModelPartContainer modelPartContainer, List<ColumnInfo> idColumns) {
				if ( modelPartContainer instanceof EntityValuedModelPart entityValuedModelPart ) {
					addIdColumns( entityValuedModelPart.getEntityMappingType(), idColumns );
				}
				else if ( modelPartContainer instanceof PluralAttributeMapping pluralAttributeMapping ) {
					addIdColumns( pluralAttributeMapping, idColumns );
				}
				else if ( modelPartContainer instanceof EmbeddableValuedModelPart embeddableModelPart ) {
					addIdColumns( embeddableModelPart, idColumns );
				}
				else {
					throw new QueryException( "Unsupported model part container: " + modelPartContainer );
				}
			}

			private void addIdColumns(EmbeddableValuedModelPart embeddableModelPart, List<ColumnInfo> idColumns) {
				if ( embeddableModelPart instanceof EmbeddedCollectionPart collectionPart ) {
					addIdColumns( collectionPart.getCollectionAttribute(), idColumns );
				}
				else {
					addIdColumns( embeddableModelPart.asAttributeMapping().getDeclaringType(), idColumns );
				}
			}

			// For a plural attribute, correlate on the foreign key's key-side columns
			private void addIdColumns(PluralAttributeMapping pluralAttributeMapping, List<ColumnInfo> idColumns) {
				final DdlTypeRegistry ddlTypeRegistry = pluralAttributeMapping.getCollectionDescriptor()
						.getFactory()
						.getTypeConfiguration()
						.getDdlTypeRegistry();
				addIdColumns( pluralAttributeMapping.getKeyDescriptor().getKeyPart(), ddlTypeRegistry, idColumns );
			}

			// For an entity, correlate on its identifier columns
			private void addIdColumns(EntityMappingType entityMappingType, List<ColumnInfo> idColumns) {
				final DdlTypeRegistry ddlTypeRegistry = entityMappingType.getEntityPersister()
						.getFactory()
						.getTypeConfiguration()
						.getDdlTypeRegistry();
				addIdColumns( entityMappingType.getIdentifierMapping(), ddlTypeRegistry, idColumns );
			}

			// Records name, mapping and resolved DDL type for every selectable of the model part
			private void addIdColumns(
					ValuedModelPart modelPart,
					DdlTypeRegistry ddlTypeRegistry,
					List<ColumnInfo> idColumns) {
				modelPart.forEachSelectable( (selectionIndex, selectableMapping) -> {
					final JdbcMapping jdbcMapping = selectableMapping.getJdbcMapping().getSingleJdbcMapping();
					idColumns.add( new ColumnInfo(
							selectableMapping.getSelectionExpression(),
							jdbcMapping,
							ddlTypeRegistry.getTypeName(
									jdbcMapping.getJdbcType().getDefaultSqlTypeCode(),
									selectableMapping.toSize(),
									(Type) jdbcMapping
							)
					) );
				} );
			}
		};
	}

	// Column name, its JDBC mapping and the DDL type string used in xmltable/json_table column clauses
	record ColumnInfo(String name, JdbcMapping jdbcMapping, String ddlType) {}

	/**
	 * Replacement for the original array argument: renders {@code <cteName>.v},
	 * i.e. the wrapped payload column of the correlation CTE, while retaining
	 * the original expression type and the id columns needed for correlation.
	 */
	static class TableColumnReferenceExpression implements SelfRenderingExpression {

		private final Expression argument;
		private final String tableName;
		private final List<ColumnInfo> idColumns;

		public TableColumnReferenceExpression(Expression argument, String tableName, List<ColumnInfo> idColumns) {
			this.argument = argument;
			this.tableName = tableName;
			this.idColumns = idColumns;
		}

		@Override
		public void renderToSql(
				SqlAppender sqlAppender,
				SqlAstTranslator<?> walker,
				SessionFactoryImplementor sessionFactory) {
			sqlAppender.appendSql( tableName );
			sqlAppender.appendSql( ".v" );
		}

		@Override
		public JdbcMappingContainer getExpressionType() {
			return argument.getExpressionType();
		}

		public List<ColumnInfo> getIdColumns() {
			return idColumns;
		}
	}

	/**
	 * Renders HANA's {@code xmltable(...)}. When the array argument was rewritten to a
	 * {@link TableColumnReferenceExpression}, the correlation id columns are emitted first,
	 * read back from attributes of the wrapping root element via an {@code ancestor::} path.
	 */
	@Override
	protected void renderXmlTable(
			SqlAppender sqlAppender,
			Expression array,
			BasicPluralType<?, ?> pluralType,
			@Nullable SqlTypedMapping sqlTypedMapping,
			AnonymousTupleTableGroupProducer tupleType,
			String tableIdentifierVariable,
			SqlAstTranslator<?> walker) {
		final XmlHelper.CollectionTags collectionTags = XmlHelper.determineCollectionTags(
				(BasicPluralJavaType<?>) pluralType.getJavaTypeDescriptor(), walker.getSessionFactory()
		);
		sqlAppender.appendSql( "xmltable('/" );
		sqlAppender.appendSql( collectionTags.rootName() );
		sqlAppender.appendSql( '/' );
		sqlAppender.appendSql( collectionTags.elementName() );
		sqlAppender.appendSql( "' passing " );
		array.accept( walker );
		sqlAppender.appendSql( " columns" );
		char separator = ' ';
		// Offset into the tuple's selectables: the first `offset` columns are the id columns
		final int offset;
		if ( array instanceof TableColumnReferenceExpression expression ) {
			offset = expression.getIdColumns().size();
			for ( ColumnInfo columnInfo : expression.getIdColumns() ) {
				sqlAppender.appendSql( separator );
				sqlAppender.appendSql( columnInfo.name() );
				sqlAppender.appendSql( ' ' );
				sqlAppender.appendSql( columnInfo.ddlType() );
				// id values live as attributes on the wrapping root element
				sqlAppender.appendSql( " path 'ancestor::" );
				sqlAppender.appendSql( collectionTags.rootName() );
				sqlAppender.appendSql( "/@" );
				sqlAppender.appendSql( columnInfo.name() );
				sqlAppender.appendSql( '\'' );
				separator = ',';
			}
		}
		else {
			offset = 0;
		}
		if ( tupleType.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null ) == null ) {
			// Composite elements: one column per selectable, addressed by its selectable name
			tupleType.forEachSelectable( offset, (selectionIndex, selectableMapping) -> {
				if ( selectionIndex == 0 ) {
					sqlAppender.append( ' ' );
				}
				else {
					sqlAppender.append( ',' );
				}
				sqlAppender.append( selectableMapping.getSelectionExpression() );
				if ( CollectionPart.Nature.INDEX.getName().equals( selectableMapping.getSelectableName() ) ) {
					sqlAppender.append( " for ordinality" );
				}
				else {
					sqlAppender.append( ' ' );
					sqlAppender.append( getDdlType( selectableMapping, walker ) );
					sqlAppender.appendSql( " path '" );
					sqlAppender.appendSql( selectableMapping.getSelectableName() );
					sqlAppender.appendSql( "'" );
				}
			} );
		}
		else {
			// Basic elements: the element node itself is the value, so the path is "."
			tupleType.forEachSelectable( offset, (selectionIndex, selectableMapping) -> {
				if ( selectionIndex == 0 ) {
					sqlAppender.append( ' ' );
				}
				else {
					sqlAppender.append( ',' );
				}
				sqlAppender.append( selectableMapping.getSelectionExpression() );
				if ( CollectionPart.Nature.INDEX.getName().equals( selectableMapping.getSelectableName() ) ) {
					sqlAppender.append( " for ordinality" );
				}
				else {
					sqlAppender.append( ' ' );
					sqlAppender.append( getDdlType( selectableMapping, walker ) );
					sqlAppender.appendSql( " path '" );
					sqlAppender.appendSql( "." );
					sqlAppender.appendSql( "'" );
				}
			} );
		}
		sqlAppender.appendSql( ')' );
	}

	/**
	 * Renders the CTE payload for XML arrays: a synthetic root element carrying the id
	 * columns as attributes, concatenated with the element content of the original XML value.
	 */
	static class XmlWrapperExpression implements SelfRenderingExpression {

		private final List<ColumnInfo> idColumns;
		private final String tableQualifier;
		private final Expression argument;

		public XmlWrapperExpression(List<ColumnInfo> idColumns, String tableQualifier, Expression argument) {
			this.idColumns = idColumns;
			this.tableQualifier = tableQualifier;
			this.argument = argument;
		}

		@Override
		public void renderToSql(
				SqlAppender sqlAppender,
				SqlAstTranslator<?> walker,
				SessionFactoryImplementor sessionFactory) {
			final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) argument.getExpressionType().getSingleJdbcMapping();
			final XmlHelper.CollectionTags collectionTags = XmlHelper.determineCollectionTags(
					(BasicPluralJavaType<?>) pluralType.getJavaTypeDescriptor(),
					sessionFactory
			);
			// Produce a XML string e.g. <root id="1">...</root>
			// which will contain the original XML as well as id column information for correlation
			// The "for xml" select renders e.g. <root id="1"/>; trimming the trailing "/>" leaves
			// an open tag that the original XML's element content is appended to
			sqlAppender.appendSql( "trim('/>' from (select" );
			char separator = ' ';
			for ( ColumnInfo columnInfo : idColumns ) {
				sqlAppender.appendSql( separator );
				sqlAppender.appendSql( tableQualifier );
				sqlAppender.appendSql( '.' );
				sqlAppender.appendSql( columnInfo.name() );
				sqlAppender.appendSql( ' ' );
				sqlAppender.appendDoubleQuoteEscapedString( columnInfo.name() );
				separator = ',';
			}
			// NOTE(review): rowname is fixed to 'Strings' here — presumably irrelevant since only
			// the root element with its attributes is kept after trimming; confirm
			sqlAppender.appendSql( " from sys.dummy for xml('root'='no','columnstyle'='attribute','rowname'='Strings','format'='no')))||" );
			// Append the original XML starting just after its opening root tag
			sqlAppender.appendSql( "substring(" );
			argument.accept( walker );
			sqlAppender.appendSql( ",locate('<" );
			sqlAppender.appendSql( collectionTags.rootName() );
			sqlAppender.appendSql( ">'," );
			argument.accept( walker );
			sqlAppender.appendSql( ")+" );
			// +2 accounts for the '<' and '>' around the root tag name
			sqlAppender.appendSql( collectionTags.rootName().length() + 2 );
			sqlAppender.appendSql( ",length(" );
			argument.accept( walker );
			sqlAppender.appendSql( "))" );
		}

		@Override
		public JdbcMappingContainer getExpressionType() {
			return argument.getExpressionType();
		}
	}

	/**
	 * Renders HANA's {@code json_table(...)}. When the array argument was rewritten to a
	 * {@link TableColumnReferenceExpression}, the id columns are read from the wrapping JSON
	 * object and the array itself is addressed via a nested path under key {@code v}.
	 */
	@Override
	protected void renderJsonTable(
			SqlAppender sqlAppender,
			Expression array,
			BasicPluralType<?, ?> pluralType,
			@Nullable SqlTypedMapping sqlTypedMapping,
			AnonymousTupleTableGroupProducer tupleType,
			String tableIdentifierVariable,
			SqlAstTranslator<?> walker) {
		final BasicType<?> elementType = pluralType.getElementType();
		final String columnType = walker.getSessionFactory().getTypeConfiguration().getDdlTypeRegistry().getTypeName(
				elementType.getJdbcType().getDdlTypeCode(),
				sqlTypedMapping == null ? Size.nil() : sqlTypedMapping.toSize(),
				elementType
		);
		sqlAppender.appendSql( "json_table(" );
		array.accept( walker );
		if ( array instanceof TableColumnReferenceExpression expression ) {
			sqlAppender.appendSql( ",'$' columns(" );
			for ( ColumnInfo columnInfo : expression.getIdColumns() ) {
				sqlAppender.appendSql( columnInfo.name() );
				sqlAppender.appendSql( ' ' );
				sqlAppender.appendSql( columnInfo.ddlType() );
				sqlAppender.appendSql( " path '$." );
				sqlAppender.appendSql( columnInfo.name() );
				sqlAppender.appendSql( "'," );
			}
			// The wrapped array lives under the "v" key (see JsonWrapperExpression)
			sqlAppender.appendSql( "nested path '$.v' columns (" );
			sqlAppender.append( tupleType.getColumnNames().get( 0 ) );
			sqlAppender.appendSql( ' ' );
			sqlAppender.append( columnType );
			sqlAppender.appendSql( " path '$')))" );
		}
		else {
			sqlAppender.appendSql( ",'$[*]' columns(" );
			sqlAppender.append( tupleType.getColumnNames().get( 0 ) );
			sqlAppender.appendSql( ' ' );
			sqlAppender.append( columnType );
			sqlAppender.appendSql( " path '$'))" );
		}
	}

	/**
	 * Renders the CTE payload for JSON arrays: an object containing the id columns
	 * plus the original array under key {@code "v"}, for later correlation.
	 */
	static class JsonWrapperExpression implements SelfRenderingExpression {

		private final List<ColumnInfo> idColumns;
		private final String tableQualifier;
		private final Expression argument;

		public JsonWrapperExpression(List<ColumnInfo> idColumns, String tableQualifier, Expression argument) {
			this.idColumns = idColumns;
			this.tableQualifier = tableQualifier;
			this.argument = argument;
		}

		@Override
		public void renderToSql(
				SqlAppender sqlAppender,
				SqlAstTranslator<?> walker,
				SessionFactoryImplementor sessionFactory) {
			// Produce a JSON string e.g. {"id":1,"v":[...]}
			// which will contain the original JSON as well as id column information for correlation
			// "for json" emits {...}; trim the braces and re-wrap so the "v" entry can be appended
			sqlAppender.appendSql( "'{'||trim('{}' from (select" );
			char separator = ' ';
			for ( ColumnInfo columnInfo : idColumns ) {
				sqlAppender.appendSql( separator );
				sqlAppender.appendSql( tableQualifier );
				sqlAppender.appendSql( '.' );
				sqlAppender.appendSql( columnInfo.name() );
				sqlAppender.appendSql( ' ' );
				sqlAppender.appendDoubleQuoteEscapedString( columnInfo.name() );
				separator = ',';
			}
			sqlAppender.appendSql( " from sys.dummy for json('arraywrap'='no')))||" );
			sqlAppender.appendSql( "'\"v\":'||" );
			argument.accept( walker );
			sqlAppender.appendSql( "||'}'" );
		}

		@Override
		public JdbcMappingContainer getExpressionType() {
			return argument.getExpressionType();
		}
	}
}

View File

@ -0,0 +1,50 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect.function.array;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.BasicPluralType;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* Oracle unnest function.
*/
public class OracleUnnestFunction extends UnnestFunction {

	public OracleUnnestFunction() {
		// Oracle's table() exposes the element as "column_value"; "i" is the ordinality column
		super( "column_value", "i" );
	}

	@Override
	protected void renderUnnest(
			SqlAppender sqlAppender,
			Expression array,
			BasicPluralType<?, ?> pluralType,
			@Nullable SqlTypedMapping sqlTypedMapping,
			AnonymousTupleTableGroupProducer tupleType,
			String tableIdentifierVariable,
			SqlAstTranslator<?> walker) {
		final ModelPart indexPart = tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
		if ( indexPart != null ) {
			// Oracle's table() has no "with ordinality", so emulate it by selecting
			// rownum alongside the collection columns in a lateral subquery
			sqlAppender.appendSql( "lateral (select t.*, rownum " );
			sqlAppender.appendSql( indexPart.asBasicValuedModelPart().getSelectionExpression() );
			sqlAppender.appendSql( " from table(" );
			array.accept( walker );
			sqlAppender.appendSql( ") t)" );
		}
		else {
			sqlAppender.appendSql( "table(" );
			array.accept( walker );
			sqlAppender.appendSql( ")" );
		}
	}
}

View File

@ -0,0 +1,71 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect.function.array;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.SqlTypes;
/**
* PostgreSQL unnest function.
*/
public class PostgreSQLUnnestFunction extends UnnestFunction {

	public PostgreSQLUnnestFunction() {
		// No dedicated basic array column name; the ordinality column is named "ordinality"
		super( null, "ordinality" );
	}

	@Override
	protected void renderJsonTable(
			SqlAppender sqlAppender,
			Expression array,
			BasicPluralType<?, ?> pluralType,
			@Nullable SqlTypedMapping sqlTypedMapping,
			AnonymousTupleTableGroupProducer tupleType,
			String tableIdentifierVariable,
			SqlAstTranslator<?> walker) {
		final AggregateSupport aggregateSupport =
				walker.getSessionFactory().getJdbcServices().getDialect().getAggregateSupport();
		final boolean withOrdinality =
				tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null ) != null;
		// Render a derived table over jsonb_array_elements(), projecting each tuple column
		sqlAppender.appendSql( "(select" );
		tupleType.forEachSelectable( 0, (position, mapping) -> {
			sqlAppender.append( position == 0 ? ' ' : ',' );
			if ( CollectionPart.Nature.INDEX.getName().equals( mapping.getSelectableName() ) ) {
				sqlAppender.appendSql( "t.ordinality" );
			}
			else {
				// Extract the component from the json value via the dialect's aggregate support
				sqlAppender.append( aggregateSupport.aggregateComponentCustomReadExpression(
						"",
						"",
						"t.value",
						mapping.getSelectableName(),
						SqlTypes.JSON,
						mapping
				) );
			}
			sqlAppender.append( ' ' );
			sqlAppender.append( mapping.getSelectionExpression() );
		} );
		sqlAppender.appendSql( " from jsonb_array_elements(" );
		array.accept( walker );
		sqlAppender.appendSql( ')' );
		if ( withOrdinality ) {
			sqlAppender.appendSql( " with ordinality" );
		}
		sqlAppender.appendSql( " t)" );
	}
}

View File

@ -0,0 +1,162 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect.function.array;
import org.hibernate.dialect.XmlHelper;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* SQL Server unnest function.
*/
public class SQLServerUnnestFunction extends UnnestFunction {

	public SQLServerUnnestFunction() {
		// "v" is the default basic array column name, "i" the default index column
		super( "v", "i" );
	}

	/**
	 * Renders {@code openjson(...) with (...)}.
	 * When the tuple has no single ELEMENT part, the array contains JSON objects and each
	 * column is extracted by its selectable name ({@code $.name}); otherwise the array value
	 * itself is the element ({@code $}). The previous implementation duplicated the whole
	 * per-column lambda across both cases; the branches differed only in the path literal,
	 * so they are merged here — the emitted SQL is unchanged.
	 */
	@Override
	protected void renderJsonTable(
			SqlAppender sqlAppender,
			Expression array,
			BasicPluralType<?, ?> pluralType,
			@Nullable SqlTypedMapping sqlTypedMapping,
			AnonymousTupleTableGroupProducer tupleType,
			String tableIdentifierVariable,
			SqlAstTranslator<?> walker) {
		sqlAppender.appendSql( "openjson(" );
		array.accept( walker );
		sqlAppender.appendSql( ",'$[*]') with (" );
		// true -> object elements addressed by name; false -> the element itself is the value
		final boolean objectElements =
				tupleType.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null ) == null;
		tupleType.forEachSelectable( 0, (selectionIndex, selectableMapping) -> {
			sqlAppender.append( selectionIndex == 0 ? ' ' : ',' );
			sqlAppender.append( selectableMapping.getSelectionExpression() );
			if ( CollectionPart.Nature.INDEX.getName().equals( selectableMapping.getSelectableName() ) ) {
				sqlAppender.append( " for ordinality" );
			}
			else {
				sqlAppender.append( ' ' );
				sqlAppender.append( getDdlType( selectableMapping, walker ) );
				sqlAppender.appendSql( " path '$" );
				if ( objectElements ) {
					sqlAppender.appendSql( '.' );
					sqlAppender.append( selectableMapping.getSelectableName() );
				}
				sqlAppender.appendSql( '\'' );
			}
		} );
		sqlAppender.appendSql( ')' );
	}

	/**
	 * Renders a derived table over {@code <xml>.nodes(...)} with {@code value()} extraction
	 * per column. The ordinality column is emulated by counting preceding siblings in XQuery.
	 * As with {@link #renderJsonTable}, the two formerly duplicated branches differed only in
	 * whether the value path is prefixed with the selectable name, and are merged — the
	 * emitted SQL is unchanged.
	 */
	@Override
	protected void renderXmlTable(
			SqlAppender sqlAppender,
			Expression array,
			BasicPluralType<?, ?> pluralType,
			@Nullable SqlTypedMapping sqlTypedMapping,
			AnonymousTupleTableGroupProducer tupleType,
			String tableIdentifierVariable,
			SqlAstTranslator<?> walker) {
		final XmlHelper.CollectionTags collectionTags = XmlHelper.determineCollectionTags(
				(BasicPluralJavaType<?>) pluralType.getJavaTypeDescriptor(), walker.getSessionFactory()
		);
		sqlAppender.appendSql( "(select" );
		// true -> composite elements: values are read from named child nodes
		final boolean namedValues =
				tupleType.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null ) == null;
		tupleType.forEachSelectable( 0, (selectionIndex, selectableMapping) -> {
			sqlAppender.append( selectionIndex == 0 ? ' ' : ',' );
			if ( CollectionPart.Nature.INDEX.getName().equals( selectableMapping.getSelectableName() ) ) {
				// 1-based position: count the element's preceding siblings and add one
				sqlAppender.appendSql( "t.v.value('count(for $a in . return $a/../" );
				sqlAppender.appendSql( collectionTags.elementName() );
				sqlAppender.appendSql( "[.<<$a])+1','" );
				sqlAppender.append( getDdlType( selectableMapping, walker ) );
				sqlAppender.appendSql( "') " );
				sqlAppender.appendSql( selectableMapping.getSelectionExpression() );
			}
			else {
				sqlAppender.appendSql( "t.v.value('" );
				if ( namedValues ) {
					sqlAppender.appendSql( selectableMapping.getSelectableName() );
					sqlAppender.appendSql( '/' );
				}
				sqlAppender.appendSql( "text()[1]','" );
				sqlAppender.append( getDdlType( selectableMapping, walker ) );
				sqlAppender.appendSql( "') " );
				sqlAppender.appendSql( selectableMapping.getSelectionExpression() );
			}
		} );
		sqlAppender.appendSql( " from " );
		array.accept( walker );
		sqlAppender.appendSql( ".nodes('/" );
		sqlAppender.appendSql( collectionTags.rootName() );
		sqlAppender.appendSql( '/' );
		sqlAppender.appendSql( collectionTags.elementName() );
		sqlAppender.appendSql( "') t(v))" );
	}
}

View File

@ -0,0 +1,93 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect.function.array;
import org.hibernate.dialect.XmlHelper;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* Sybase ASE unnest function.
*/
public class SybaseASEUnnestFunction extends UnnestFunction {

	public SybaseASEUnnestFunction() {
		// "v" is the default basic array column name, "i" the default index column
		super( "v", "i" );
	}

	/**
	 * Renders Sybase ASE's {@code xmltable(...)}.
	 * When the tuple has no single ELEMENT part, the elements are composite and each column
	 * is addressed by its selectable name; otherwise the element node itself is the value
	 * (path {@code "."}). The previous implementation duplicated the entire per-column lambda
	 * across both cases although only the path literal differed; the branches are merged here
	 * — the emitted SQL is unchanged.
	 */
	@Override
	protected void renderXmlTable(
			SqlAppender sqlAppender,
			Expression array,
			BasicPluralType<?, ?> pluralType,
			@Nullable SqlTypedMapping sqlTypedMapping,
			AnonymousTupleTableGroupProducer tupleType,
			String tableIdentifierVariable,
			SqlAstTranslator<?> walker) {
		final XmlHelper.CollectionTags collectionTags = XmlHelper.determineCollectionTags(
				(BasicPluralJavaType<?>) pluralType.getJavaTypeDescriptor(), walker.getSessionFactory()
		);
		sqlAppender.appendSql( "xmltable('/" );
		sqlAppender.appendSql( collectionTags.rootName() );
		sqlAppender.appendSql( '/' );
		sqlAppender.appendSql( collectionTags.elementName() );
		sqlAppender.appendSql( "' passing " );
		array.accept( walker );
		sqlAppender.appendSql( " columns" );
		// true -> composite elements addressed by selectable name; false -> element itself ('.')
		final boolean namedValues =
				tupleType.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null ) == null;
		tupleType.forEachSelectable( 0, (selectionIndex, selectableMapping) -> {
			sqlAppender.append( selectionIndex == 0 ? ' ' : ',' );
			sqlAppender.append( selectableMapping.getSelectionExpression() );
			if ( CollectionPart.Nature.INDEX.getName().equals( selectableMapping.getSelectableName() ) ) {
				sqlAppender.append( " bigint for ordinality" );
			}
			else {
				sqlAppender.append( ' ' );
				sqlAppender.append( getDdlType( selectableMapping, walker ) );
				sqlAppender.appendSql( " path '" );
				sqlAppender.appendSql( namedValues ? selectableMapping.getSelectableName() : "." );
				sqlAppender.appendSql( "'" );
			}
		} );
		sqlAppender.appendSql( ')' );
	}
}

View File

@ -0,0 +1,216 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect.function.array;
import java.util.List;
import org.hibernate.dialect.XmlHelper;
import org.hibernate.dialect.function.UnnestSetReturningFunctionTypeResolver;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingSetReturningFunctionDescriptor;
import org.hibernate.query.sqm.produce.function.SetReturningFunctionTypeResolver;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* Standard unnest function.
*/
public class UnnestFunction extends AbstractSqmSelfRenderingSetReturningFunctionDescriptor {
/**
 * Creates the function with the standard type resolver.
 *
 * @param defaultBasicArrayColumnName column name used for the element of a basic array,
 *        or {@code null} if the dialect has no such default
 * @param defaultIndexSelectionExpression selection expression used for the ordinality/index column
 */
public UnnestFunction(@Nullable String defaultBasicArrayColumnName, String defaultIndexSelectionExpression) {
	this( new UnnestSetReturningFunctionTypeResolver( defaultBasicArrayColumnName, defaultIndexSelectionExpression ) );
}
/**
 * Creates the function with a custom type resolver; used by dialect subclasses.
 * No arguments validator and no argument type resolver are installed.
 */
protected UnnestFunction(SetReturningFunctionTypeResolver setReturningFunctionTypeResolver) {
	super(
			"unnest",
			null,
			setReturningFunctionTypeResolver,
			null
	);
}
@Override
public void render(
		SqlAppender sqlAppender,
		List<? extends SqlAstNode> sqlAstArguments,
		AnonymousTupleTableGroupProducer tupleType,
		String tableIdentifierVariable,
		SqlAstTranslator<?> walker) {
	// The array to unnest is always the first (and only) argument
	final Expression array = (Expression) sqlAstArguments.get( 0 );
	final @Nullable SqlTypedMapping sqlTypedMapping = array.getExpressionType() instanceof SqlTypedMapping typedMapping
			? typedMapping
			: null;
	final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) array.getExpressionType().getSingleJdbcMapping();
	// Dispatch on the storage representation of the array
	switch ( pluralType.getJdbcType().getDefaultSqlTypeCode() ) {
		case SqlTypes.JSON_ARRAY ->
				renderJsonTable( sqlAppender, array, pluralType, sqlTypedMapping, tupleType, tableIdentifierVariable, walker );
		case SqlTypes.XML_ARRAY ->
				renderXmlTable( sqlAppender, array, pluralType, sqlTypedMapping, tupleType, tableIdentifierVariable, walker );
		default ->
				renderUnnest( sqlAppender, array, pluralType, sqlTypedMapping, tupleType, tableIdentifierVariable, walker );
	}
}
/**
 * Resolves the DDL type string for a column: an explicit column definition wins,
 * otherwise the name is looked up in the DDL type registry.
 */
protected String getDdlType(SqlTypedMapping sqlTypedMapping, SqlAstTranslator<?> translator) {
	final String explicitDefinition = sqlTypedMapping.getColumnDefinition();
	return explicitDefinition != null
			? explicitDefinition
			: translator.getSessionFactory().getTypeConfiguration().getDdlTypeRegistry().getTypeName(
					sqlTypedMapping.getJdbcMapping().getJdbcType().getDdlTypeCode(),
					sqlTypedMapping.toSize(),
					(Type) sqlTypedMapping.getJdbcMapping()
			);
}
/**
 * Default {@code json_table(...)} rendering for JSON-typed arrays.
 * Composite elements (no single ELEMENT part) are addressed by name ({@code $.name});
 * basic elements use the value itself ({@code $}).
 */
protected void renderJsonTable(
		SqlAppender sqlAppender,
		Expression array,
		BasicPluralType<?, ?> pluralType,
		@Nullable SqlTypedMapping sqlTypedMapping,
		AnonymousTupleTableGroupProducer tupleType,
		String tableIdentifierVariable,
		SqlAstTranslator<?> walker) {
	sqlAppender.appendSql( "json_table(" );
	array.accept( walker );
	sqlAppender.appendSql( ",'$[*]' columns(" );
	if ( tupleType.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null ) == null ) {
		// Composite elements: one column per selectable, addressed by its selectable name
		tupleType.forEachSelectable( 0, (selectionIndex, selectableMapping) -> {
			if ( selectionIndex == 0 ) {
				sqlAppender.append( ' ' );
			}
			else {
				sqlAppender.append( ',' );
			}
			sqlAppender.append( selectableMapping.getSelectionExpression() );
			sqlAppender.append( ' ' );
			if ( CollectionPart.Nature.INDEX.getName().equals( selectableMapping.getSelectableName() ) ) {
				// NOTE(review): this emits a double space before "for ordinality" (the ' ' above
				// plus the leading space here) — harmless in SQL, but inconsistent with the
				// basic-element branch below; confirm intent before normalizing
				sqlAppender.append( " for ordinality" );
			}
			else {
				sqlAppender.append( getDdlType( selectableMapping, walker ) );
				sqlAppender.appendSql( " path '$." );
				sqlAppender.append( selectableMapping.getSelectableName() );
				sqlAppender.appendSql( '\'' );
			}
		} );
	}
	else {
		// Basic elements: the array value itself is the column value, path '$'
		tupleType.forEachSelectable( 0, (selectionIndex, selectableMapping) -> {
			if ( selectionIndex == 0 ) {
				sqlAppender.append( ' ' );
			}
			else {
				sqlAppender.append( ',' );
			}
			sqlAppender.append( selectableMapping.getSelectionExpression() );
			if ( CollectionPart.Nature.INDEX.getName().equals( selectableMapping.getSelectableName() ) ) {
				sqlAppender.append( " for ordinality" );
			}
			else {
				sqlAppender.append( ' ' );
				sqlAppender.append( getDdlType( selectableMapping, walker ) );
				sqlAppender.appendSql( " path '$'" );
			}
		} );
	}
	sqlAppender.appendSql( "))" );
}
/**
 * Default {@code xmltable(...)} rendering for XML-typed arrays, passing the document
 * as variable {@code $d}. Composite elements (no single ELEMENT part) are addressed by
 * child-element name; basic elements read the element's own {@code text()}.
 */
protected void renderXmlTable(
		SqlAppender sqlAppender,
		Expression array,
		BasicPluralType<?, ?> pluralType,
		@Nullable SqlTypedMapping sqlTypedMapping,
		AnonymousTupleTableGroupProducer tupleType,
		String tableIdentifierVariable,
		SqlAstTranslator<?> walker) {
	final XmlHelper.CollectionTags collectionTags = XmlHelper.determineCollectionTags(
			(BasicPluralJavaType<?>) pluralType.getJavaTypeDescriptor(), walker.getSessionFactory()
	);
	sqlAppender.appendSql( "xmltable('$d/" );
	sqlAppender.appendSql( collectionTags.rootName() );
	sqlAppender.appendSql( '/' );
	sqlAppender.appendSql( collectionTags.elementName() );
	sqlAppender.appendSql( "' passing " );
	array.accept( walker );
	sqlAppender.appendSql( " as \"d\" columns" );
	if ( tupleType.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null ) == null ) {
		// Composite elements: value path is '<selectableName>/text()'
		tupleType.forEachSelectable( 0, (selectionIndex, selectableMapping) -> {
			if ( selectionIndex == 0 ) {
				sqlAppender.append( ' ' );
			}
			else {
				sqlAppender.append( ',' );
			}
			sqlAppender.append( selectableMapping.getSelectionExpression() );
			if ( CollectionPart.Nature.INDEX.getName().equals( selectableMapping.getSelectableName() ) ) {
				sqlAppender.append( " for ordinality" );
			}
			else {
				sqlAppender.append( ' ' );
				sqlAppender.append( getDdlType( selectableMapping, walker ) );
				sqlAppender.appendSql( " path '" );
				sqlAppender.appendSql( selectableMapping.getSelectableName() );
				sqlAppender.appendSql( "/text()" );
				sqlAppender.appendSql( "'" );
			}
		} );
	}
	else {
		// Basic elements: value path is the element's own 'text()'
		tupleType.forEachSelectable( 0, (selectionIndex, selectableMapping) -> {
			if ( selectionIndex == 0 ) {
				sqlAppender.append( ' ' );
			}
			else {
				sqlAppender.append( ',' );
			}
			sqlAppender.append( selectableMapping.getSelectionExpression() );
			if ( CollectionPart.Nature.INDEX.getName().equals( selectableMapping.getSelectableName() ) ) {
				sqlAppender.append( " for ordinality" );
			}
			else {
				sqlAppender.append( ' ' );
				sqlAppender.append( getDdlType( selectableMapping, walker ) );
				sqlAppender.appendSql( " path '" );
				sqlAppender.appendSql( "text()" );
				sqlAppender.appendSql( "'" );
			}
		} );
	}
	sqlAppender.appendSql( ')' );
}
protected void renderUnnest(
SqlAppender sqlAppender,
Expression array,
BasicPluralType<?, ?> pluralType,
@Nullable SqlTypedMapping sqlTypedMapping,
AnonymousTupleTableGroupProducer tupleType,
String tableIdentifierVariable,
SqlAstTranslator<?> walker) {
sqlAppender.appendSql( "unnest(" );
array.accept( walker );
sqlAppender.appendSql( ')' );
if ( tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null ) != null ) {
sqlAppender.append( " with ordinality" );
}
}
}

View File

@ -38,7 +38,7 @@ public class JsonArrayAggFunction extends AbstractSqmSelfRenderingFunctionDescri
FunctionKind.ORDERED_SET_AGGREGATE,
StandardArgumentsValidators.between( 1, 2 ),
StandardFunctionReturnTypeResolvers.invariant(
typeConfiguration.getBasicTypeRegistry().resolve( String.class, SqlTypes.JSON_ARRAY )
typeConfiguration.getBasicTypeRegistry().resolve( String.class, SqlTypes.JSON )
),
null
);

View File

@ -28,7 +28,7 @@ public class JsonArrayFunction extends AbstractSqmSelfRenderingFunctionDescripto
FunctionKind.NORMAL,
null,
StandardFunctionReturnTypeResolvers.invariant(
typeConfiguration.getBasicTypeRegistry().resolve( String.class, SqlTypes.JSON_ARRAY )
typeConfiguration.getBasicTypeRegistry().resolve( String.class, SqlTypes.JSON )
),
null
);

View File

@ -119,8 +119,10 @@ public class SQLServerXmlAggFunction extends XmlAggFunction {
),
alias,
List.of("v"),
Set.of(),
true,
true,
false,
null
);
tableGroup.addTableGroupJoin(

View File

@ -7,6 +7,7 @@ package org.hibernate.dialect.function.xml;
import java.util.List;
import java.util.Map;
import org.hibernate.dialect.XmlHelper;
import org.hibernate.query.ReturnableType;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
@ -33,8 +34,6 @@ import org.hibernate.type.spi.TypeConfiguration;
import org.checkerframework.checker.nullness.qual.Nullable;
import static java.lang.Character.isLetter;
import static java.lang.Character.isLetterOrDigit;
import static org.hibernate.query.sqm.produce.function.FunctionParameterType.STRING;
/**
@ -56,7 +55,7 @@ public class XmlElementFunction extends AbstractSqmSelfRenderingFunctionDescript
TypeConfiguration typeConfiguration) {
//noinspection unchecked
final String elementName = ( (SqmLiteral<String>) arguments.get( 0 ) ).getLiteralValue();
if ( !isValidXmlName( elementName ) ) {
if ( !XmlHelper.isValidXmlName( elementName ) ) {
throw new FunctionArgumentException(
String.format(
"Invalid XML element name passed to 'xmlelement()': %s",
@ -68,7 +67,7 @@ public class XmlElementFunction extends AbstractSqmSelfRenderingFunctionDescript
&& arguments.get( 1 ) instanceof SqmXmlAttributesExpression attributesExpression ) {
final Map<String, SqmExpression<?>> attributes = attributesExpression.getAttributes();
for ( Map.Entry<String, SqmExpression<?>> entry : attributes.entrySet() ) {
if ( !isValidXmlName( entry.getKey() ) ) {
if ( !XmlHelper.isValidXmlName( entry.getKey() ) ) {
throw new FunctionArgumentException(
String.format(
"Invalid XML attribute name passed to 'xmlattributes()': %s",
@ -79,29 +78,6 @@ public class XmlElementFunction extends AbstractSqmSelfRenderingFunctionDescript
}
}
}
private static boolean isValidXmlName(String name) {
if ( name.isEmpty()
|| !isValidXmlNameStart( name.charAt( 0 ) )
|| name.regionMatches( true, 0, "xml", 0, 3 ) ) {
return false;
}
for ( int i = 1; i < name.length(); i++ ) {
if ( !isValidXmlNameChar( name.charAt( i ) ) ) {
return false;
}
}
return true;
}
private static boolean isValidXmlNameStart(char c) {
return isLetter( c ) || c == '_' || c == ':';
}
private static boolean isValidXmlNameChar(char c) {
return isLetterOrDigit( c ) || c == '_' || c == ':' || c == '-' || c == '.';
}
}
),
StandardFunctionReturnTypeResolvers.invariant(

View File

@ -6,6 +6,7 @@ package org.hibernate.dialect.function.xml;
import java.util.List;
import org.hibernate.dialect.XmlHelper;
import org.hibernate.query.ReturnableType;
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
import org.hibernate.query.sqm.function.FunctionKind;
@ -22,9 +23,6 @@ import org.hibernate.sql.ast.tree.expression.AliasedExpression;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.spi.TypeConfiguration;
import static java.lang.Character.isLetter;
import static java.lang.Character.isLetterOrDigit;
/**
* Standard xmlforest function.
*/
@ -52,7 +50,7 @@ public class XmlForestFunction extends AbstractSqmSelfRenderingFunctionDescripto
)
);
}
if ( !isValidXmlName( namedExpression.getName() ) ) {
if ( !XmlHelper.isValidXmlName( namedExpression.getName() ) ) {
throw new FunctionArgumentException(
String.format(
"Invalid XML element name passed to 'xmlforest()': %s",
@ -63,28 +61,6 @@ public class XmlForestFunction extends AbstractSqmSelfRenderingFunctionDescripto
}
}
private static boolean isValidXmlName(String name) {
if ( name.isEmpty()
|| !isValidXmlNameStart( name.charAt( 0 ) )
|| name.regionMatches( true, 0, "xml", 0, 3 ) ) {
return false;
}
for ( int i = 1; i < name.length(); i++ ) {
if ( !isValidXmlNameChar( name.charAt( i ) ) ) {
return false;
}
}
return true;
}
private static boolean isValidXmlNameStart(char c) {
return isLetter( c ) || c == '_' || c == ':';
}
private static boolean isValidXmlNameChar(char c) {
return isLetterOrDigit( c ) || c == '_' || c == ':' || c == '-' || c == '.';
}
}
),
StandardFunctionReturnTypeResolvers.invariant(

View File

@ -0,0 +1,68 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.engine.spi;
import java.util.TimeZone;
import org.hibernate.Internal;
import org.hibernate.type.descriptor.WrapperOptions;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
 * A lazy session implementation that is needed for rendering literals.
 * Usually, only the {@link WrapperOptions} interface is needed,
 * but for creating LOBs, it might be necessary to have a full-blown session.
 */
@Internal
public class LazySessionWrapperOptions extends AbstractDelegatingWrapperOptions {

	private final SessionFactoryImplementor sessionFactory;
	// Lazily opened temporary session; null until first use, null again after cleanup()
	private @Nullable SessionImplementor session;

	public LazySessionWrapperOptions(SessionFactoryImplementor sessionFactory) {
		this.sessionFactory = sessionFactory;
	}

	/**
	 * Closes and discards the temporary session if one was opened.
	 * Safe to call multiple times; a later {@link #delegate()} call reopens one.
	 */
	public void cleanup() {
		if ( session != null ) {
			session.close();
			session = null;
		}
	}

	/**
	 * Returns the backing session, opening a temporary one on first access.
	 */
	@Override
	protected SessionImplementor delegate() {
		if ( session == null ) {
			session = sessionFactory.openTemporarySession();
		}
		return session;
	}

	@Override
	public SharedSessionContractImplementor getSession() {
		return delegate();
	}

	@Override
	public SessionFactoryImplementor getSessionFactory() {
		return sessionFactory;
	}

	// The following overrides answer from the factory directly,
	// avoiding the cost of opening the temporary session.

	@Override
	public boolean useStreamForLobBinding() {
		return sessionFactory.getFastSessionServices().useStreamForLobBinding();
	}

	@Override
	public int getPreferredSqlTypeCodeForBoolean() {
		return sessionFactory.getFastSessionServices().getPreferredSqlTypeCodeForBoolean();
	}

	@Override
	public TimeZone getJdbcTimeZone() {
		return sessionFactory.getSessionFactoryOptions().getJdbcTimeZone();
	}
}

View File

@ -1160,7 +1160,6 @@ public class BasicValue extends SimpleValue implements JdbcTypeIndicators, Resol
this.jdbcTypeCode = jdbcTypeCode;
}
@Override
public Integer getExplicitJdbcTypeCode() {
return jdbcTypeCode == null ? getPreferredSqlTypeCodeForArray() : jdbcTypeCode;
}

View File

@ -32,6 +32,8 @@ import org.hibernate.type.EntityType;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.JdbcTypeNameMapper;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.DdlType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.MappingContext;
@ -116,6 +118,10 @@ public class Column implements Selectable, Serializable, Cloneable, ColumnTypeIn
this.value = value;
}
public JdbcMapping getType() {
return getValue().getSelectableType( getValue().getBuildingContext().getMetadataCollector(), getTypeIndex() );
}
public String getName() {
return name;
}
@ -316,10 +322,22 @@ public class Column implements Selectable, Serializable, Cloneable, ColumnTypeIn
return sqlTypeCode;
}
private String getSqlTypeName(DdlTypeRegistry ddlTypeRegistry, Dialect dialect, MappingContext mapping) {
private String getSqlTypeName(TypeConfiguration typeConfiguration, Dialect dialect, MappingContext mapping) {
if ( sqlTypeName == null ) {
final int typeCode = getSqlTypeCode( mapping );
final DdlType descriptor = ddlTypeRegistry.getDescriptor( typeCode );
final DdlTypeRegistry ddlTypeRegistry = typeConfiguration.getDdlTypeRegistry();
final JdbcTypeRegistry jdbcTypeRegistry = typeConfiguration.getJdbcTypeRegistry();
final int sqlTypeCode = getSqlTypeCode( mapping );
final JdbcTypeConstructor constructor = jdbcTypeRegistry.getConstructor( sqlTypeCode );
final JdbcType jdbcType;
if ( constructor == null ) {
jdbcType = jdbcTypeRegistry.findDescriptor( sqlTypeCode );
}
else {
jdbcType = ( (BasicType<?>) getUnderlyingType( mapping, getValue().getType(), typeIndex ) ).getJdbcType();
}
final DdlType descriptor = jdbcType == null
? null
: ddlTypeRegistry.getDescriptor( jdbcType.getDdlTypeCode() );
if ( descriptor == null ) {
throw new MappingException(
String.format(
@ -327,8 +345,8 @@ public class Column implements Selectable, Serializable, Cloneable, ColumnTypeIn
"Unable to determine SQL type name for column '%s' of table '%s' because there is no type mapping for org.hibernate.type.SqlTypes code: %s (%s)",
getName(),
getValue().getTable().getName(),
typeCode,
JdbcTypeNameMapper.getTypeName( typeCode )
sqlTypeCode,
JdbcTypeNameMapper.getTypeName( sqlTypeCode )
)
);
}
@ -400,7 +418,7 @@ public class Column implements Selectable, Serializable, Cloneable, ColumnTypeIn
public String getSqlType(Metadata mapping) {
final Database database = mapping.getDatabase();
return getSqlTypeName( database.getTypeConfiguration().getDdlTypeRegistry(), database.getDialect(), mapping );
return getSqlTypeName( database.getTypeConfiguration(), database.getDialect(), mapping );
}
/**
@ -408,7 +426,7 @@ public class Column implements Selectable, Serializable, Cloneable, ColumnTypeIn
*/
@Deprecated(since = "6.2")
public String getSqlType(TypeConfiguration typeConfiguration, Dialect dialect, Mapping mapping) {
return getSqlTypeName( typeConfiguration.getDdlTypeRegistry(), dialect, mapping );
return getSqlTypeName( typeConfiguration, dialect, mapping );
}
@Override

View File

@ -6,17 +6,19 @@ package org.hibernate.metamodel.mapping;
import org.hibernate.engine.jdbc.Size;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* Models the type of a thing that can be used as an expression in a SQL query
*
* @author Christian Beikov
*/
public interface SqlTypedMapping {
String getColumnDefinition();
Long getLength();
Integer getPrecision();
Integer getScale();
Integer getTemporalPrecision();
@Nullable String getColumnDefinition();
@Nullable Long getLength();
@Nullable Integer getPrecision();
@Nullable Integer getScale();
@Nullable Integer getTemporalPrecision();
default boolean isLob() {
return getJdbcMapping().getJdbcType().isLob();
}

View File

@ -77,6 +77,7 @@ import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.spi.CompositeTypeImplementor;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.type.SqlTypes.ARRAY;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.SQLXML;
@ -263,13 +264,18 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
final TypeConfiguration typeConfiguration = creationContext.getTypeConfiguration();
final BasicTypeRegistry basicTypeRegistry = typeConfiguration.getBasicTypeRegistry();
final Column aggregateColumn = bootDescriptor.getAggregateColumn();
Integer aggregateSqlTypeCode = aggregateColumn.getSqlTypeCode();
final BasicValue basicValue = (BasicValue) aggregateColumn.getValue();
final BasicValue.Resolution<?> resolution = basicValue.getResolution();
final int aggregateColumnSqlTypeCode = resolution.getJdbcType().getDefaultSqlTypeCode();
final int aggregateSqlTypeCode;
boolean isArray = false;
String structTypeName = null;
switch ( aggregateSqlTypeCode ) {
switch ( aggregateColumnSqlTypeCode ) {
case STRUCT:
aggregateSqlTypeCode = STRUCT;
structTypeName = aggregateColumn.getSqlType( creationContext.getMetadata() );
break;
case ARRAY:
case STRUCT_ARRAY:
case STRUCT_TABLE:
isArray = true;
@ -290,6 +296,9 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
isArray = true;
aggregateSqlTypeCode = SQLXML;
break;
default:
aggregateSqlTypeCode = aggregateColumnSqlTypeCode;
break;
}
final JdbcTypeRegistry jdbcTypeRegistry = typeConfiguration.getJdbcTypeRegistry();
final AggregateJdbcType aggregateJdbcType = jdbcTypeRegistry.resolveAggregateDescriptor(
@ -307,7 +316,6 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
basicTypeRegistry.register( basicType, bootDescriptor.getStructName().render() );
basicTypeRegistry.register( basicType, getMappedJavaType().getJavaTypeClass().getName() );
}
final BasicValue basicValue = (BasicValue) aggregateColumn.getValue();
final BasicType<?> resolvedJdbcMapping;
if ( isArray ) {
final JdbcTypeConstructor arrayConstructor = jdbcTypeRegistry.getConstructor( SqlTypes.ARRAY );
@ -315,7 +323,7 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
throw new IllegalArgumentException( "No JdbcTypeConstructor registered for SqlTypes.ARRAY" );
}
//noinspection rawtypes,unchecked
final BasicType<?> arrayType = ( (BasicPluralJavaType) basicValue.getResolution().getDomainJavaType() ).resolveType(
final BasicType<?> arrayType = ( (BasicPluralJavaType) resolution.getDomainJavaType() ).resolveType(
typeConfiguration,
creationContext.getDialect(),
basicType,
@ -328,7 +336,7 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
else {
resolvedJdbcMapping = basicType;
}
basicValue.getResolution().updateResolution( resolvedJdbcMapping );
resolution.updateResolution( resolvedJdbcMapping );
return resolvedJdbcMapping;
}

View File

@ -7,24 +7,30 @@ package org.hibernate.metamodel.mapping.internal;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* @author Christian Beikov
*/
public class SqlTypedMappingImpl implements SqlTypedMapping {
private final String columnDefinition;
private final Long length;
private final Integer precision;
private final Integer scale;
private final Integer temporalPrecision;
private final @Nullable String columnDefinition;
private final @Nullable Long length;
private final @Nullable Integer precision;
private final @Nullable Integer scale;
private final @Nullable Integer temporalPrecision;
private final JdbcMapping jdbcMapping;
public SqlTypedMappingImpl(JdbcMapping jdbcMapping) {
this( null, null, null, null, null, jdbcMapping );
}
public SqlTypedMappingImpl(
String columnDefinition,
Long length,
Integer precision,
Integer scale,
Integer temporalPrecision,
@Nullable String columnDefinition,
@Nullable Long length,
@Nullable Integer precision,
@Nullable Integer scale,
@Nullable Integer temporalPrecision,
JdbcMapping jdbcMapping) {
// Save memory by using interned strings. Probability is high that we have multiple duplicate strings
this.columnDefinition = columnDefinition == null ? null : columnDefinition.intern();
@ -36,27 +42,27 @@ public class SqlTypedMappingImpl implements SqlTypedMapping {
}
@Override
public String getColumnDefinition() {
public @Nullable String getColumnDefinition() {
return columnDefinition;
}
@Override
public Long getLength() {
public @Nullable Long getLength() {
return length;
}
@Override
public Integer getPrecision() {
public @Nullable Integer getPrecision() {
return precision;
}
@Override
public Integer getTemporalPrecision() {
public @Nullable Integer getTemporalPrecision() {
return temporalPrecision;
}
@Override
public Integer getScale() {
public @Nullable Integer getScale() {
return scale;
}

View File

@ -60,6 +60,7 @@ import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.spi.PersisterFactory;
import org.hibernate.query.BindableType;
import org.hibernate.query.derived.AnonymousTupleSimpleSqmPathSource;
import org.hibernate.query.derived.AnonymousTupleSqmPathSource;
import org.hibernate.query.sqm.SqmExpressible;
import org.hibernate.query.sqm.tree.domain.SqmPath;
@ -742,7 +743,8 @@ public class MappingMetamodelImpl extends QueryParameterBindingTypeResolverImpl
return getTypeConfiguration().getBasicTypeForJavaType( sqmExpressible.getRelationalJavaType().getJavaType() );
}
if ( sqmExpressible instanceof BasicSqmPathSource<?> ) {
if ( sqmExpressible instanceof BasicSqmPathSource<?>
|| sqmExpressible instanceof AnonymousTupleSimpleSqmPathSource<?> ) {
return resolveMappingExpressible( sqmExpressible.getSqmType(), tableGroupLocator );
}

View File

@ -27,10 +27,14 @@ import org.hibernate.query.sqm.spi.SqmCreationHelper;
import org.hibernate.query.sqm.tree.SqmJoinType;
import org.hibernate.query.sqm.tree.domain.SqmPath;
import org.hibernate.query.sqm.tree.domain.SqmSingularJoin;
import org.hibernate.query.sqm.tree.from.SqmAttributeJoin;
import org.hibernate.query.sqm.tree.expression.SqmSetReturningFunction;
import org.hibernate.query.sqm.tree.from.SqmFrom;
import org.hibernate.query.sqm.tree.from.SqmFunctionJoin;
import org.hibernate.query.sqm.tree.from.SqmJoin;
import org.hibernate.query.sqm.tree.from.SqmRoot;
import org.hibernate.spi.EntityIdentifierNavigablePath;
import org.hibernate.spi.NavigablePath;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.descriptor.java.JavaType;
import static jakarta.persistence.metamodel.Bindable.BindableType.SINGULAR_ATTRIBUTE;
@ -143,7 +147,7 @@ public class SingularAttributeImpl<D,J>
}
@Override
public SqmAttributeJoin<D,J> createSqmJoin(
public SqmJoin<D,J> createSqmJoin(
SqmFrom<?,D> lhs,
SqmJoinType joinType,
String alias,
@ -152,6 +156,21 @@ public class SingularAttributeImpl<D,J>
if ( getType() instanceof AnyMappingDomainType ) {
throw new SemanticException( "An @Any attribute cannot be join fetched" );
}
else if ( sqmPathSource.getSqmPathType() instanceof BasicPluralType<?,?> ) {
final SqmSetReturningFunction<J> setReturningFunction = creationState.getCreationContext()
.getNodeBuilder()
.unnestArray( lhs.get( getName() ) );
//noinspection unchecked
return (SqmJoin<D, J>) new SqmFunctionJoin<>(
createNavigablePath( lhs, alias ),
setReturningFunction,
true,
setReturningFunction.getType(),
alias,
joinType,
(SqmRoot<Object>) lhs
);
}
else {
return new SqmSingularJoin<>(
lhs,

View File

@ -469,6 +469,11 @@ public abstract class CriteriaDefinition<R>
return query.from(cte);
}
@Override
public <X> JpaFunctionRoot<X> from(JpaSetReturningFunction<X> function) {
return query.from( function );
}
@Override
public JpaCriteriaQuery<Long> createCountQuery() {
return query.createCountQuery();

View File

@ -4193,6 +4193,37 @@ public interface HibernateCriteriaBuilder extends CriteriaBuilder {
@Incubating
<T> JpaExpression<T> named(Expression<T> expression, String name);
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Set-Returning functions
/**
* Create a new set-returning function expression.
*
* @since 7.0
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
* @see JpaFrom#join(JpaSetReturningFunction)
*/
@Incubating
<E> JpaSetReturningFunction<E> setReturningFunction(String name, Expression<?>... args);
/**
* Creates an unnest function expression to turn an array into a set of rows.
*
* @since 7.0
* @see JpaFrom#join(JpaSetReturningFunction)
*/
@Incubating
<E> JpaSetReturningFunction<E> unnestArray(Expression<E[]> array);
/**
* Creates an unnest function expression to turn an array into a set of rows.
*
* @since 7.0
* @see JpaFrom#join(JpaSetReturningFunction)
*/
@Incubating
<E> JpaSetReturningFunction<E> unnestCollection(Expression<? extends Collection<E>> collection);
@Override
JpaPredicate and(List<Predicate> restrictions);

View File

@ -4,10 +4,13 @@
*/
package org.hibernate.query.criteria;
import java.util.Collection;
import org.hibernate.Incubating;
import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.query.sqm.tree.SqmJoinType;
import jakarta.persistence.criteria.Expression;
import jakarta.persistence.criteria.From;
import jakarta.persistence.criteria.JoinType;
import jakarta.persistence.criteria.Subquery;
@ -62,6 +65,130 @@ public interface JpaFrom<O,T> extends JpaPath<T>, JpaFetchParent<O,T>, From<O,T>
@Incubating
<X> JpaDerivedJoin<X> join(Subquery<X> subquery, SqmJoinType joinType, boolean lateral);
/**
* Like calling the overload {@link #join(JpaSetReturningFunction, SqmJoinType)} with {@link SqmJoinType#INNER}.
*
* @see #join(JpaSetReturningFunction, SqmJoinType)
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> join(JpaSetReturningFunction<X> function);
/**
* Like calling the overload {@link #join(JpaSetReturningFunction, SqmJoinType, boolean)} passing {@code false}
* for the {@code lateral} parameter.
*
* @see #join(JpaSetReturningFunction, SqmJoinType, boolean)
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> join(JpaSetReturningFunction<X> function, SqmJoinType joinType);
/**
* Like calling the overload {@link #joinLateral(JpaSetReturningFunction, SqmJoinType)} with {@link SqmJoinType#INNER}.
*
* @see #joinLateral(JpaSetReturningFunction, SqmJoinType)
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> joinLateral(JpaSetReturningFunction<X> function);
/**
* Like calling the overload {@link #join(JpaSetReturningFunction, SqmJoinType, boolean)} passing {@code true}
* for the {@code lateral} parameter.
*
* @see #join(JpaSetReturningFunction, SqmJoinType, boolean)
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> joinLateral(JpaSetReturningFunction<X> function, SqmJoinType joinType);
/**
* Creates and returns a join node for the given set returning function.
* If function arguments refer to correlated paths, the {@code lateral} argument must be set to {@code true}.
* Failing to do so when necessary may lead to an error during query compilation or execution.
*
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> join(JpaSetReturningFunction<X> function, SqmJoinType joinType, boolean lateral);
/**
* Like calling the overload {@link #joinArray(String, SqmJoinType)} with {@link SqmJoinType#INNER}.
*
* @see #joinArray(String, SqmJoinType)
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> joinArray(String arrayAttributeName);
/**
* Like calling the overload {@link #join(JpaSetReturningFunction, SqmJoinType)} with {@link HibernateCriteriaBuilder#unnestArray(Expression)}
* with the result of {@link #get(String)} passing the given attribute name.
*
* @see #joinLateral(JpaSetReturningFunction, SqmJoinType)
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> joinArray(String arrayAttributeName, SqmJoinType joinType);
/**
* Like calling the overload {@link #joinArray(SingularAttribute, SqmJoinType)} with {@link SqmJoinType#INNER}.
*
* @see #joinArray(SingularAttribute, SqmJoinType)
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> joinArray(SingularAttribute<? super T, X[]> arrayAttribute);
/**
* Like calling the overload {@link #join(JpaSetReturningFunction, SqmJoinType)} with {@link HibernateCriteriaBuilder#unnestArray(Expression)}
* with the given attribute.
*
* @see #joinLateral(JpaSetReturningFunction, SqmJoinType)
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> joinArray(SingularAttribute<? super T, X[]> arrayAttribute, SqmJoinType joinType);
/**
* Like calling the overload {@link #joinArrayCollection(String, SqmJoinType)} with {@link SqmJoinType#INNER}.
*
* @see #joinArrayCollection(String, SqmJoinType)
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> joinArrayCollection(String collectionAttributeName);
/**
* Like calling the overload {@link #join(JpaSetReturningFunction, SqmJoinType)} with {@link HibernateCriteriaBuilder#unnestCollection(Expression)}
* with the result of {@link #get(String)} passing the given attribute name.
*
* @see #joinLateral(JpaSetReturningFunction, SqmJoinType)
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> joinArrayCollection(String collectionAttributeName, SqmJoinType joinType);
/**
* Like calling the overload {@link #joinArrayCollection(SingularAttribute, SqmJoinType)} with {@link SqmJoinType#INNER}.
*
* @see #joinArrayCollection(SingularAttribute, SqmJoinType)
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> joinArrayCollection(SingularAttribute<? super T, ? extends Collection<X>> collectionAttribute);
/**
* Like calling the overload {@link #join(JpaSetReturningFunction, SqmJoinType)} with {@link HibernateCriteriaBuilder#unnestCollection(Expression)}
* with the given attribute.
*
* @see #joinLateral(JpaSetReturningFunction, SqmJoinType)
* @since 7.0
*/
@Incubating
<X> JpaFunctionJoin<X> joinArrayCollection(SingularAttribute<? super T, ? extends Collection<X>> collectionAttribute, SqmJoinType joinType);
@Incubating
<X> JpaJoin<?, X> join(JpaCteCriteria<X> cte);

Some files were not shown because too many files have changed in this diff Show More