HHH-18731 Add generate_series() set-returning function
parent 5bd244dd20
commit 82b20a0e90
@@ -60,10 +60,11 @@ elif [ "$RDBMS" == "db2_10_5" ]; then
  goal="-Pdb=db2"
elif [ "$RDBMS" == "mssql" ] || [ "$RDBMS" == "mssql_2017" ]; then
  goal="-Pdb=mssql_ci"
# Exclude some Sybase tests on CI because they use `xmltable` function which has a memory leak on the DB version in CI
elif [ "$RDBMS" == "sybase" ]; then
  goal="-Pdb=sybase_ci"
  goal="-Pdb=sybase_ci -PexcludeTests=**.GenerateSeriesTest*"
elif [ "$RDBMS" == "sybase_jconn" ]; then
  goal="-Pdb=sybase_jconn_ci"
  goal="-Pdb=sybase_jconn_ci -PexcludeTests=**.GenerateSeriesTest*"
elif [ "$RDBMS" == "tidb" ]; then
  goal="-Pdb=tidb"
elif [ "$RDBMS" == "hana_cloud" ]; then

@@ -2958,7 +2958,7 @@ The following set-returning functions are available on many platforms:
| Function | Purpose

| <<hql-array-unnest,`unnest()`>> | Turns an array into rows
//| `generate_series()` | Creates a series of values as rows
| <<hql-from-set-returning-functions-generate-series,`generate_series()`>> | Creates a series of values as rows
|===

To use set-returning functions defined in the database, it is required to register them in a `FunctionContributor`:
@@ -2986,6 +2986,43 @@ which is not supported on some databases for user defined functions.
Hibernate ORM tries to emulate this feature by wrapping invocations as lateral subqueries and using `row_number()`,
which may lead to worse performance.

[[hql-from-set-returning-functions-generate-series]]
==== `generate_series` set-returning function

A <<hql-from-set-returning-functions,set-returning function>> that generates rows from a given start value (inclusive)
up to a given stop value (inclusive). The function has two variants:

* `generate_series(numeric, numeric [,numeric])` - Arguments are `start`, `stop`, and `step`, with a default of `1` for the optional `step` argument
* `generate_series(temporal, temporal, duration)` - Like the numeric variant, but for temporal types, and `step` is required
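
For illustration, a minimal sketch of the numeric variant in a query (the variable name and literal values are just for this example):

[source, java, indent=0]
----
// Hypothetical usage sketch: produces the values 1, 2, 3, 4, 5 as rows
List<Integer> series = session.createQuery(
				"select s from generate_series(1, 5) s order by s", Integer.class )
		.getResultList();
----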

[[hql-generate-series-example]]
====
[source, java, indent=0]
----
include::{srf-example-dir-hql}/GenerateSeriesTest.java[tags=hql-set-returning-function-generate-series-example]
----
====

To obtain the "row number" of a generated value, i.e. its ordinality, use the `index()` function.

[[hql-generate-series-ordinality-example]]
====
[source, java, indent=0]
----
include::{srf-example-dir-hql}/GenerateSeriesTest.java[tags=hql-set-returning-function-generate-series-ordinality-example]
----
====

The `step` argument can also be negative, progressing from a higher `start` value down to a lower `stop` value.
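
A minimal sketch of a descending series (literal values are again only illustrative):

[source, java, indent=0]
----
// Hypothetical sketch: produces 5, 3, 1 using a negative step
List<Integer> countdown = session.createQuery(
				"select s from generate_series(5, 1, -2) s", Integer.class )
		.getResultList();
----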

[[hql-generate-series-temporal-example]]
====
[source, java, indent=0]
----
include::{srf-example-dir-hql}/GenerateSeriesTest.java[tags=hql-set-returning-function-generate-series-temporal-example]
----
====

[[hql-join]]
=== Declaring joined entities

@@ -518,6 +518,7 @@ public class CockroachLegacyDialect extends Dialect {
		functionFactory.jsonArrayInsert_postgresql();

		functionFactory.unnest_postgresql();
		functionFactory.generateSeries( null, "ordinality", true );

		// Postgres uses # instead of ^ for XOR
		functionContributions.getFunctionRegistry().patternDescriptorBuilder( "bitxor", "(?1#?2)" )

@@ -458,6 +458,18 @@ public class DB2LegacyDialect extends Dialect {
		functionFactory.xmlagg();

		functionFactory.unnest_emulated();
		if ( supportsRecursiveCTE() ) {
			functionFactory.generateSeries_recursive( getMaximumSeriesSize(), false, true );
		}
	}

	/**
	 * DB2 doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with a top level recursive CTE which requires an upper bound on the amount
	 * of elements that the series can return.
	 */
	protected int getMaximumSeriesSize() {
		return 10000;
	}

	@Override

@@ -428,6 +428,7 @@ public class H2LegacyDialect extends Dialect {
		}

		functionFactory.unnest_h2( getMaximumArraySize() );
		functionFactory.generateSeries_h2( getMaximumSeriesSize() );
	}

	/**
@@ -440,6 +441,16 @@ public class H2LegacyDialect extends Dialect {
		return 1000;
	}

	/**
	 * Since H2 doesn't support ordinality for the {@code system_range} function or {@code lateral},
	 * it's impossible to use {@code system_range} for non-constant cases.
	 * Luckily, correlation can be emulated, but requires that there is an upper bound on the amount
	 * of elements that the series can return.
	 */
	protected int getMaximumSeriesSize() {
		return 10000;
	}

	@Override
	public @Nullable String getDefaultOrdinalityColumnName() {
		return "nord";

@@ -497,6 +497,8 @@ public class HANALegacyDialect extends Dialect {
		functionFactory.unnest_hana();
		// functionFactory.json_table();

		functionFactory.generateSeries_hana( getMaximumSeriesSize() );

		if ( getVersion().isSameOrAfter(2, 0, 20 ) ) {
			if ( getVersion().isSameOrAfter( 2, 0, 40 ) ) {
				// Introduced in 2.0 SPS 04
@@ -513,6 +515,14 @@ public class HANALegacyDialect extends Dialect {
		}
	}

	/**
	 * HANA doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with the {@code xmltable} and {@code lpad} functions.
	 */
	protected int getMaximumSeriesSize() {
		return 10000;
	}

	@Override
	public SqlAstTranslatorFactory getSqlAstTranslatorFactory() {
		return new StandardSqlAstTranslatorFactory() {

@@ -279,6 +279,7 @@ public class HSQLLegacyDialect extends Dialect {
		}

		functionFactory.unnest( "c1", "c2" );
		functionFactory.generateSeries_recursive( getMaximumSeriesSize(), true, false );

		//trim() requires parameters to be cast when used as trim character
		functionContributions.getFunctionRegistry().register( "trim", new TrimFunction(
@@ -288,6 +289,16 @@ public class HSQLLegacyDialect extends Dialect {
		) );
	}

	/**
	 * HSQLDB doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with a top level recursive CTE which requires an upper bound on the amount
	 * of elements that the series can return.
	 */
	protected int getMaximumSeriesSize() {
		// The maximum recursion depth of HSQLDB
		return 258;
	}

	@Override
	public @Nullable String getDefaultOrdinalityColumnName() {
		return "c2";

@@ -675,9 +675,22 @@ public class MySQLLegacyDialect extends Dialect {
			if ( getMySQLVersion().isSameOrAfter( 8 ) ) {
				functionFactory.unnest_emulated();
			}
			if ( supportsRecursiveCTE() ) {
				functionFactory.generateSeries_recursive( getMaximumSeriesSize(), false, false );
			}
		}
	}

	/**
	 * MySQL doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with a top level recursive CTE which requires an upper bound on the amount
	 * of elements that the series can return.
	 */
	protected int getMaximumSeriesSize() {
		// The maximum recursion depth of MySQL
		return 1000;
	}

	@Override
	public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
		super.contributeTypes( typeContributions, serviceRegistry );

@@ -335,6 +335,16 @@ public class OracleLegacyDialect extends Dialect {
		functionFactory.xmlagg();

		functionFactory.unnest_oracle();
		functionFactory.generateSeries_recursive( getMaximumSeriesSize(), true, false );
	}

	/**
	 * Oracle doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with a top level recursive CTE which requires an upper bound on the amount
	 * of elements that the series can return.
	 */
	protected int getMaximumSeriesSize() {
		return 10000;
	}

	@Override

@@ -711,6 +711,7 @@ public class PostgreSQLLegacyDialect extends Dialect {
		else {
			functionFactory.unnest_postgresql();
		}
		functionFactory.generateSeries( null, "ordinality", false );
	}

	@Override

@@ -440,6 +440,7 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
			functionFactory.leastGreatest();
			functionFactory.dateTrunc_datetrunc();
			functionFactory.trunc_round_datetrunc();
			functionFactory.generateSeries_sqlserver( getMaximumSeriesSize() );
		}
		else {
			functionContributions.getFunctionRegistry().register(
@@ -447,6 +448,24 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
					new SqlServerConvertTruncFunction( functionContributions.getTypeConfiguration() )
			);
			functionContributions.getFunctionRegistry().registerAlternateKey( "truncate", "trunc" );
			if ( supportsRecursiveCTE() ) {
				functionFactory.generateSeries_recursive( getMaximumSeriesSize(), false, false );
			}
		}
	}

	/**
	 * SQL Server doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with a top level recursive CTE which requires an upper bound on the amount
	 * of elements that the series can return.
	 */
	protected int getMaximumSeriesSize() {
		if ( getVersion().isSameOrAfter( 16 ) ) {
			return 10000;
		}
		else {
			// The maximum recursion depth of SQL Server
			return 100;
		}
	}

@@ -166,6 +166,17 @@ public class SybaseASELegacyDialect extends SybaseLegacyDialect {
		CommonFunctionFactory functionFactory = new CommonFunctionFactory( functionContributions);

		functionFactory.unnest_sybasease();
		functionFactory.generateSeries_sybasease( getMaximumSeriesSize() );
	}

	/**
	 * Sybase ASE doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with the {@code xmltable} and {@code replicate} functions.
	 */
	protected int getMaximumSeriesSize() {
		// The maximum possible value for replicating an XML tag, so that the resulting string stays below the 16K limit
		// https://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.infocenter.dc32300.1570/html/sqlug/sqlug31.htm
		return 4094;
	}

	private static boolean isAnsiNull(DatabaseMetaData databaseMetaData) {

@@ -1,8 +1,6 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html.
 * SPDX-License-Identifier: LGPL-2.1-or-later
 * Copyright Red Hat Inc. and Hibernate Authors
 */
package org.hibernate.boot.models.annotations.internal;

@@ -486,6 +486,7 @@ public class CockroachDialect extends Dialect {
		functionFactory.jsonArrayInsert_postgresql();

		functionFactory.unnest_postgresql();
		functionFactory.generateSeries( null, "ordinality", true );

		// Postgres uses # instead of ^ for XOR
		functionContributions.getFunctionRegistry().patternDescriptorBuilder( "bitxor", "(?1#?2)" )

@@ -443,6 +443,16 @@ public class DB2Dialect extends Dialect {
		functionFactory.xmlagg();

		functionFactory.unnest_emulated();
		functionFactory.generateSeries_recursive( getMaximumSeriesSize(), false, true );
	}

	/**
	 * DB2 doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with a top level recursive CTE which requires an upper bound on the amount
	 * of elements that the series can return.
	 */
	protected int getMaximumSeriesSize() {
		return 10000;
	}

	@Override

@@ -185,7 +185,10 @@ import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.Period;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalAmount;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
@@ -5618,11 +5621,102 @@ public abstract class Dialect implements ConversionContext, TypeContributor, Fun
	 * {@link Duration}.
	 */
	public void appendIntervalLiteral(SqlAppender appender, Duration literal) {
		final int nano = literal.getNano();
		final int secondsPart = literal.toSecondsPart();
		final int minutesPart = literal.toMinutesPart();
		final int hoursPart = literal.toHoursPart();
		final long daysPart = literal.toDaysPart();
		enum Unit { day, hour, minute }
		final Unit unit;
		if ( daysPart != 0 ) {
			unit = hoursPart == 0 && minutesPart == 0 && secondsPart == 0 && nano == 0
					? Unit.day
					: null;
		}
		else if ( hoursPart != 0 ) {
			unit = minutesPart == 0 && secondsPart == 0 && nano == 0
					? Unit.hour
					: null;
		}
		else if ( minutesPart != 0 ) {
			unit = secondsPart == 0 && nano == 0
					? Unit.minute
					: null;
		}
		else {
			unit = null;
		}
		appender.appendSql( "interval '" );
		appender.appendSql( literal.getSeconds() );
		appender.appendSql( '.' );
		appender.appendSql( literal.getNano() );
		appender.appendSql( "' second" );
		if ( unit != null ) {
			appender.appendSql( switch( unit ) {
				case day -> daysPart;
				case hour -> hoursPart;
				case minute -> minutesPart;
			});
			appender.appendSql( "' " );
			appender.appendSql( unit.toString() );
		}
		else {
			appender.appendSql( "interval '" );
			appender.appendSql( literal.getSeconds() );
			if ( nano > 0 ) {
				appender.appendSql( '.' );
				appender.appendSql( nano );
			}
			appender.appendSql( "' second" );
		}
	}

	/**
	 * Append a literal SQL {@code interval} representing the given Java
	 * {@link TemporalAmount}.
	 */
	public void appendIntervalLiteral(SqlAppender appender, TemporalAmount literal) {
		if ( literal instanceof Duration duration ) {
			appendIntervalLiteral( appender, duration );
		}
		else if ( literal instanceof Period period ) {
			final int years = period.getYears();
			final int months = period.getMonths();
			final int days = period.getDays();
			final boolean parenthesis = years != 0 && months != 0
					|| years != 0 && days != 0
					|| months != 0 && days != 0;
			if ( parenthesis ) {
				appender.appendSql( '(' );
			}
			boolean first = true;
			for ( java.time.temporal.TemporalUnit unit : literal.getUnits() ) {
				final long value = literal.get( unit );
				if ( value != 0 ) {
					if ( first ) {
						first = false;
					}
					else {
						appender.appendSql( "+" );
					}
					appender.appendSql( "interval '" );
					appender.appendSql( value );
					appender.appendSql( "' " );
					if ( unit == ChronoUnit.YEARS ) {
						appender.appendSql( "year" );
					}
					else if ( unit == ChronoUnit.MONTHS ) {
						appender.appendSql( "month" );
					}
					else {
						assert unit == ChronoUnit.DAYS;
						appender.appendSql( "day" );
					}
				}
			}
			if ( parenthesis ) {
				appender.appendSql( ')' );
			}
		}
		else {
			throw new IllegalArgumentException( "Unsupported temporal amount type: " + literal );
		}
	}

	/**
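
A minimal sketch of the literals the interval-rendering methods above produce (the `dialect` and `appender` variables are assumed to be in scope; the rendered strings are inferred from the code, not quoted from the commit):

[source, java, indent=0]
----
// Hypothetical illustration of appendIntervalLiteral() output
dialect.appendIntervalLiteral( appender, Duration.ofHours( 2 ) ); // interval '2' hour
dialect.appendIntervalLiteral( appender, Period.of( 1, 2, 0 ) );  // (interval '1' year+interval '2' month)
dialect.appendIntervalLiteral( appender, Period.ofDays( 7 ) );    // interval '7' day
----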

@@ -10,6 +10,7 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.Duration;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalAmount;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
@@ -20,6 +21,7 @@ import java.util.Set;
import java.util.TimeZone;
import java.util.UUID;

import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.Incubating;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
@@ -46,6 +48,7 @@ import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.exception.spi.SQLExceptionConversionDelegate;
import org.hibernate.exception.spi.ViolatedConstraintNameExtractor;
import org.hibernate.loader.ast.spi.MultiKeyLoadSizingStrategy;
import org.hibernate.mapping.CheckConstraint;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Index;
@@ -1596,6 +1599,11 @@ public class DialectDelegateWrapper extends Dialect {
		wrapped.appendIntervalLiteral( appender, literal );
	}

	@Override
	public void appendIntervalLiteral(SqlAppender appender, TemporalAmount literal) {
		wrapped.appendIntervalLiteral( appender, literal );
	}

	@Override
	public void appendUUIDLiteral(SqlAppender appender, UUID literal) {
		wrapped.appendUUIDLiteral( appender, literal );
@@ -1625,4 +1633,216 @@ public class DialectDelegateWrapper extends Dialect {
	public String getRowIdColumnString(String rowId) {
		return wrapped.getRowIdColumnString( rowId );
	}

	@Override
	public DatabaseVersion determineDatabaseVersion(DialectResolutionInfo info) {
		return wrapped.determineDatabaseVersion( info );
	}

	@Override
	public boolean isLob(int sqlTypeCode) {
		return wrapped.isLob( sqlTypeCode );
	}

	@Override
	public String getEnumTypeDeclaration(Class<? extends Enum<?>> enumType) {
		return wrapped.getEnumTypeDeclaration( enumType );
	}

	@Override
	public String[] getCreateEnumTypeCommand(String name, String[] values) {
		return wrapped.getCreateEnumTypeCommand( name, values );
	}

	@Override
	public String[] getCreateEnumTypeCommand(Class<? extends Enum<?>> enumType) {
		return wrapped.getCreateEnumTypeCommand( enumType );
	}

	@Override
	public String[] getDropEnumTypeCommand(String name) {
		return wrapped.getDropEnumTypeCommand( name );
	}

	@Override
	public String[] getDropEnumTypeCommand(Class<? extends Enum<?>> enumType) {
		return wrapped.getDropEnumTypeCommand( enumType );
	}

	@Override
	public String getCheckCondition(String columnName, Class<? extends Enum<?>> enumType) {
		return wrapped.getCheckCondition( columnName, enumType );
	}

	@Deprecated(since = "6.5", forRemoval = true)
	@Override
	public String getCheckCondition(String columnName, long[] values) {
		return wrapped.getCheckCondition( columnName, values );
	}

	@Override
	public String getCheckCondition(String columnName, Long[] values) {
		return wrapped.getCheckCondition( columnName, values );
	}

	@Override
	public String getCheckCondition(String columnName, Set<?> valueSet, JdbcType jdbcType) {
		return wrapped.getCheckCondition( columnName, valueSet, jdbcType );
	}

	@Override
	public String buildStringToBooleanCast(String trueValue, String falseValue) {
		return wrapped.buildStringToBooleanCast( trueValue, falseValue );
	}

	@Override
	public String buildStringToBooleanCastDecode(String trueValue, String falseValue) {
		return wrapped.buildStringToBooleanCastDecode( trueValue, falseValue );
	}

	@Override
	public String buildStringToBooleanDecode(String trueValue, String falseValue) {
		return wrapped.buildStringToBooleanDecode( trueValue, falseValue );
	}

	@Override
	public String getDual() {
		return wrapped.getDual();
	}

	@Override
	public String getFromDualForSelectOnly() {
		return wrapped.getFromDualForSelectOnly();
	}

	@Deprecated(since = "7.0", forRemoval = true)
	@Override
	public String getNativeIdentifierGeneratorStrategy() {
		return wrapped.getNativeIdentifierGeneratorStrategy();
	}

	@Override
	public int getTimeoutInSeconds(int millis) {
		return wrapped.getTimeoutInSeconds( millis );
	}

	@Override
	public String getBeforeDropStatement() {
		return wrapped.getBeforeDropStatement();
	}

	@Override
	public boolean useCrossReferenceForeignKeys() {
		return wrapped.useCrossReferenceForeignKeys();
	}

	@Override
	public String getCrossReferenceParentTableFilter() {
		return wrapped.getCrossReferenceParentTableFilter();
	}

	@Override
	public boolean supportsIsTrue() {
		return wrapped.supportsIsTrue();
	}

	@Override
	public String quoteCollation(String collation) {
		return wrapped.quoteCollation( collation );
	}

	@Override
	public boolean supportsInsertReturningRowId() {
		return wrapped.supportsInsertReturningRowId();
	}

	@Override
	public boolean supportsUpdateReturning() {
		return wrapped.supportsUpdateReturning();
	}

	@Override
	public boolean unquoteGetGeneratedKeys() {
		return wrapped.unquoteGetGeneratedKeys();
	}

	@Override
	public boolean supportsNationalizedMethods() {
		return wrapped.supportsNationalizedMethods();
	}

	@Override
	public boolean useArrayForMultiValuedParameters() {
		return wrapped.useArrayForMultiValuedParameters();
	}

	@Override
	public boolean supportsConflictClauseForInsertCTE() {
		return wrapped.supportsConflictClauseForInsertCTE();
	}

	@Override
	public boolean supportsFromClauseInUpdate() {
		return wrapped.supportsFromClauseInUpdate();
	}

	@Override
	public int getDefaultIntervalSecondScale() {
		return wrapped.getDefaultIntervalSecondScale();
	}

	@Override
	public boolean doesRoundTemporalOnOverflow() {
		return wrapped.doesRoundTemporalOnOverflow();
	}

	@Override
	public Boolean supportsBatchUpdates() {
		return wrapped.supportsBatchUpdates();
	}

	@Override
	public Boolean supportsRefCursors() {
		return wrapped.supportsRefCursors();
	}

	@Override
	public @Nullable String getDefaultOrdinalityColumnName() {
		return wrapped.getDefaultOrdinalityColumnName();
	}

	@Override
	public DmlTargetColumnQualifierSupport getDmlTargetColumnQualifierSupport() {
		return wrapped.getDmlTargetColumnQualifierSupport();
	}

	@Override
	public FunctionalDependencyAnalysisSupport getFunctionalDependencyAnalysisSupport() {
		return wrapped.getFunctionalDependencyAnalysisSupport();
	}

	@Override
	public String getCheckConstraintString(CheckConstraint checkConstraint) {
		return wrapped.getCheckConstraintString( checkConstraint );
	}

	@Override
	public String appendCheckConstraintOptions(CheckConstraint checkConstraint, String sqlCheckConstraint) {
		return wrapped.appendCheckConstraintOptions( checkConstraint, sqlCheckConstraint );
	}

	@Override
	public boolean supportsTableOptions() {
		return wrapped.supportsTableOptions();
	}

	@Override
	public boolean supportsBindingNullSqlTypeForSetNull() {
		return wrapped.supportsBindingNullSqlTypeForSetNull();
	}

	@Override
	public boolean supportsBindingNullForSetObject() {
		return wrapped.supportsBindingNullForSetObject();
	}
}

@@ -359,6 +359,7 @@ public class H2Dialect extends Dialect {
		functionFactory.xmlpi_h2();

		functionFactory.unnest_h2( getMaximumArraySize() );
		functionFactory.generateSeries_h2( getMaximumSeriesSize() );
	}

	/**
@@ -371,6 +372,16 @@ public class H2Dialect extends Dialect {
		return 1000;
	}

	/**
	 * Since H2 doesn't support ordinality for the {@code system_range} function or {@code lateral},
	 * it's impossible to use {@code system_range} for non-constant cases.
	 * Luckily, correlation can be emulated, but requires that there is an upper bound on the amount
	 * of elements that the series can return.
	 */
	protected int getMaximumSeriesSize() {
		return 10000;
	}

	@Override
	public @Nullable String getDefaultOrdinalityColumnName() {
		return "nord";

@@ -497,23 +497,32 @@ public class HANADialect extends Dialect {
				typeConfiguration
		);

		// Introduced in 2.0 SPS 00
		functionFactory.jsonValue_no_passing();
		functionFactory.jsonQuery_no_passing();
		functionFactory.jsonExists_hana();
		// Introduced in 2.0 SPS 00
		functionFactory.jsonValue_no_passing();
		functionFactory.jsonQuery_no_passing();
		functionFactory.jsonExists_hana();

		functionFactory.unnest_hana();
		// functionFactory.json_table();
		functionFactory.unnest_hana();
		// functionFactory.json_table();

		// Introduced in 2.0 SPS 04
		functionFactory.jsonObject_hana();
		functionFactory.jsonArray_hana();
		functionFactory.jsonArrayAgg_hana();
		functionFactory.jsonObjectAgg_hana();
		// Introduced in 2.0 SPS 04
		functionFactory.jsonObject_hana();
		functionFactory.jsonArray_hana();
		functionFactory.jsonArrayAgg_hana();
		functionFactory.jsonObjectAgg_hana();

		// functionFactory.xmltable();
		// functionFactory.xmltable();

		// functionFactory.xmlextract();
		// functionFactory.xmlextract();
		functionFactory.generateSeries_hana( getMaximumSeriesSize() );
	}

	/**
	 * HANA doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with the {@code xmltable} and {@code lpad} functions.
	 */
	protected int getMaximumSeriesSize() {
		return 10000;
	}

	@Override

@@ -214,6 +214,7 @@ public class HSQLDialect extends Dialect {
		}

		functionFactory.unnest( "c1", "c2" );
		functionFactory.generateSeries_recursive( getMaximumSeriesSize(), true, false );

		//trim() requires parameters to be cast when used as trim character
		functionContributions.getFunctionRegistry().register( "trim", new TrimFunction(
@@ -223,6 +224,16 @@ public class HSQLDialect extends Dialect {
		) );
	}

	/**
	 * HSQLDB doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with a top level recursive CTE which requires an upper bound on the amount
	 * of elements that the series can return.
	 */
	protected int getMaximumSeriesSize() {
		// The maximum recursion depth of HSQLDB
		return 258;
	}

	@Override
	public @Nullable String getDefaultOrdinalityColumnName() {
		return "c2";

@@ -28,6 +28,7 @@ import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.insert.ConflictClause;
import org.hibernate.sql.ast.tree.insert.InsertSelectStatement;
import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
import org.hibernate.sql.ast.tree.predicate.ComparisonPredicate;
import org.hibernate.sql.ast.tree.predicate.InArrayPredicate;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.ast.tree.update.UpdateStatement;
@@ -288,6 +289,22 @@ public class HSQLSqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
		emulateSelectTupleComparison( lhsExpressions, tuple.getExpressions(), operator, true );
	}

	@Override
	public void visitRelationalPredicate(ComparisonPredicate comparisonPredicate) {
		if ( isParameter( comparisonPredicate.getLeftHandExpression() )
				&& isParameter( comparisonPredicate.getRightHandExpression() ) ) {
			// HSQLDB doesn't like comparing two parameters with each other
			withParameterRenderingMode(
					SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER,
					() -> super.visitRelationalPredicate( comparisonPredicate )
			);
		}
		else {
			super.visitRelationalPredicate( comparisonPredicate );
		}
	}

	@Override
	protected void renderComparison(Expression lhs, ComparisonOperator operator, Expression rhs) {
		final JdbcMappingContainer lhsExpressionType = lhs.getExpressionType();
		if ( lhsExpressionType == null || lhsExpressionType.getJdbcTypeCount() != 1 ) {

@@ -660,6 +660,19 @@ public class MySQLDialect extends Dialect {
			if ( getMySQLVersion().isSameOrAfter( 8 ) ) {
				functionFactory.unnest_emulated();
			}
			if ( supportsRecursiveCTE() ) {
				functionFactory.generateSeries_recursive( getMaximumSeriesSize(), false, false );
			}
	}

	/**
	 * MySQL doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with a top level recursive CTE which requires an upper bound on the amount
	 * of elements that the series can return.
	 */
	protected int getMaximumSeriesSize() {
		// The maximum recursion depth of MySQL
		return 1000;
	}

	@Override

@@ -423,6 +423,16 @@ public class OracleDialect extends Dialect {
		functionFactory.xmlagg();

		functionFactory.unnest_oracle();
		functionFactory.generateSeries_recursive( getMaximumSeriesSize(), true, false );
	}

	/**
	 * Oracle doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with a top level recursive CTE which requires an upper bound on the amount
	 * of elements that the series can return.
	 */
	protected int getMaximumSeriesSize() {
		return 10000;
	}

	@Override

@@ -671,6 +671,7 @@ public class PostgreSQLDialect extends Dialect {
		else {
			functionFactory.unnest_postgresql();
		}
		functionFactory.generateSeries( null, "ordinality", false );
	}

	@Override

@@ -457,6 +457,7 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
			functionFactory.leastGreatest();
			functionFactory.dateTrunc_datetrunc();
			functionFactory.trunc_round_datetrunc();
			functionFactory.generateSeries_sqlserver( getMaximumSeriesSize() );
		}
		else {
			functionContributions.getFunctionRegistry().register(
@@ -464,6 +465,24 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
					new SqlServerConvertTruncFunction( functionContributions.getTypeConfiguration() )
			);
			functionContributions.getFunctionRegistry().registerAlternateKey( "truncate", "trunc" );
			if ( supportsRecursiveCTE() ) {
				functionFactory.generateSeries_recursive( getMaximumSeriesSize(), false, false );
			}
		}
	}

	/**
	 * SQL Server doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with a top level recursive CTE which requires an upper bound on the amount
	 * of elements that the series can return.
	 */
	protected int getMaximumSeriesSize() {
		if ( getVersion().isSameOrAfter( 16 ) ) {
			return 10000;
		}
		else {
			// The maximum recursion depth of SQL Server
			return 100;
		}
	}

@@ -183,6 +183,17 @@ public class SybaseASEDialect extends SybaseDialect {
		CommonFunctionFactory functionFactory = new CommonFunctionFactory( functionContributions);

		functionFactory.unnest_sybasease();
		functionFactory.generateSeries_sybasease( getMaximumSeriesSize() );
	}

	/**
	 * Sybase ASE doesn't support the {@code generate_series} function or {@code lateral} recursive CTEs,
	 * so it has to be emulated with the {@code xmltable} and {@code replicate} functions.
	 */
	protected int getMaximumSeriesSize() {
		// The maximum possible value for replicating an XML tag, so that the resulting string stays below the 16K limit
		// https://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.infocenter.dc32300.1570/html/sqlug/sqlug31.htm
		return 4094;
	}

	@Override

@@ -4287,4 +4287,46 @@ public class CommonFunctionFactory {
	public void unnest_hana() {
		functionRegistry.register( "unnest", new HANAUnnestFunction() );
	}

	/**
	 * Standard generate_series() function
	 */
	public void generateSeries(@Nullable String defaultValueColumnName, String defaultIndexSelectionExpression, boolean coerceToTimestamp) {
		functionRegistry.register( "generate_series", new GenerateSeriesFunction( defaultValueColumnName, defaultIndexSelectionExpression, coerceToTimestamp, typeConfiguration ) );
	}

	/**
	 * Recursive CTE generate_series() function
	 */
	public void generateSeries_recursive(int maxSeriesSize, boolean supportsInterval, boolean coerceToTimestamp) {
		functionRegistry.register( "generate_series", new CteGenerateSeriesFunction( maxSeriesSize, supportsInterval, coerceToTimestamp, typeConfiguration ) );
	}

	/**
	 * H2 generate_series() function
	 */
	public void generateSeries_h2(int maxSeriesSize) {
		functionRegistry.register( "generate_series", new H2GenerateSeriesFunction( maxSeriesSize, typeConfiguration ) );
	}

	/**
	 * SQL Server generate_series() function
	 */
	public void generateSeries_sqlserver(int maxSeriesSize) {
		functionRegistry.register( "generate_series", new SQLServerGenerateSeriesFunction( maxSeriesSize, typeConfiguration ) );
	}

	/**
	 * Sybase ASE generate_series() function
	 */
	public void generateSeries_sybasease(int maxSeriesSize) {
		functionRegistry.register( "generate_series", new SybaseASEGenerateSeriesFunction( maxSeriesSize, typeConfiguration ) );
	}

	/**
	 * HANA generate_series() function
	 */
	public void generateSeries_hana(int maxSeriesSize) {
		functionRegistry.register( "generate_series", new HANAGenerateSeriesFunction( maxSeriesSize, typeConfiguration ) );
	}
}
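
The dialects in this commit call one of these registration variants from their function-registry initialization; a minimal sketch for a hypothetical custom dialect (the class name and the series bound are illustrative only):

[source, java, indent=0]
----
public class MyCustomDialect extends Dialect {
	@Override
	public void initializeFunctionRegistry(FunctionContributions functionContributions) {
		super.initializeFunctionRegistry( functionContributions );
		final CommonFunctionFactory functionFactory = new CommonFunctionFactory( functionContributions );
		// No native generate_series on this database, so fall back to the
		// recursive-CTE emulation, capped at 10000 generated rows
		functionFactory.generateSeries_recursive( 10000, false, false );
	}
}
----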

@@ -0,0 +1,441 @@
/*
 * SPDX-License-Identifier: LGPL-2.1-or-later
 * Copyright Red Hat Inc. and Hibernate Authors
 */
package org.hibernate.dialect.function;

import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.metamodel.mapping.BasicValuedMapping;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.derived.AnonymousTupleType;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.BinaryArithmeticOperator;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.query.sqm.SetOperator;
import org.hibernate.query.sqm.function.SelfRenderingSqmSetReturningFunction;
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.query.sqm.tree.expression.NumericTypeCategory;
import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.cte.CteColumn;
import org.hibernate.sql.ast.tree.cte.CteContainer;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.cte.CteTable;
import org.hibernate.sql.ast.tree.cte.CteTableGroup;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.UnparsedNumericLiteral;
import org.hibernate.sql.ast.tree.from.FunctionTableGroup;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableGroup;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.predicate.ComparisonPredicate;
import org.hibernate.sql.ast.tree.predicate.Junction;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.ast.tree.select.QueryGroup;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.results.internal.SqlSelectionImpl;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.spi.TypeConfiguration;

import java.util.List;


/**
 * Recursive CTE based generate_series function.
 */
public class CteGenerateSeriesFunction extends NumberSeriesGenerateSeriesFunction {

	public CteGenerateSeriesFunction(int maxSeriesSize, boolean supportsIntervals, boolean coerceToTimestamp, TypeConfiguration typeConfiguration) {
		super(
				new CteGenerateSeriesSetReturningFunctionTypeResolver(),
				// Treat durations like intervals to avoid conversions
				typeConfiguration.getBasicTypeRegistry().resolve(
						java.time.Duration.class,
						supportsIntervals ? SqlTypes.INTERVAL_SECOND : SqlTypes.DURATION
				),
				coerceToTimestamp,
				maxSeriesSize
		);
	}

	@Override
	protected <T> SelfRenderingSqmSetReturningFunction<T> generateSqmSetReturningFunctionExpression(List<? extends SqmTypedNode<?>> arguments, QueryEngine queryEngine) {
		//noinspection unchecked
		return new SelfRenderingSqmSetReturningFunction<>(
				this,
				this,
				arguments,
				getArgumentsValidator(),
				getSetReturningTypeResolver(),
				(AnonymousTupleType<T>) getSetReturningTypeResolver().resolveTupleType( arguments, queryEngine.getTypeConfiguration() ),
				queryEngine.getCriteriaBuilder(),
				getName()
		) {
			@Override
			public TableGroup convertToSqlAst(NavigablePath navigablePath, String identifierVariable, boolean lateral, boolean canUseInnerJoins, boolean withOrdinality, SqmToSqlAstConverter walker) {
				final FunctionTableGroup tableGroup = (FunctionTableGroup) super.convertToSqlAst(
						navigablePath,
						identifierVariable,
						lateral,
						canUseInnerJoins,
						withOrdinality,
						walker
				);
				final AnonymousTupleTableGroupProducer tableGroupProducer = (AnonymousTupleTableGroupProducer) tableGroup.getModelPart();
				if ( !lateral ) {
					return new QueryPartTableGroup(
							navigablePath,
							tableGroupProducer,
							createCteSubquery( tableGroup, walker ),
							identifierVariable,
							tableGroupProducer.getColumnNames(),
							tableGroup.getPrimaryTableReference().getCompatibleTableExpressions(),
							lateral,
							canUseInnerJoins,
							walker.getCreationContext().getSessionFactory()
					);
				}
				else {
					final CteTableGroup cteTableGroup = new CteTableGroup(
							canUseInnerJoins,
							navigablePath,
							null,
							tableGroupProducer,
							new NamedTableReference( CteGenerateSeriesQueryTransformer.NAME, identifierVariable ),
							tableGroupProducer.getCompatibleTableExpressions()
					);
					walker.registerQueryTransformer( new CteGenerateSeriesQueryTransformer(
							tableGroup,
							cteTableGroup,
							maxSeriesSize,
							"i",
							coerceToTimestamp
					) );
					return cteTableGroup;
				}
			}
		};
	}

	protected static class CteGenerateSeriesQueryTransformer extends NumberSeriesQueryTransformer {

		public static final String NAME = "max_series";
		protected final int maxSeriesSize;

		public CteGenerateSeriesQueryTransformer(FunctionTableGroup functionTableGroup, TableGroup targetTableGroup, int maxSeriesSize, String positionColumnName, boolean coerceToTimestamp) {
			super( functionTableGroup, targetTableGroup, positionColumnName, coerceToTimestamp );
			this.maxSeriesSize = maxSeriesSize;
		}

		@Override
		public QuerySpec transform(CteContainer cteContainer, QuerySpec querySpec, SqmToSqlAstConverter converter) {
			// First add the CTE that creates the series
			if ( cteContainer.getCteStatement( CteGenerateSeriesQueryTransformer.NAME ) == null ) {
				cteContainer.addCteStatement( createSeriesCte( converter ) );
			}
			return super.transform( cteContainer, querySpec, converter );
		}

		protected CteStatement createSeriesCte(SqmToSqlAstConverter converter) {
			final BasicType<Long> longType = converter.getCreationContext().getTypeConfiguration()
					.getBasicTypeForJavaType( Long.class );
			final Expression one = new UnparsedNumericLiteral<>( "1", NumericTypeCategory.LONG, longType );
			final List<CteColumn> cteColumns = List.of( new CteColumn( "i", longType ) );

			final QuerySpec cteStart = new QuerySpec( false );
			cteStart.getSelectClause().addSqlSelection( new SqlSelectionImpl( one ) );

			final QuerySpec cteUnion = new QuerySpec( false );
			final CteTableGroup cteTableGroup = new CteTableGroup( new NamedTableReference( CteGenerateSeriesQueryTransformer.NAME, "t" ) );
			cteUnion.getFromClause().addRoot( cteTableGroup );
			final ColumnReference tIndex = new ColumnReference( cteTableGroup.getPrimaryTableReference(), "i", longType );
			final Expression nextValue = new BinaryArithmeticExpression(
					tIndex,
					BinaryArithmeticOperator.ADD,
					one,
					longType
			);
			cteUnion.getSelectClause().addSqlSelection( new SqlSelectionImpl( nextValue ) );
			cteUnion.applyPredicate(
					new ComparisonPredicate(
							nextValue,
							ComparisonOperator.LESS_THAN_OR_EQUAL,
							new UnparsedNumericLiteral<>(
									Integer.toString( maxSeriesSize ),
									NumericTypeCategory.LONG,
									longType
							)
					)
			);
			final QueryGroup cteContent = new QueryGroup( false, SetOperator.UNION_ALL, List.of( cteStart, cteUnion ) );
			final CteStatement cteStatement = new CteStatement(
					new CteTable( CteGenerateSeriesQueryTransformer.NAME, cteColumns ),
					new SelectStatement( cteContent )
			);
			cteStatement.setRecursive();
			return cteStatement;
		}
	}

	private SelectStatement createCteSubquery(FunctionTableGroup tableGroup, SqmToSqlAstConverter walker) {
		final AnonymousTupleTableGroupProducer tableGroupProducer = (AnonymousTupleTableGroupProducer) tableGroup.getModelPart();
		final ModelPart indexPart = tableGroupProducer.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
		final ModelPart elementPart = tableGroupProducer.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null );
		final NumericTypeCategory numericTypeCategory = NumericTypeCategory.BIG_DECIMAL;
		final BasicType<?> resultType = (BasicType<?>) elementPart.getSingleJdbcMapping();
		final BasicType<Integer> integerType = walker.getCreationContext().getTypeConfiguration()
				.getBasicTypeForJavaType( Integer.class );
		final BasicType<Boolean> booleanType = walker.getCreationContext().getTypeConfiguration()
				.getBasicTypeForJavaType( Boolean.class );

		final JdbcType boundType = resultType.getJdbcType();
		final boolean castTimestamp = coerceToTimestamp
				&& (boundType.getDdlTypeCode() == SqlTypes.DATE || boundType.getDdlTypeCode() == SqlTypes.TIME);

		final List<? extends SqlAstNode> arguments = tableGroup.getPrimaryTableReference().getFunctionExpression()
				.getArguments();
		final Expression start = castTimestamp
				? castToTimestamp( arguments.get( 0 ), walker )
				: (Expression) arguments.get( 0 );
		final Expression stop = castTimestamp
				? castToTimestamp( arguments.get( 1 ), walker )
				: (Expression) arguments.get( 1 );
		final Expression explicitStep = arguments.size() > 2 ? (Expression) arguments.get( 2 ) : null;
		final Expression step = explicitStep != null
				? explicitStep
				: new UnparsedNumericLiteral<>( "1", numericTypeCategory, resultType );

		final String cteName = "generate_series";
		final List<CteColumn> cteColumns;
		if ( indexPart == null ) {
			cteColumns = List.of( new CteColumn( "v", resultType ) );
		}
		else {
			cteColumns = List.of(
					new CteColumn( "v", resultType ),
					new CteColumn( "i", indexPart.getSingleJdbcMapping() )
			);
		}

		// Select the start value and check if the step can progress towards the stop value
		final QuerySpec cteStart = new QuerySpec( false );
		if ( explicitStep == null ) {
			cteStart.getSelectClause().addSqlSelection( new SqlSelectionImpl( start ) );
		}
		else {
			// For explicit steps, we need to add the step 0 times in the initial part of the recursive CTE,
			// in order for the database to recognize the correct result type of the CTE column
			cteStart.getSelectClause().addSqlSelection( new SqlSelectionImpl( add(
					start,
					multiply( step, 0, integerType ),
					walker
			) ) );
		}

		if ( indexPart != null ) {
			// ordinal is 1 based
			cteStart.getSelectClause().addSqlSelection( new SqlSelectionImpl(
					new UnparsedNumericLiteral<>( "1", NumericTypeCategory.INTEGER, integerType )
			) );
		}

		// Add a predicate to ensure the start value is valid
		if ( explicitStep == null ) {
			// The default step is 1, so just check if start <= stop
			cteStart.applyPredicate(
					new ComparisonPredicate(
							start,
							ComparisonOperator.LESS_THAN_OR_EQUAL,
							stop
					)
			);
		}
		else {
			// When start <= stop, only produce an initial result if the step is positive i.e. step > step*-1
			final Predicate positiveProgress = new Junction(
					Junction.Nature.CONJUNCTION,
					List.of(
							new ComparisonPredicate(
									start,
									ComparisonOperator.LESS_THAN_OR_EQUAL,
									stop
							),
							new ComparisonPredicate(
									step,
									ComparisonOperator.GREATER_THAN,
									multiply( step, -1, integerType )
							)
					),
					booleanType
			);
			// When start >= stop, only produce an initial result if the step is negative i.e. step > step*-1
			final Predicate negativeProgress = new Junction(
					Junction.Nature.CONJUNCTION,
					List.of(
							new ComparisonPredicate(
									start,
									ComparisonOperator.GREATER_THAN_OR_EQUAL,
									stop
							),
							new ComparisonPredicate(
									step,
									ComparisonOperator.LESS_THAN,
									multiply( step, -1, integerType )
							)
					),
					booleanType
			);
			cteStart.applyPredicate(
					new Junction(
							Junction.Nature.DISJUNCTION,
							List.of( positiveProgress, negativeProgress ),
							booleanType
					)
			);
		}

		// The union part just adds the step to the previous value as long as the stop value is not reached
		final QuerySpec cteUnion = new QuerySpec( false );
		final CteTableGroup cteTableGroup = new CteTableGroup( new NamedTableReference( cteName, "t" ) );
		cteUnion.getFromClause().addRoot( cteTableGroup );
		final ColumnReference tValue = new ColumnReference( cteTableGroup.getPrimaryTableReference(), "v", resultType );
		final ColumnReference tIndex = indexPart == null
				? null
				: new ColumnReference(
						cteTableGroup.getPrimaryTableReference(),
						"i",
						indexPart.getSingleJdbcMapping()
				);
		final Expression nextValue = add( tValue, step, walker );
		cteUnion.getSelectClause().addSqlSelection( new SqlSelectionImpl( nextValue ) );
		if ( tIndex != null ) {
			cteUnion.getSelectClause().addSqlSelection( new SqlSelectionImpl( new BinaryArithmeticExpression(
					tIndex,
					BinaryArithmeticOperator.ADD,
					new UnparsedNumericLiteral<>( "1", NumericTypeCategory.INTEGER, integerType ),
					(BasicValuedMapping) indexPart.getSingleJdbcMapping()
			) ) );
		}

		// Add a predicate to ensure the current value is valid
		if ( explicitStep == null ) {
			// The default step is 1, so just check if value <= stop
			cteUnion.applyPredicate(
					new ComparisonPredicate(
							nextValue,
							ComparisonOperator.LESS_THAN_OR_EQUAL,
							stop
					)
			);
		}
		else {
			// When start < stop, value is only valid if it's less than or equal to stop
			final Predicate positiveProgress = new Junction(
					Junction.Nature.CONJUNCTION,
					List.of(
							new ComparisonPredicate(
									start,
									ComparisonOperator.LESS_THAN,
									stop
							),
							new ComparisonPredicate(
									nextValue,
									ComparisonOperator.LESS_THAN_OR_EQUAL,
									stop
							)
					),
					booleanType
			);
			// When start > stop, value is only valid if it's greater than or equal to stop
			final Predicate negativeProgress = new Junction(
					Junction.Nature.CONJUNCTION,
					List.of(
							new ComparisonPredicate(
									start,
									ComparisonOperator.GREATER_THAN,
									stop
							),
							new ComparisonPredicate(
									nextValue,
									ComparisonOperator.GREATER_THAN_OR_EQUAL,
									stop
							)
					),
					booleanType
			);
			cteUnion.applyPredicate(
					new Junction(
							Junction.Nature.DISJUNCTION,
							List.of( positiveProgress, negativeProgress ),
							booleanType
					)
			);
		}

		// Main query selects the columns from the CTE
		final QueryGroup cteContent = new QueryGroup( false, SetOperator.UNION_ALL, List.of( cteStart, cteUnion ) );
		final QuerySpec mainQuery = new QuerySpec( false );
		final SelectStatement selectStatement = new SelectStatement( mainQuery );
		final CteStatement cteStatement = new CteStatement(
				new CteTable( cteName, cteColumns ),
				new SelectStatement( cteContent )
		);
		cteStatement.setRecursive();
		selectStatement.addCteStatement( cteStatement );
		mainQuery.getFromClause().addRoot( cteTableGroup );
		mainQuery.getSelectClause().addSqlSelection( new SqlSelectionImpl( tValue ) );
		if ( indexPart != null ) {
			mainQuery.getSelectClause().addSqlSelection( new SqlSelectionImpl( tIndex ) );
		}
		return selectStatement;
	}

	static class CteGenerateSeriesSetReturningFunctionTypeResolver extends NumberSeriesGenerateSeriesSetReturningFunctionTypeResolver {

		public CteGenerateSeriesSetReturningFunctionTypeResolver() {
			super( "v", "i" );
		}

		public CteGenerateSeriesSetReturningFunctionTypeResolver(@Nullable String defaultValueColumnName, String defaultIndexSelectionExpression) {
			super( defaultValueColumnName, defaultIndexSelectionExpression );
		}

		@Override
		public SelectableMapping[] resolveFunctionReturnType(
				List<? extends SqlAstNode> arguments,
				String tableIdentifierVariable,
				boolean lateral,
				boolean withOrdinality,
				SqmToSqlAstConverter converter) {
			if ( !lateral ) {
				return super.resolveFunctionReturnType( arguments, tableIdentifierVariable, lateral, withOrdinality, converter );
			}
			else {
				return resolveIterationVariableBasedFunctionReturnType( arguments, tableIdentifierVariable, lateral, withOrdinality, converter );
			}
		}
	}

	@Override
	protected void renderGenerateSeries(
			SqlAppender sqlAppender,
			Expression start,
			Expression stop,
			@Nullable Expression step,
			AnonymousTupleTableGroupProducer tupleType,
			String tableIdentifierVariable,
			SqlAstTranslator<?> walker) {
		throw new UnsupportedOperationException( "Function expands to custom SQL AST" );
	}
}
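
To make the recursive-CTE emulation above more concrete, here is a rough sketch of the translation it aims for; the SQL shown is illustrative of the general shape only, not the literal output of the translator:

[source, java, indent=0]
----
// HQL on a database without a native generate_series function
List<Integer> values = session.createQuery(
				"select s from generate_series(1, 5) s", Integer.class )
		.getResultList();

// is emulated with a recursive CTE of roughly this shape:
//   with recursive generate_series (v) as (
//       select 1 where 1 <= 5
//       union all
//       select t.v + 1 from generate_series t where t.v + 1 <= 5
//   )
//   select t.v from generate_series t
----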
@ -0,0 +1,68 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
* Copyright Red Hat Inc. and Hibernate Authors
|
||||
*/
|
||||
package org.hibernate.dialect.function;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.metamodel.mapping.MappingModelExpressible;
|
||||
import org.hibernate.query.sqm.produce.function.ArgumentsValidator;
|
||||
import org.hibernate.query.sqm.produce.function.internal.AbstractFunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmExpression;
|
||||
import org.hibernate.type.BasicType;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* A {@link ArgumentsValidator} that validates the array type is compatible with the element type.
|
||||
*/
|
||||
public class GenerateSeriesArgumentTypeResolver extends AbstractFunctionArgumentTypeResolver {
|
||||
|
||||
private final BasicType<Duration> durationType;
|
||||
|
||||
public GenerateSeriesArgumentTypeResolver(BasicType<Duration> durationType) {
|
||||
this.durationType = durationType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
if ( argumentIndex == 0 ) {
|
||||
final MappingModelExpressible<?> mappingModelExpressible = converter.resolveFunctionImpliedReturnType();
|
||||
return mappingModelExpressible != null
|
||||
? mappingModelExpressible
|
||||
: converter.determineValueMapping( (SqmExpression<?>) arguments.get( 1 ) );
|
||||
}
|
||||
else if ( argumentIndex == 1 ) {
|
||||
final MappingModelExpressible<?> mappingModelExpressible = converter.resolveFunctionImpliedReturnType();
|
||||
return mappingModelExpressible != null
|
||||
? mappingModelExpressible
|
||||
: converter.determineValueMapping( (SqmExpression<?>) arguments.get( 0 ) );
|
||||
}
|
||||
else {
|
||||
assert argumentIndex == 2;
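// Step argument: temporal series resolve it to the duration type, numeric series to the same type as start/stop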
|
||||
final MappingModelExpressible<?> implied = converter.resolveFunctionImpliedReturnType();
|
||||
final MappingModelExpressible<?> firstType;
|
||||
final MappingModelExpressible<?> resultType;
|
||||
if ( implied != null ) {
|
||||
resultType = implied;
|
||||
}
|
||||
else if ( (firstType = converter.determineValueMapping( (SqmExpression<?>) arguments.get( 0 ) )) != null ) {
|
||||
resultType = firstType;
|
||||
}
|
||||
else {
|
||||
resultType = converter.determineValueMapping( (SqmExpression<?>) arguments.get( 1 ) );
|
||||
}
|
||||
|
||||
assert resultType != null;
|
||||
if ( resultType.getSingleJdbcMapping().getJdbcType().isTemporal() ) {
|
||||
return durationType;
|
||||
}
|
||||
else {
|
||||
return resultType;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,133 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
* Copyright Red Hat Inc. and Hibernate Authors
|
||||
*/
|
||||
package org.hibernate.dialect.function;
|
||||
|
||||
import org.hibernate.metamodel.mapping.JdbcMapping;
|
||||
import org.hibernate.metamodel.model.domain.DomainType;
|
||||
import org.hibernate.query.sqm.SqmExpressible;
|
||||
import org.hibernate.query.sqm.produce.function.ArgumentsValidator;
|
||||
import org.hibernate.query.sqm.produce.function.FunctionArgumentException;
|
||||
import org.hibernate.query.sqm.produce.function.StandardArgumentsValidators;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.type.descriptor.jdbc.JdbcType;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
/**
* An {@link ArgumentsValidator} that validates the arguments of the {@code generate_series} function:
* start and stop must share a type; the step, when given, must have the same type as start and stop for
* numeric series, and must be an interval or duration for temporal series (where it is required).
*/
|
||||
public class GenerateSeriesArgumentValidator implements ArgumentsValidator {
|
||||
|
||||
private final ArgumentsValidator delegate;
|
||||
|
||||
public GenerateSeriesArgumentValidator() {
|
||||
this.delegate = StandardArgumentsValidators.between( 2, 3 );
|
||||
}
|
||||
|
||||
@Override
|
||||
public void validate(
|
||||
List<? extends SqmTypedNode<?>> arguments,
|
||||
String functionName,
|
||||
TypeConfiguration typeConfiguration) {
|
||||
delegate.validate( arguments, functionName, typeConfiguration );
|
||||
|
||||
final SqmTypedNode<?> start = arguments.get( 0 );
|
||||
final SqmTypedNode<?> stop = arguments.get( 1 );
|
||||
final SqmTypedNode<?> step = arguments.size() > 2 ? arguments.get( 2 ) : null;
|
||||
|
||||
final SqmExpressible<?> startExpressible = start.getExpressible();
|
||||
final SqmExpressible<?> stopExpressible = stop.getExpressible();
|
||||
final SqmExpressible<?> stepExpressible = step == null ? null : step.getExpressible();
|
||||
|
||||
final DomainType<?> startType = startExpressible == null ? null : startExpressible.getSqmType();
|
||||
final DomainType<?> stopType = stopExpressible == null ? null : stopExpressible.getSqmType();
|
||||
final DomainType<?> stepType = stepExpressible == null ? null : stepExpressible.getSqmType();
|
||||
|
||||
if ( startType == null ) {
|
||||
throw unknownType( functionName, arguments, 0 );
|
||||
}
|
||||
if ( stopType == null ) {
|
||||
throw unknownType( functionName, arguments, 1 );
|
||||
}
|
||||
|
||||
if ( startType != stopType ) {
|
||||
throw new FunctionArgumentException(
|
||||
String.format(
|
||||
"Start and stop parameters of function '%s()' must be of the same type, but found [%s,%s]",
|
||||
functionName,
|
||||
startType.getTypeName(),
|
||||
stopType.getTypeName()
|
||||
)
|
||||
);
|
||||
}
|
||||
final JdbcMapping type = (JdbcMapping) startType;
|
||||
final JdbcType jdbcType = type.getJdbcType();
|
||||
if ( jdbcType.isInteger() || jdbcType.isDecimal() ) {
|
||||
if ( step != null ) {
|
||||
if ( stepType == null ) {
|
||||
throw unknownType( functionName, arguments, 2 );
|
||||
}
|
||||
if ( stepType != startType ) {
|
||||
throw new FunctionArgumentException(
|
||||
String.format(
|
||||
"Step parameter of function '%s()' is of type '%s', but must be of the same type as start and stop [%s,%s]",
|
||||
functionName,
|
||||
stepType.getTypeName(),
|
||||
startType.getTypeName(),
|
||||
stopType.getTypeName()
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
else if ( jdbcType.isTemporal() ) {
|
||||
if ( step == null ) {
|
||||
throw new FunctionArgumentException(
|
||||
String.format(
|
||||
Locale.ROOT,
|
||||
"Function %s() requires exactly 3 arguments when invoked with a temporal argument, but %d arguments given",
|
||||
functionName,
|
||||
arguments.size()
|
||||
)
|
||||
);
|
||||
}
|
||||
if ( stepType == null ) {
|
||||
throw unknownType( functionName, arguments, 2 );
|
||||
}
|
||||
final JdbcType stepJdbcType = ((JdbcMapping) stepType).getJdbcType();
|
||||
if ( !stepJdbcType.isInterval() && !stepJdbcType.isDuration() ) {
|
||||
throw new FunctionArgumentException(
|
||||
String.format(
|
||||
"Step parameter of function '%s()' is of type '%s', but must be of type interval",
|
||||
functionName,
|
||||
stepType.getTypeName()
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw new FunctionArgumentException(
|
||||
String.format(
|
||||
"Unsupported type '%s' for function '%s()'. Only integral, decimal and timestamp types are supported.",
|
||||
startType.getTypeName(),
|
||||
functionName
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
private FunctionArgumentException unknownType(String functionName, List<? extends SqmTypedNode<?>> arguments, int parameterIndex) {
|
||||
return new FunctionArgumentException(
|
||||
String.format(
|
||||
"Couldn't determine type of parameter %d of function '%s()'. Argument is '%s'",
|
||||
parameterIndex,
|
||||
functionName,
|
||||
arguments.get( parameterIndex )
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,103 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
* Copyright Red Hat Inc. and Hibernate Authors
|
||||
*/
|
||||
package org.hibernate.dialect.function;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.metamodel.mapping.CollectionPart;
|
||||
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
|
||||
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingSetReturningFunctionDescriptor;
|
||||
import org.hibernate.query.sqm.produce.function.SetReturningFunctionTypeResolver;
|
||||
import org.hibernate.sql.ast.SqlAstTranslator;
|
||||
import org.hibernate.sql.ast.spi.SqlAppender;
|
||||
import org.hibernate.sql.ast.tree.SqlAstNode;
|
||||
import org.hibernate.sql.ast.tree.expression.Expression;
|
||||
import org.hibernate.type.BasicType;
|
||||
import org.hibernate.type.SqlTypes;
|
||||
import org.hibernate.type.descriptor.jdbc.JdbcType;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Standard generate_series function.
|
||||
*/
|
||||
public class GenerateSeriesFunction extends AbstractSqmSelfRenderingSetReturningFunctionDescriptor {
|
||||
|
||||
protected final boolean coerceToTimestamp;
|
||||
|
||||
public GenerateSeriesFunction(@Nullable String defaultValueColumnName, String defaultIndexSelectionExpression, boolean coerceToTimestamp, TypeConfiguration typeConfiguration) {
|
||||
this(
|
||||
new GenerateSeriesSetReturningFunctionTypeResolver(
|
||||
defaultValueColumnName,
|
||||
defaultIndexSelectionExpression
|
||||
),
|
||||
// Treat durations like intervals to avoid conversions
|
||||
typeConfiguration.getBasicTypeRegistry().resolve( java.time.Duration.class, SqlTypes.INTERVAL_SECOND ),
|
||||
coerceToTimestamp
|
||||
);
|
||||
}
|
||||
|
||||
protected GenerateSeriesFunction(SetReturningFunctionTypeResolver setReturningFunctionTypeResolver, BasicType<Duration> durationType) {
|
||||
this( setReturningFunctionTypeResolver, durationType, false );
|
||||
}
|
||||
|
||||
protected GenerateSeriesFunction(SetReturningFunctionTypeResolver setReturningFunctionTypeResolver, BasicType<Duration> durationType, boolean coerceToTimestamp) {
|
||||
super(
|
||||
"generate_series",
|
||||
new GenerateSeriesArgumentValidator(),
|
||||
setReturningFunctionTypeResolver,
|
||||
new GenerateSeriesArgumentTypeResolver( durationType )
|
||||
);
|
||||
this.coerceToTimestamp = coerceToTimestamp;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void render(
|
||||
SqlAppender sqlAppender,
|
||||
List<? extends SqlAstNode> sqlAstArguments,
|
||||
AnonymousTupleTableGroupProducer tupleType,
|
||||
String tableIdentifierVariable,
|
||||
SqlAstTranslator<?> walker) {
|
||||
final Expression start = (Expression) sqlAstArguments.get( 0 );
|
||||
final Expression stop = (Expression) sqlAstArguments.get( 1 );
|
||||
final Expression step = sqlAstArguments.size() > 2 ? (Expression) sqlAstArguments.get( 2 ) : null;
|
||||
renderGenerateSeries( sqlAppender, start, stop, step, tupleType, tableIdentifierVariable, walker );
|
||||
}
|
||||
|
||||
protected void renderGenerateSeries(
|
||||
SqlAppender sqlAppender,
|
||||
Expression start,
|
||||
Expression stop,
|
||||
@Nullable Expression step,
|
||||
AnonymousTupleTableGroupProducer tupleType,
|
||||
String tableIdentifierVariable,
|
||||
SqlAstTranslator<?> walker) {
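// Renders e.g. "generate_series(start,stop[,step]) [with ordinality]"; date/time bounds are first cast
// to timestamp when the dialect requested coercion (illustrative summary of the rendering below)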
|
||||
final JdbcType boundType = start.getExpressionType().getSingleJdbcMapping().getJdbcType();
|
||||
final boolean castTimestamp = coerceToTimestamp
|
||||
&& (boundType.getDdlTypeCode() == SqlTypes.DATE || boundType.getDdlTypeCode() == SqlTypes.TIME);
|
||||
sqlAppender.appendSql( "generate_series(" );
|
||||
if ( castTimestamp ) {
|
||||
sqlAppender.appendSql( "cast(" );
|
||||
start.accept( walker );
|
||||
sqlAppender.appendSql( " as timestamp),cast(" );
|
||||
stop.accept( walker );
|
||||
sqlAppender.appendSql( " as timestamp)" );
|
||||
}
|
||||
else {
|
||||
start.accept( walker );
|
||||
sqlAppender.appendSql( ',' );
|
||||
stop.accept( walker );
|
||||
}
|
||||
if ( step != null ) {
|
||||
sqlAppender.appendSql( ',' );
|
||||
step.accept( walker );
|
||||
}
|
||||
sqlAppender.appendSql( ')' );
|
||||
if ( tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null ) != null ) {
|
||||
sqlAppender.append( " with ordinality" );
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,154 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
* Copyright Red Hat Inc. and Hibernate Authors
|
||||
*/
|
||||
package org.hibernate.dialect.function;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.internal.util.NullnessHelper;
|
||||
import org.hibernate.metamodel.mapping.CollectionPart;
|
||||
import org.hibernate.metamodel.mapping.JdbcMapping;
|
||||
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
|
||||
import org.hibernate.metamodel.mapping.SelectableMapping;
|
||||
import org.hibernate.metamodel.mapping.SelectablePath;
|
||||
import org.hibernate.metamodel.mapping.SqlTypedMapping;
|
||||
import org.hibernate.metamodel.mapping.internal.SelectableMappingImpl;
|
||||
import org.hibernate.metamodel.model.domain.DomainType;
|
||||
import org.hibernate.query.derived.AnonymousTupleType;
|
||||
import org.hibernate.query.sqm.SqmExpressible;
|
||||
import org.hibernate.query.sqm.produce.function.SetReturningFunctionTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.sql.ast.tree.SqlAstNode;
|
||||
import org.hibernate.sql.ast.tree.expression.Expression;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
* A {@link SetReturningFunctionTypeResolver} for the {@code generate_series} function, producing a tuple of the
* generated value and, when ordinality is requested, its index.
*
* @since 7.0
*/
|
||||
public class GenerateSeriesSetReturningFunctionTypeResolver implements SetReturningFunctionTypeResolver {
|
||||
|
||||
protected final @Nullable String defaultValueColumnName;
|
||||
protected final String defaultIndexSelectionExpression;
|
||||
|
||||
public GenerateSeriesSetReturningFunctionTypeResolver(@Nullable String defaultValueColumnName, String defaultIndexSelectionExpression) {
|
||||
this.defaultValueColumnName = defaultValueColumnName;
|
||||
this.defaultIndexSelectionExpression = defaultIndexSelectionExpression;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AnonymousTupleType<?> resolveTupleType(List<? extends SqmTypedNode<?>> arguments, TypeConfiguration typeConfiguration) {
|
||||
final SqmTypedNode<?> start = arguments.get( 0 );
|
||||
final SqmTypedNode<?> stop = arguments.get( 1 );
|
||||
final SqmExpressible<?> startExpressible = start.getExpressible();
|
||||
final SqmExpressible<?> stopExpressible = stop.getExpressible();
|
||||
final DomainType<?> type = NullnessHelper.coalesce(
|
||||
startExpressible == null ? null : startExpressible.getSqmType(),
|
||||
stopExpressible == null ? null : stopExpressible.getSqmType()
|
||||
);
|
||||
if ( type == null ) {
|
||||
throw new IllegalArgumentException( "Couldn't determine types of arguments to function 'generate_series'" );
|
||||
}
|
||||
|
||||
final SqmExpressible<?>[] componentTypes = new SqmExpressible<?>[]{ type, typeConfiguration.getBasicTypeForJavaType( Long.class ) };
|
||||
final String[] componentNames = new String[]{ CollectionPart.Nature.ELEMENT.getName(), CollectionPart.Nature.INDEX.getName() };
|
||||
return new AnonymousTupleType<>( componentTypes, componentNames );
|
||||
}
|
||||
|
||||
@Override
|
||||
public SelectableMapping[] resolveFunctionReturnType(
|
||||
List<? extends SqlAstNode> arguments,
|
||||
String tableIdentifierVariable,
|
||||
boolean lateral,
|
||||
boolean withOrdinality,
|
||||
SqmToSqlAstConverter converter) {
|
||||
final Expression start = (Expression) arguments.get( 0 );
|
||||
final Expression stop = (Expression) arguments.get( 1 );
|
||||
final JdbcMappingContainer expressionType = NullnessHelper.coalesce(
|
||||
start.getExpressionType(),
|
||||
stop.getExpressionType()
|
||||
);
|
||||
final JdbcMapping type = expressionType.getSingleJdbcMapping();
|
||||
if ( type == null ) {
|
||||
throw new IllegalArgumentException( "Couldn't determine types of arguments to function 'generate_series'" );
|
||||
}
|
||||
|
||||
final SelectableMapping indexMapping = withOrdinality ? new SelectableMappingImpl(
|
||||
"",
|
||||
defaultIndexSelectionExpression,
|
||||
new SelectablePath( CollectionPart.Nature.INDEX.getName() ),
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
converter.getCreationContext().getTypeConfiguration().getBasicTypeForJavaType( Long.class )
|
||||
) : null;
|
||||
|
||||
final String elementSelectionExpression = defaultValueColumnName == null
|
||||
? tableIdentifierVariable
|
||||
: defaultValueColumnName;
|
||||
final SelectableMapping elementMapping;
|
||||
if ( expressionType instanceof SqlTypedMapping typedMapping ) {
|
||||
elementMapping = new SelectableMappingImpl(
|
||||
"",
|
||||
elementSelectionExpression,
|
||||
new SelectablePath( CollectionPart.Nature.ELEMENT.getName() ),
|
||||
null,
|
||||
null,
|
||||
typedMapping.getColumnDefinition(),
|
||||
typedMapping.getLength(),
|
||||
typedMapping.getPrecision(),
|
||||
typedMapping.getScale(),
|
||||
typedMapping.getTemporalPrecision(),
|
||||
typedMapping.isLob(),
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
type
|
||||
);
|
||||
}
|
||||
else {
|
||||
elementMapping = new SelectableMappingImpl(
|
||||
"",
|
||||
elementSelectionExpression,
|
||||
new SelectablePath( CollectionPart.Nature.ELEMENT.getName() ),
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
type
|
||||
);
|
||||
}
|
||||
final SelectableMapping[] returnType;
|
||||
if ( indexMapping == null ) {
|
||||
returnType = new SelectableMapping[]{ elementMapping };
|
||||
}
|
||||
else {
|
||||
returnType = new SelectableMapping[] {elementMapping, indexMapping};
|
||||
}
|
||||
return returnType;
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,239 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
* Copyright Red Hat Inc. and Hibernate Authors
|
||||
*/
|
||||
package org.hibernate.dialect.function;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.internal.util.NullnessHelper;
|
||||
import org.hibernate.metamodel.mapping.CollectionPart;
|
||||
import org.hibernate.metamodel.mapping.JdbcMapping;
|
||||
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
|
||||
import org.hibernate.metamodel.mapping.ModelPart;
|
||||
import org.hibernate.metamodel.mapping.SelectableMapping;
|
||||
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
|
||||
import org.hibernate.query.derived.AnonymousTupleType;
|
||||
import org.hibernate.query.spi.QueryEngine;
|
||||
import org.hibernate.query.sqm.function.SelfRenderingSqmSetReturningFunction;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.spi.NavigablePath;
|
||||
import org.hibernate.sql.ast.SqlAstTranslator;
|
||||
import org.hibernate.sql.ast.spi.SqlAppender;
|
||||
import org.hibernate.sql.ast.tree.SqlAstNode;
|
||||
import org.hibernate.sql.ast.tree.expression.ColumnReference;
|
||||
import org.hibernate.sql.ast.tree.expression.Expression;
|
||||
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
|
||||
import org.hibernate.sql.ast.tree.expression.Literal;
|
||||
import org.hibernate.sql.ast.tree.from.FunctionTableGroup;
|
||||
import org.hibernate.sql.ast.tree.from.TableGroup;
|
||||
import org.hibernate.type.SqlTypes;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* H2 generate_series function.
|
||||
*
|
||||
* When possible, the {@code system_range} function is used directly.
|
||||
* If ordinality is requested, the arguments are temporal, or any argument is something other than a literal
* or parameter, this emulation comes into play.
|
||||
* It essentially renders a {@code system_range} with a specified maximum size that serves as "iteration variable".
|
||||
* References to the value are replaced with expressions of the form {@code start + step * iterationVariable}
|
||||
* and a condition is added either to the query or to the join where the function is used, to ensure that the
* generated value does not progress past the stop value.
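* <p>
* For example (an illustrative sketch, not the exact SQL Hibernate renders), {@code generate_series(2, 10, 2)}
* with ordinality may be emulated roughly as {@code system_range(1, maxSeriesSize) t} with the value read as
* {@code 2 + 2 * (t.x - 1)}, the ordinal read as {@code t.x}, and a predicate {@code 2 + 2 * (t.x - 1) <= 10}.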
|
||||
*/
|
||||
public class H2GenerateSeriesFunction extends NumberSeriesGenerateSeriesFunction {
|
||||
|
||||
public H2GenerateSeriesFunction(int maxSeriesSize, TypeConfiguration typeConfiguration) {
|
||||
super(
|
||||
new H2GenerateSeriesSetReturningFunctionTypeResolver(),
|
||||
// Treat durations like intervals to avoid conversions
|
||||
typeConfiguration.getBasicTypeRegistry().resolve( java.time.Duration.class, SqlTypes.INTERVAL_SECOND ),
|
||||
maxSeriesSize
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean rendersIdentifierVariable(List<SqlAstNode> arguments, SessionFactoryImplementor sessionFactory) {
|
||||
// To make our lives simpler during emulation
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected <T> SelfRenderingSqmSetReturningFunction<T> generateSqmSetReturningFunctionExpression(List<? extends SqmTypedNode<?>> arguments, QueryEngine queryEngine) {
|
||||
//noinspection unchecked
|
||||
return new SelfRenderingSqmSetReturningFunction<>(
|
||||
this,
|
||||
this,
|
||||
arguments,
|
||||
getArgumentsValidator(),
|
||||
getSetReturningTypeResolver(),
|
||||
(AnonymousTupleType<T>) getSetReturningTypeResolver().resolveTupleType( arguments, queryEngine.getTypeConfiguration() ),
|
||||
queryEngine.getCriteriaBuilder(),
|
||||
getName()
|
||||
) {
|
||||
@Override
|
||||
public TableGroup convertToSqlAst(
|
||||
NavigablePath navigablePath,
|
||||
String identifierVariable,
|
||||
boolean lateral,
|
||||
boolean canUseInnerJoins,
|
||||
boolean withOrdinality,
|
||||
SqmToSqlAstConverter walker) {
|
||||
// Register a transformer that adds a join predicate "start+(step*(ordinal-1))<=stop"
|
||||
final FunctionTableGroup functionTableGroup = (FunctionTableGroup) super.convertToSqlAst(
|
||||
navigablePath,
|
||||
identifierVariable,
|
||||
lateral,
|
||||
canUseInnerJoins,
|
||||
withOrdinality,
|
||||
walker
|
||||
);
|
||||
//noinspection unchecked
|
||||
final List<SqlAstNode> sqlArguments = (List<SqlAstNode>) functionTableGroup.getPrimaryTableReference()
|
||||
.getFunctionExpression()
|
||||
.getArguments();
|
||||
final Expression startExpression = (Expression) sqlArguments.get( 0 );
|
||||
final Expression stopExpression = (Expression) sqlArguments.get( 1 );
|
||||
final Expression explicitStepExpression = sqlArguments.size() > 2
|
||||
? (Expression) sqlArguments.get( 2 )
|
||||
: null;
|
||||
final boolean needsEmulation = needsEmulation( startExpression )
|
||||
|| needsEmulation( stopExpression )
|
||||
|| explicitStepExpression != null && needsEmulation( explicitStepExpression );
|
||||
final ModelPart elementPart = functionTableGroup.getModelPart()
|
||||
.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null );
|
||||
final boolean isTemporal = elementPart.getSingleJdbcMapping().getJdbcType().isTemporal();
|
||||
// Only do this transformation if one of the arguments is anything but a literal or parameter,
|
||||
// ordinality is requested or the result is a temporal (H2 only supports numerics in system_range)
|
||||
if ( needsEmulation || withOrdinality || isTemporal ) {
|
||||
// Register a query transformer to register a join predicate
|
||||
walker.registerQueryTransformer( new NumberSeriesQueryTransformer(
|
||||
functionTableGroup,
|
||||
functionTableGroup,
|
||||
"x",
|
||||
coerceToTimestamp
|
||||
) );
|
||||
}
|
||||
return functionTableGroup;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static class H2GenerateSeriesSetReturningFunctionTypeResolver extends NumberSeriesGenerateSeriesSetReturningFunctionTypeResolver {
|
||||
|
||||
public H2GenerateSeriesSetReturningFunctionTypeResolver() {
|
||||
super( "x", "x" );
|
||||
}
|
||||
|
||||
@Override
|
||||
public SelectableMapping[] resolveFunctionReturnType(
|
||||
List<? extends SqlAstNode> arguments,
|
||||
String tableIdentifierVariable,
|
||||
boolean lateral,
|
||||
boolean withOrdinality,
|
||||
SqmToSqlAstConverter converter) {
|
||||
final Expression start = (Expression) arguments.get( 0 );
|
||||
final Expression stop = (Expression) arguments.get( 1 );
|
||||
final JdbcMappingContainer expressionType = NullnessHelper.coalesce(
|
||||
start.getExpressionType(),
|
||||
stop.getExpressionType()
|
||||
);
|
||||
final Expression explicitStep = arguments.size() > 2
|
||||
? (Expression) arguments.get( 2 )
|
||||
: null;
|
||||
final ColumnReference joinBaseColumnReference = NullnessHelper.coalesce(
|
||||
start.getColumnReference(),
|
||||
stop.getColumnReference(),
|
||||
explicitStep != null
|
||||
? explicitStep.getColumnReference()
|
||||
: null
|
||||
);
|
||||
final JdbcMapping type = expressionType.getSingleJdbcMapping();
|
||||
if ( type == null ) {
|
||||
throw new IllegalArgumentException( "Couldn't determine types of arguments to function 'generate_series'" );
|
||||
}
|
||||
|
||||
if ( joinBaseColumnReference != null || withOrdinality || type.getJdbcType().isTemporal() ) {
|
||||
return resolveIterationVariableBasedFunctionReturnType( arguments, tableIdentifierVariable, lateral, withOrdinality, converter );
|
||||
}
|
||||
else {
|
||||
return super.resolveFunctionReturnType( arguments, tableIdentifierVariable, lateral, withOrdinality, converter );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void renderGenerateSeries(
|
||||
SqlAppender sqlAppender,
|
||||
Expression start,
|
||||
Expression stop,
|
||||
@Nullable Expression step,
|
||||
AnonymousTupleTableGroupProducer tupleType,
|
||||
String tableIdentifierVariable,
|
||||
SqlAstTranslator<?> walker) {
|
||||
final boolean needsEmulation = needsEmulation( start )
|
||||
|| needsEmulation( stop )
|
||||
|| step != null && needsEmulation( step );
|
||||
final ModelPart elementPart = tupleType.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null );
|
||||
final ModelPart ordinalityPart = tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
|
||||
final boolean isTemporal = elementPart.getSingleJdbcMapping().getJdbcType().isTemporal();
|
||||
|
||||
if ( needsEmulation || ordinalityPart != null || isTemporal ) {
|
||||
final boolean startNeedsVariable = needsVariable( start );
|
||||
final boolean stepNeedsVariable = step != null && needsVariable( step );
|
||||
if ( startNeedsVariable || stepNeedsVariable ) {
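// Sketch of the emulated shape when variables are needed (illustrative, not the exact SQL):
//   ((values (start,step)) t_(b,s) join system_range(1,maxSeriesSize) t on true)
// so that complex start/step expressions can later be referenced as t_.b and t_.s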
|
||||
sqlAppender.appendSql( "((values " );
|
||||
char separator = '(';
|
||||
if ( startNeedsVariable ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
start.accept( walker );
|
||||
separator = ',';
|
||||
}
|
||||
if ( stepNeedsVariable ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
step.accept( walker );
|
||||
}
|
||||
sqlAppender.appendSql( ")) " );
|
||||
sqlAppender.appendSql( tableIdentifierVariable );
|
||||
sqlAppender.appendSql( "_" );
|
||||
separator = '(';
|
||||
if ( startNeedsVariable ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
sqlAppender.appendSql( "b" );
|
||||
separator = ',';
|
||||
}
|
||||
if ( stepNeedsVariable ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
sqlAppender.appendSql( "s" );
|
||||
}
|
||||
sqlAppender.appendSql( ") join " );
|
||||
}
|
||||
sqlAppender.appendSql( "system_range(1," );
|
||||
sqlAppender.appendSql( maxSeriesSize );
|
||||
sqlAppender.appendSql( ") " );
|
||||
sqlAppender.appendSql( tableIdentifierVariable );
|
||||
if ( startNeedsVariable || stepNeedsVariable ) {
|
||||
sqlAppender.appendSql( " on true)" );
|
||||
}
|
||||
}
|
||||
else {
|
||||
sqlAppender.appendSql( "system_range(" );
|
||||
start.accept( walker );
|
||||
sqlAppender.appendSql( ',' );
|
||||
stop.accept( walker );
|
||||
if ( step != null ) {
|
||||
sqlAppender.appendSql( ',' );
|
||||
step.accept( walker );
|
||||
}
|
||||
sqlAppender.appendSql( ") " );
|
||||
sqlAppender.appendSql( tableIdentifierVariable );
|
||||
}
|
||||
}
|
||||
|
||||
private static boolean needsEmulation(Expression expression) {
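// Literals and parameters can be passed to system_range() directly; anything else forces the emulation path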
|
||||
return !( expression instanceof Literal || expression instanceof JdbcParameter );
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,188 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
* Copyright Red Hat Inc. and Hibernate Authors
|
||||
*/
|
||||
package org.hibernate.dialect.function;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
|
||||
import org.hibernate.metamodel.mapping.SelectableMapping;
|
||||
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
|
||||
import org.hibernate.query.derived.AnonymousTupleType;
|
||||
import org.hibernate.query.spi.QueryEngine;
|
||||
import org.hibernate.query.sqm.function.SelfRenderingSqmSetReturningFunction;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.spi.NavigablePath;
|
||||
import org.hibernate.sql.ast.SqlAstTranslator;
|
||||
import org.hibernate.sql.ast.spi.SqlAppender;
|
||||
import org.hibernate.sql.ast.tree.SqlAstNode;
|
||||
import org.hibernate.sql.ast.tree.cte.CteColumn;
|
||||
import org.hibernate.sql.ast.tree.cte.CteStatement;
|
||||
import org.hibernate.sql.ast.tree.cte.CteTable;
|
||||
import org.hibernate.sql.ast.tree.expression.Duration;
|
||||
import org.hibernate.sql.ast.tree.expression.Expression;
|
||||
import org.hibernate.sql.ast.tree.expression.SelfRenderingExpression;
|
||||
import org.hibernate.sql.ast.tree.from.FunctionTableGroup;
|
||||
import org.hibernate.sql.ast.tree.from.TableGroup;
|
||||
import org.hibernate.sql.ast.tree.select.QuerySpec;
|
||||
import org.hibernate.sql.ast.tree.select.SelectStatement;
|
||||
import org.hibernate.sql.results.internal.SqlSelectionImpl;
|
||||
import org.hibernate.type.BasicType;
|
||||
import org.hibernate.type.SqlTypes;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
|
||||
/**
|
||||
* HANA generate_series function.
|
||||
*/
|
||||
public class HANAGenerateSeriesFunction extends NumberSeriesGenerateSeriesFunction {
|
||||
|
||||
public HANAGenerateSeriesFunction(int maxSeriesSize, TypeConfiguration typeConfiguration) {
|
||||
super(
|
||||
new CteGenerateSeriesSetReturningFunctionTypeResolver(),
|
||||
// Treat durations like intervals to avoid conversions
|
||||
typeConfiguration.getBasicTypeRegistry().resolve(
|
||||
java.time.Duration.class,
|
||||
SqlTypes.DURATION
|
||||
),
|
||||
false,
|
||||
maxSeriesSize
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean rendersIdentifierVariable(List<SqlAstNode> arguments, SessionFactoryImplementor sessionFactory) {
|
||||
// To make our lives simpler during emulation
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected <T> SelfRenderingSqmSetReturningFunction<T> generateSqmSetReturningFunctionExpression(List<? extends SqmTypedNode<?>> arguments, QueryEngine queryEngine) {
|
||||
//noinspection unchecked
|
||||
return new SelfRenderingSqmSetReturningFunction<>(
|
||||
this,
|
||||
this,
|
||||
arguments,
|
||||
getArgumentsValidator(),
|
||||
getSetReturningTypeResolver(),
|
||||
(AnonymousTupleType<T>) getSetReturningTypeResolver().resolveTupleType( arguments, queryEngine.getTypeConfiguration() ),
|
||||
queryEngine.getCriteriaBuilder(),
|
||||
getName()
|
||||
) {
|
||||
@Override
|
||||
public TableGroup convertToSqlAst(NavigablePath navigablePath, String identifierVariable, boolean lateral, boolean canUseInnerJoins, boolean withOrdinality, SqmToSqlAstConverter walker) {
|
||||
final FunctionTableGroup tableGroup = (FunctionTableGroup) super.convertToSqlAst(
|
||||
navigablePath,
|
||||
identifierVariable,
|
||||
lateral,
|
||||
canUseInnerJoins,
|
||||
withOrdinality,
|
||||
walker
|
||||
);
|
||||
walker.registerQueryTransformer( new HANAGenerateSeriesQueryTransformer(
|
||||
tableGroup,
|
||||
tableGroup,
|
||||
maxSeriesSize,
|
||||
"i",
|
||||
coerceToTimestamp
|
||||
) );
|
||||
return tableGroup;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
protected static class HANAGenerateSeriesQueryTransformer extends CteGenerateSeriesFunction.CteGenerateSeriesQueryTransformer {
|
||||
|
||||
public HANAGenerateSeriesQueryTransformer(FunctionTableGroup functionTableGroup, TableGroup targetTableGroup, int maxSeriesSize, String positionColumnName, boolean coerceToTimestamp) {
|
||||
super( functionTableGroup, targetTableGroup, maxSeriesSize, positionColumnName, coerceToTimestamp );
|
||||
}
|
||||
|
||||
@Override
|
||||
protected CteStatement createSeriesCte(SqmToSqlAstConverter converter) {
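// Builds a CTE selecting a single XML string of the form '<r><a/><a/>...</r>' with maxSeriesSize <a/> elements;
// renderGenerateSeries() later unnests it via xmltable('/r/a' ...) and uses "for ordinality" as the number source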
|
||||
final BasicType<String> stringType = converter.getCreationContext().getTypeConfiguration()
|
||||
.getBasicTypeForJavaType( String.class );
|
||||
final List<CteColumn> cteColumns = List.of( new CteColumn( "v", stringType ) );
|
||||
|
||||
final QuerySpec query = new QuerySpec( false );
|
||||
query.getSelectClause().addSqlSelection( new SqlSelectionImpl( new SelfRenderingExpression() {
|
||||
@Override
|
||||
public void renderToSql(SqlAppender sqlAppender, SqlAstTranslator<?> walker, SessionFactoryImplementor sessionFactory) {
|
||||
sqlAppender.appendSql( "'<r>'||lpad(''," );
|
||||
sqlAppender.appendSql( maxSeriesSize * 4 );
|
||||
sqlAppender.appendSql( ",'<a/>')||'</r>'" );
|
||||
}
|
||||
|
||||
@Override
|
||||
public JdbcMappingContainer getExpressionType() {
|
||||
return stringType;
|
||||
}
|
||||
} ) );
|
||||
return new CteStatement( new CteTable( CteGenerateSeriesFunction.CteGenerateSeriesQueryTransformer.NAME, cteColumns ), new SelectStatement( query ) );
|
||||
}
|
||||
}
|
||||
|
||||
static class CteGenerateSeriesSetReturningFunctionTypeResolver extends NumberSeriesGenerateSeriesSetReturningFunctionTypeResolver {
|
||||
|
||||
public CteGenerateSeriesSetReturningFunctionTypeResolver() {
|
||||
super( "v", "i" );
|
||||
}
|
||||
|
||||
@Override
|
||||
public SelectableMapping[] resolveFunctionReturnType(
|
||||
List<? extends SqlAstNode> arguments,
|
||||
String tableIdentifierVariable,
|
||||
boolean lateral,
|
||||
boolean withOrdinality,
|
||||
SqmToSqlAstConverter converter) {
|
||||
return resolveIterationVariableBasedFunctionReturnType( arguments, tableIdentifierVariable, lateral, withOrdinality, converter );
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void renderGenerateSeries(
|
||||
SqlAppender sqlAppender,
|
||||
Expression start,
|
||||
Expression stop,
|
||||
@Nullable Expression step,
|
||||
AnonymousTupleTableGroupProducer tupleType,
|
||||
String tableIdentifierVariable,
|
||||
SqlAstTranslator<?> walker) {
|
||||
final boolean startNeedsVariable = needsVariable( start );
|
||||
final boolean stepNeedsVariable = step != null && needsVariable( step );
|
||||
if ( startNeedsVariable || stepNeedsVariable ) {
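// Sketch of the emulated shape when variables are needed (illustrative, not the exact SQL):
//   ((select start b, step s from sys.dummy) t_ join xmltable(...) t on 1=1)
// so that complex start/step expressions can later be referenced as t_.b and t_.s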
|
||||
sqlAppender.appendSql( "((select" );
|
||||
char separator = ' ';
|
||||
if ( startNeedsVariable ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
start.accept( walker );
|
||||
sqlAppender.appendSql( " b" );
|
||||
separator = ',';
|
||||
}
|
||||
if ( stepNeedsVariable ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
if ( step instanceof Duration duration ) {
|
||||
duration.getMagnitude().accept( walker );
|
||||
}
|
||||
else {
|
||||
step.accept( walker );
|
||||
}
|
||||
sqlAppender.appendSql( " s" );
|
||||
}
|
||||
sqlAppender.appendSql( " from sys.dummy) " );
|
||||
sqlAppender.appendSql( tableIdentifierVariable );
|
||||
sqlAppender.appendSql( "_" );
|
||||
sqlAppender.appendSql( " join " );
|
||||
}
|
||||
sqlAppender.appendSql( "xmltable('/r/a' passing " );
|
||||
sqlAppender.appendSql( CteGenerateSeriesFunction.CteGenerateSeriesQueryTransformer.NAME );
|
||||
sqlAppender.appendSql( ".v columns i for ordinality) " );
|
||||
sqlAppender.appendSql( tableIdentifierVariable );
|
||||
if ( startNeedsVariable || stepNeedsVariable ) {
|
||||
sqlAppender.appendSql( " on 1=1)" );
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,509 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
* Copyright Red Hat Inc. and Hibernate Authors
|
||||
*/
|
||||
package org.hibernate.dialect.function;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.engine.spi.LazySessionWrapperOptions;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.internal.util.NullnessHelper;
|
||||
import org.hibernate.metamodel.mapping.BasicValuedMapping;
|
||||
import org.hibernate.metamodel.mapping.CollectionPart;
|
||||
import org.hibernate.metamodel.mapping.JdbcMapping;
|
||||
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
|
||||
import org.hibernate.metamodel.mapping.SelectableMapping;
|
||||
import org.hibernate.metamodel.mapping.SelectablePath;
|
||||
import org.hibernate.metamodel.mapping.SqlTypedMapping;
|
||||
import org.hibernate.metamodel.mapping.internal.SelectableMappingImpl;
|
||||
import org.hibernate.query.ReturnableType;
|
||||
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
|
||||
import org.hibernate.query.spi.QueryOptions;
|
||||
import org.hibernate.query.sqm.BinaryArithmeticOperator;
|
||||
import org.hibernate.query.sqm.ComparisonOperator;
|
||||
import org.hibernate.query.sqm.function.FunctionRenderer;
|
||||
import org.hibernate.query.sqm.function.SelfRenderingFunctionSqlAstExpression;
|
||||
import org.hibernate.query.sqm.produce.function.SetReturningFunctionTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.expression.NumericTypeCategory;
|
||||
import org.hibernate.sql.Template;
|
||||
import org.hibernate.sql.ast.SqlAstTranslator;
|
||||
import org.hibernate.sql.ast.spi.SqlAppender;
|
||||
import org.hibernate.sql.ast.tree.SqlAstNode;
|
||||
import org.hibernate.sql.ast.tree.cte.CteContainer;
|
||||
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
|
||||
import org.hibernate.sql.ast.tree.expression.CastTarget;
|
||||
import org.hibernate.sql.ast.tree.expression.ColumnReference;
|
||||
import org.hibernate.sql.ast.tree.expression.DurationUnit;
|
||||
import org.hibernate.sql.ast.tree.expression.Expression;
|
||||
import org.hibernate.sql.ast.tree.expression.Literal;
|
||||
import org.hibernate.sql.ast.tree.expression.QueryLiteral;
|
||||
import org.hibernate.sql.ast.tree.expression.QueryTransformer;
|
||||
import org.hibernate.sql.ast.tree.expression.SelfRenderingSqlFragmentExpression;
|
||||
import org.hibernate.sql.ast.tree.expression.UnparsedNumericLiteral;
|
||||
import org.hibernate.sql.ast.tree.from.FunctionTableGroup;
|
||||
import org.hibernate.sql.ast.tree.from.TableGroup;
|
||||
import org.hibernate.sql.ast.tree.predicate.ComparisonPredicate;
|
||||
import org.hibernate.sql.ast.tree.predicate.Junction;
|
||||
import org.hibernate.sql.ast.tree.predicate.Predicate;
|
||||
import org.hibernate.sql.ast.tree.predicate.PredicateContainer;
|
||||
import org.hibernate.sql.ast.tree.select.QuerySpec;
|
||||
import org.hibernate.sql.ast.tree.select.SelectStatement;
|
||||
import org.hibernate.sql.exec.spi.JdbcOperationQuerySelect;
|
||||
import org.hibernate.sql.results.internal.SqlSelectionImpl;
|
||||
import org.hibernate.type.BasicType;
|
||||
import org.hibernate.type.SqlTypes;
|
||||
import org.hibernate.type.descriptor.jdbc.JdbcType;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
||||
import java.sql.Timestamp;
|
||||
import java.time.Duration;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* The base for generate_series function implementations that use a static number source.
|
||||
*/
|
||||
public abstract class NumberSeriesGenerateSeriesFunction extends GenerateSeriesFunction {
|
||||
|
||||
protected final int maxSeriesSize;
|
||||
|
||||
public NumberSeriesGenerateSeriesFunction(@Nullable String defaultValueColumnName, String defaultIndexSelectionExpression, boolean coerceToTimestamp, TypeConfiguration typeConfiguration, int maxSeriesSize) {
|
||||
super( defaultValueColumnName, defaultIndexSelectionExpression, coerceToTimestamp, typeConfiguration );
|
||||
this.maxSeriesSize = maxSeriesSize;
|
||||
}
|
||||
|
||||
public NumberSeriesGenerateSeriesFunction(SetReturningFunctionTypeResolver setReturningFunctionTypeResolver, BasicType<Duration> durationType, int maxSeriesSize) {
|
||||
super( setReturningFunctionTypeResolver, durationType );
|
||||
this.maxSeriesSize = maxSeriesSize;
|
||||
}
|
||||
|
||||
public NumberSeriesGenerateSeriesFunction(SetReturningFunctionTypeResolver setReturningFunctionTypeResolver, BasicType<Duration> durationType, boolean coerceToTimestamp, int maxSeriesSize) {
|
||||
super( setReturningFunctionTypeResolver, durationType, coerceToTimestamp );
|
||||
this.maxSeriesSize = maxSeriesSize;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected abstract void renderGenerateSeries(
|
||||
SqlAppender sqlAppender,
|
||||
Expression start,
|
||||
Expression stop,
|
||||
@Nullable Expression step,
|
||||
AnonymousTupleTableGroupProducer tupleType,
|
||||
String tableIdentifierVariable,
|
||||
SqlAstTranslator<?> walker);
|
||||
|
||||
/**
|
||||
* Returns whether a variable (e.g. through values clause) shall be introduced for an expression,
|
||||
* which is passed as argument to the {@code generate_series} function.
|
||||
* Since the selection expression of the value column that this function returns must be transformed
|
||||
* to the form {@code start + step * ( iterationVariable - 1 )}, it is vital that {@code start} and {@code step}
|
||||
* can be rendered to a {@code String} during SQL AST build time for {@link SelectableMapping#getSelectionExpression()}.
|
||||
* If that isn't possible because the expression is too complex, a variable needs to be introduced which is then used
|
||||
* instead of the original expression.
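* <p>
* For example (illustrative), a literal start value or a plain column reference can be rendered into the read
* expression directly, whereas a parameter, arithmetic expression or subquery cannot, and is therefore exposed
* as a synthetic column of a values/select clause and referenced by name instead.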
|
||||
*/
|
||||
protected static boolean needsVariable(Expression expression) {
|
||||
return !( expression instanceof Literal || expression instanceof ColumnReference );
|
||||
}
|
||||
|
||||
public static Expression add(Expression left, Expression right, SqmToSqlAstConverter converter) {
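// Adds one step to the current value: temporal series go through the dialect's timestampadd() function,
// numeric series use a plain binary addition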
|
||||
if ( right instanceof org.hibernate.sql.ast.tree.expression.Duration duration ) {
|
||||
final BasicType<?> nodeType = (BasicType<?>) left.getExpressionType().getSingleJdbcMapping();
|
||||
final FunctionRenderer timestampadd = (FunctionRenderer) converter.getCreationContext().getSessionFactory()
|
||||
.getQueryEngine().getSqmFunctionRegistry().findFunctionDescriptor( "timestampadd" );
|
||||
return new SelfRenderingFunctionSqlAstExpression(
|
||||
"timestampadd",
|
||||
timestampadd,
|
||||
List.of(
|
||||
new DurationUnit( duration.getUnit(), duration.getExpressionType() ),
|
||||
duration.getMagnitude(),
|
||||
left
|
||||
),
|
||||
nodeType,
|
||||
nodeType
|
||||
);
|
||||
}
|
||||
else {
|
||||
return new BinaryArithmeticExpression(
|
||||
left,
|
||||
BinaryArithmeticOperator.ADD,
|
||||
right,
|
||||
(BasicValuedMapping) left.getExpressionType()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public static Expression multiply(Expression left, int multiplier, BasicType<Integer> integerType) {
|
||||
return multiply( left, new UnparsedNumericLiteral<>( Integer.toString( multiplier ), NumericTypeCategory.INTEGER, integerType ) );
|
||||
}
|
||||
|
||||
public static Expression multiply(Expression left, Expression multiplier) {
|
||||
if ( left instanceof org.hibernate.sql.ast.tree.expression.Duration duration ) {
|
||||
return new org.hibernate.sql.ast.tree.expression.Duration(
|
||||
multiply( duration.getMagnitude(), multiplier ),
|
||||
duration.getUnit(),
|
||||
duration.getExpressionType()
|
||||
);
|
||||
}
|
||||
else {
|
||||
return new BinaryArithmeticExpression(
|
||||
left,
|
||||
BinaryArithmeticOperator.MULTIPLY,
|
||||
multiplier,
|
||||
(BasicValuedMapping) left.getExpressionType()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
static Expression castToTimestamp(SqlAstNode node, SqmToSqlAstConverter converter) {
|
||||
final BasicType<?> nodeType = (BasicType<?>) ((Expression) node).getExpressionType().getSingleJdbcMapping();
|
||||
final FunctionRenderer cast = (FunctionRenderer) converter.getCreationContext().getSessionFactory().getQueryEngine()
|
||||
.getSqmFunctionRegistry().findFunctionDescriptor( "cast" );
|
||||
final BasicType<?> timestampType = converter.getCreationContext().getTypeConfiguration()
|
||||
.getBasicTypeForJavaType( Timestamp.class );
|
||||
return new SelfRenderingFunctionSqlAstExpression(
|
||||
"cast",
|
||||
cast,
|
||||
List.of( node, new CastTarget( timestampType ) ),
|
||||
nodeType,
|
||||
nodeType
|
||||
);
|
||||
}
|
||||
|
||||
protected static class NumberSeriesQueryTransformer implements QueryTransformer {
|
||||
|
||||
protected final FunctionTableGroup functionTableGroup;
|
||||
protected final TableGroup targetTableGroup;
|
||||
protected final String positionColumnName;
|
||||
protected final boolean coerceToTimestamp;
|
||||
|
||||
public NumberSeriesQueryTransformer(FunctionTableGroup functionTableGroup, TableGroup targetTableGroup, String positionColumnName, boolean coerceToTimestamp) {
|
||||
this.functionTableGroup = functionTableGroup;
|
||||
this.targetTableGroup = targetTableGroup;
|
||||
this.positionColumnName = positionColumnName;
|
||||
this.coerceToTimestamp = coerceToTimestamp;
|
||||
}
|
||||
|
||||
@Override
|
||||
public QuerySpec transform(CteContainer cteContainer, QuerySpec querySpec, SqmToSqlAstConverter converter) {
|
||||
//noinspection unchecked
|
||||
final List<SqlAstNode> arguments = (List<SqlAstNode>) functionTableGroup.getPrimaryTableReference()
|
||||
.getFunctionExpression()
|
||||
.getArguments();
|
||||
final JdbcType boundType = ((Expression) arguments.get( 0 )).getExpressionType().getSingleJdbcMapping().getJdbcType();
|
||||
final boolean castTimestamp = coerceToTimestamp
|
||||
&& (boundType.getDdlTypeCode() == SqlTypes.DATE || boundType.getDdlTypeCode() == SqlTypes.TIME);
|
||||
final Expression start = castTimestamp
|
||||
? castToTimestamp( arguments.get( 0 ), converter )
|
||||
: (Expression) arguments.get( 0 );
|
||||
final Expression stop = castTimestamp
|
||||
? castToTimestamp( arguments.get( 1 ), converter )
|
||||
: (Expression) arguments.get( 1 );
|
||||
final Expression explicitStep = arguments.size() > 2
|
||||
? (Expression) arguments.get( 2 )
|
||||
: null;
|
||||
|
||||
final TableGroup parentTableGroup = querySpec.getFromClause().queryTableGroups(
|
||||
tg -> tg.findTableGroupJoin( targetTableGroup ) == null ? null : tg
|
||||
);
|
||||
final PredicateContainer predicateContainer;
|
||||
if ( parentTableGroup != null ) {
|
||||
predicateContainer = parentTableGroup.findTableGroupJoin( targetTableGroup );
|
||||
}
|
||||
else {
|
||||
predicateContainer = querySpec;
|
||||
}
|
||||
final BasicType<Integer> integerType = converter.getCreationContext()
|
||||
.getSessionFactory()
|
||||
.getNodeBuilder()
|
||||
.getIntegerType();
|
||||
final Expression oneBasedOrdinal = new ColumnReference(
|
||||
functionTableGroup.getPrimaryTableReference().getIdentificationVariable(),
|
||||
positionColumnName,
|
||||
false,
|
||||
null,
|
||||
integerType
|
||||
);
|
||||
final Expression one = new QueryLiteral<>( 1, integerType );
|
||||
final Expression zeroBasedOrdinal = new BinaryArithmeticExpression(
|
||||
oneBasedOrdinal,
|
||||
BinaryArithmeticOperator.SUBTRACT,
|
||||
one,
|
||||
integerType
|
||||
);
|
||||
final Expression stepExpression = explicitStep != null
|
||||
? multiply( explicitStep, zeroBasedOrdinal )
|
||||
: zeroBasedOrdinal;
|
||||
final Expression nextValue = add( start, stepExpression, converter );
|
||||
|
||||
// Add a predicate to ensure the current value is valid
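// e.g. for generate_series(1, 10, 3): keep rows where 1 + 3*(i-1) <= 10, i.e. ordinals i = 1..4 yielding 1, 4, 7, 10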
|
||||
if ( explicitStep == null ) {
|
||||
// The default step is 1, so just check if value <= stop
|
||||
predicateContainer.applyPredicate(
|
||||
new ComparisonPredicate(
|
||||
nextValue,
|
||||
ComparisonOperator.LESS_THAN_OR_EQUAL,
|
||||
stop
|
||||
)
|
||||
);
|
||||
}
|
||||
else {
|
||||
// When start < stop, step must be positive and value is only valid if it's less than or equal to stop
|
||||
final BasicType<Boolean> booleanType = converter.getCreationContext()
|
||||
.getSessionFactory()
|
||||
.getNodeBuilder()
|
||||
.getBooleanType();
|
||||
final Predicate positiveProgress = new Junction(
|
||||
Junction.Nature.CONJUNCTION,
|
||||
List.of(
|
||||
new ComparisonPredicate(
|
||||
start,
|
||||
ComparisonOperator.LESS_THAN,
|
||||
stop
|
||||
),
|
||||
new ComparisonPredicate(
|
||||
explicitStep,
|
||||
ComparisonOperator.GREATER_THAN,
|
||||
multiply( explicitStep, -1, integerType )
|
||||
),
|
||||
new ComparisonPredicate(
|
||||
nextValue,
|
||||
ComparisonOperator.LESS_THAN_OR_EQUAL,
|
||||
stop
|
||||
)
|
||||
),
|
||||
booleanType
|
||||
);
|
||||
// When start > stop, step must be negative and value is only valid if it's greater than or equal to stop
|
||||
final Predicate negativeProgress = new Junction(
|
||||
Junction.Nature.CONJUNCTION,
|
||||
List.of(
|
||||
new ComparisonPredicate(
|
||||
start,
|
||||
ComparisonOperator.GREATER_THAN,
|
||||
stop
|
||||
),
|
||||
new ComparisonPredicate(
|
||||
explicitStep,
|
||||
ComparisonOperator.LESS_THAN,
|
||||
multiply( explicitStep, -1, integerType )
|
||||
),
|
||||
new ComparisonPredicate(
|
||||
nextValue,
|
||||
ComparisonOperator.GREATER_THAN_OR_EQUAL,
|
||||
stop
|
||||
)
|
||||
),
|
||||
booleanType
|
||||
);
|
||||
final Predicate initialValue = new Junction(
|
||||
Junction.Nature.CONJUNCTION,
|
||||
List.of(
|
||||
new ComparisonPredicate(
|
||||
start,
|
||||
ComparisonOperator.EQUAL,
|
||||
stop
|
||||
),
|
||||
new ComparisonPredicate(
|
||||
oneBasedOrdinal,
|
||||
ComparisonOperator.EQUAL,
|
||||
one
|
||||
)
|
||||
),
|
||||
booleanType
|
||||
);
|
||||
predicateContainer.applyPredicate(
|
||||
new Junction(
|
||||
Junction.Nature.DISJUNCTION,
|
||||
List.of( positiveProgress, negativeProgress, initialValue ),
|
||||
booleanType
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
return querySpec;
|
||||
}
|
||||
}
|
||||
|
||||
protected static class NumberSeriesGenerateSeriesSetReturningFunctionTypeResolver extends GenerateSeriesSetReturningFunctionTypeResolver {
|
||||
|
||||
public NumberSeriesGenerateSeriesSetReturningFunctionTypeResolver(@Nullable String defaultValueColumnName, String defaultIndexSelectionExpression) {
|
||||
super( defaultValueColumnName, defaultIndexSelectionExpression );
|
||||
}
|
||||
|
||||
protected SelectableMapping[] resolveIterationVariableBasedFunctionReturnType(
|
||||
List<? extends SqlAstNode> arguments,
|
||||
String tableIdentifierVariable,
|
||||
boolean lateral,
|
||||
boolean withOrdinality,
|
||||
SqmToSqlAstConverter converter) {
|
||||
final Expression start = (Expression) arguments.get( 0 );
|
||||
final Expression stop = (Expression) arguments.get( 1 );
|
||||
final JdbcMappingContainer expressionType = NullnessHelper.coalesce(
|
||||
start.getExpressionType(),
|
||||
stop.getExpressionType()
|
||||
);
|
||||
final Expression explicitStep = arguments.size() > 2
|
||||
? (Expression) arguments.get( 2 )
|
||||
: null;
|
||||
final JdbcMapping type = expressionType.getSingleJdbcMapping();
|
||||
if ( type == null ) {
|
||||
throw new IllegalArgumentException(
|
||||
"Couldn't determine types of arguments to function 'generate_series'" );
|
||||
}
|
||||
|
||||
final SelectableMapping indexMapping = withOrdinality ? new SelectableMappingImpl(
|
||||
"",
|
||||
defaultIndexSelectionExpression,
|
||||
new SelectablePath( CollectionPart.Nature.INDEX.getName() ),
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
converter.getCreationContext().getTypeConfiguration().getBasicTypeForJavaType( Long.class )
|
||||
) : null;
|
||||
|
||||
// Builds a custom read expression of the form start + (step * (ordinal - 1)), e.g. t_.b+(t_.s*(alias.x-1))
|
||||
final String startExpression = getStartExpression( start, tableIdentifierVariable, converter );
|
||||
final String stepExpression = getStepExpression( explicitStep, tableIdentifierVariable, converter );
|
||||
final String customReadExpression;
|
||||
if ( type.getJdbcType().isTemporal() ) {
|
||||
final org.hibernate.sql.ast.tree.expression.Duration step = (org.hibernate.sql.ast.tree.expression.Duration) explicitStep;
|
||||
customReadExpression = timestampadd( startExpression, stepExpression, type, step, converter );
|
||||
}
|
||||
else {
|
||||
customReadExpression = startExpression + "+" + stepExpression;
|
||||
}
|
||||
final String elementSelectionExpression = defaultValueColumnName == null
|
||||
? tableIdentifierVariable
|
||||
: defaultValueColumnName;
|
||||
final SelectableMapping elementMapping;
|
||||
if ( expressionType instanceof SqlTypedMapping typedMapping ) {
|
||||
elementMapping = new SelectableMappingImpl(
|
||||
"",
|
||||
elementSelectionExpression,
|
||||
new SelectablePath( CollectionPart.Nature.ELEMENT.getName() ),
|
||||
customReadExpression,
|
||||
null,
|
||||
typedMapping.getColumnDefinition(),
|
||||
typedMapping.getLength(),
|
||||
typedMapping.getPrecision(),
|
||||
typedMapping.getScale(),
|
||||
typedMapping.getTemporalPrecision(),
|
||||
typedMapping.isLob(),
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
type
|
||||
);
|
||||
}
|
||||
else {
|
||||
elementMapping = new SelectableMappingImpl(
|
||||
"",
|
||||
elementSelectionExpression,
|
||||
new SelectablePath( CollectionPart.Nature.ELEMENT.getName() ),
|
||||
customReadExpression,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
type
|
||||
);
|
||||
}
|
||||
final SelectableMapping[] returnType;
|
||||
if ( indexMapping == null ) {
|
||||
returnType = new SelectableMapping[] {elementMapping};
|
||||
}
|
||||
else {
|
||||
returnType = new SelectableMapping[] {elementMapping, indexMapping};
|
||||
}
|
||||
return returnType;
|
||||
}
|
||||
|
||||
private static String timestampadd(String startExpression, String stepExpression, JdbcMapping type, org.hibernate.sql.ast.tree.expression.Duration duration, SqmToSqlAstConverter converter) {
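// Renders a throw-away "select timestampadd(unit, step, start)" through the dialect's own SQL AST translator
// and cuts out the fragment between "select " and " from", so that the dialect-specific syntax can be embedded
// in the custom read expression (descriptive note on the code below)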
|
||||
final FunctionRenderer renderer = (FunctionRenderer) converter.getCreationContext().getSessionFactory()
|
||||
.getQueryEngine().getSqmFunctionRegistry().findFunctionDescriptor( "timestampadd" );
|
||||
final QuerySpec fakeQuery = new QuerySpec( true );
|
||||
fakeQuery.getSelectClause().addSqlSelection( new SqlSelectionImpl(
|
||||
new SelfRenderingFunctionSqlAstExpression(
|
||||
"timestampadd",
|
||||
renderer,
|
||||
List.of(
|
||||
new DurationUnit( duration.getUnit(), duration.getExpressionType() ),
|
||||
new SelfRenderingSqlFragmentExpression( stepExpression, duration.getExpressionType() ),
|
||||
new SelfRenderingSqlFragmentExpression( startExpression, type )
|
||||
),
|
||||
(ReturnableType<?>) type,
|
||||
type
|
||||
)
|
||||
) );
|
||||
final SqlAstTranslator<JdbcOperationQuerySelect> translator = converter.getCreationContext()
|
||||
.getSessionFactory().getJdbcServices().getDialect().getSqlAstTranslatorFactory()
|
||||
.buildSelectTranslator( converter.getCreationContext().getSessionFactory(), new SelectStatement( fakeQuery ) );
|
||||
final JdbcOperationQuerySelect operation = translator.translate( null, QueryOptions.NONE );
|
||||
final String sqlString = operation.getSqlString();
|
||||
assert sqlString.startsWith( "select " );
|
||||
|
||||
final int startIndex = "select ".length();
|
||||
final int fromIndex = sqlString.lastIndexOf( " from" );
|
||||
return fromIndex == -1
|
||||
? sqlString.substring( startIndex )
|
||||
: sqlString.substring( startIndex, fromIndex );
|
||||
}
|
||||
|
||||
private String getStartExpression(Expression expression, String tableIdentifierVariable, SqmToSqlAstConverter walker) {
|
||||
return getExpression( expression, tableIdentifierVariable, "b", walker );
|
||||
}
|
||||
|
||||
private String getStepExpression(@Nullable Expression explicitStep, String tableIdentifierVariable, SqmToSqlAstConverter walker) {
|
||||
if ( explicitStep == null ) {
|
||||
return "(" + Template.TEMPLATE + "." + defaultIndexSelectionExpression + "-1)";
|
||||
}
|
||||
else {
|
||||
return "(" + getExpression( explicitStep, tableIdentifierVariable, "s", walker ) + "*(" + Template.TEMPLATE + "." + defaultIndexSelectionExpression + "-1))";
|
||||
}
|
||||
}
|
||||
|
||||
private String getExpression(Expression expression, String tableIdentifierVariable, String syntheticColumnName, SqmToSqlAstConverter walker) {
|
||||
if ( expression instanceof Literal literal ) {
|
||||
final SessionFactoryImplementor sessionFactory = walker.getCreationContext().getSessionFactory();
|
||||
final LazySessionWrapperOptions wrapperOptions = new LazySessionWrapperOptions( sessionFactory );
|
||||
try {
|
||||
//noinspection unchecked
|
||||
return literal.getJdbcMapping().getJdbcLiteralFormatter().toJdbcLiteral(
|
||||
literal.getLiteralValue(),
|
||||
sessionFactory.getJdbcServices().getDialect(),
|
||||
wrapperOptions
|
||||
);
|
||||
}
|
||||
finally {
|
||||
wrapperOptions.cleanup();
|
||||
}
|
||||
}
|
||||
else if ( expression instanceof ColumnReference columnReference ) {
|
||||
return columnReference.getExpressionText();
|
||||
}
|
||||
else {
|
||||
return tableIdentifierVariable + "_." + syntheticColumnName;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,213 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
* Copyright Red Hat Inc. and Hibernate Authors
|
||||
*/
|
||||
package org.hibernate.dialect.function;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.internal.util.NullnessHelper;
|
||||
import org.hibernate.metamodel.mapping.CollectionPart;
|
||||
import org.hibernate.metamodel.mapping.JdbcMapping;
|
||||
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
|
||||
import org.hibernate.metamodel.mapping.ModelPart;
|
||||
import org.hibernate.metamodel.mapping.SelectableMapping;
|
||||
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
|
||||
import org.hibernate.query.derived.AnonymousTupleType;
|
||||
import org.hibernate.query.spi.QueryEngine;
|
||||
import org.hibernate.query.sqm.function.SelfRenderingSqmSetReturningFunction;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.spi.NavigablePath;
|
||||
import org.hibernate.sql.ast.SqlAstTranslator;
|
||||
import org.hibernate.sql.ast.spi.SqlAppender;
|
||||
import org.hibernate.sql.ast.tree.SqlAstNode;
|
||||
import org.hibernate.sql.ast.tree.expression.Duration;
|
||||
import org.hibernate.sql.ast.tree.expression.Expression;
|
||||
import org.hibernate.sql.ast.tree.from.FunctionTableGroup;
|
||||
import org.hibernate.sql.ast.tree.from.TableGroup;
|
||||
import org.hibernate.type.SqlTypes;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
* SQL Server generate_series function.
*
* When possible, the {@code generate_series} function is used directly.
* If ordinality is requested or the arguments are temporals, this emulation comes into play.
* It essentially renders a {@code generate_series} with a specified maximum size that serves as an "iteration variable".
* References to the value are replaced with expressions of the form {@code start + step * iterationVariable}
* and a condition is added either to the query or join where the function is used to ensure that the value is
* less than or equal to the stop value.
*/
public class SQLServerGenerateSeriesFunction extends NumberSeriesGenerateSeriesFunction {
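// Illustrative sketch (simplified, assumed aliases and placeholder values) of the emulated SQL shape:
//   select b + s * (i.value - 1)
//   from ((values (<start>, <step>)) v_(b, s)
//         join generate_series(1, <maxSeriesSize>) i on 1=1)
//   where b + s * (i.value - 1) <= <stop>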
|
||||
|
||||
public SQLServerGenerateSeriesFunction(int maxSeriesSize, TypeConfiguration typeConfiguration) {
|
||||
super(
|
||||
new SQLServerGenerateSeriesSetReturningFunctionTypeResolver(),
|
||||
// Treat durations like intervals to avoid conversions
|
||||
typeConfiguration.getBasicTypeRegistry().resolve( java.time.Duration.class, SqlTypes.DURATION ),
|
||||
maxSeriesSize
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean rendersIdentifierVariable(List<SqlAstNode> arguments, SessionFactoryImplementor sessionFactory) {
|
||||
// To make our lives simpler during emulation
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected <T> SelfRenderingSqmSetReturningFunction<T> generateSqmSetReturningFunctionExpression(List<? extends SqmTypedNode<?>> arguments, QueryEngine queryEngine) {
|
||||
//noinspection unchecked
|
||||
return new SelfRenderingSqmSetReturningFunction<>(
|
||||
this,
|
||||
this,
|
||||
arguments,
|
||||
getArgumentsValidator(),
|
||||
getSetReturningTypeResolver(),
|
||||
(AnonymousTupleType<T>) getSetReturningTypeResolver().resolveTupleType( arguments, queryEngine.getTypeConfiguration() ),
|
||||
queryEngine.getCriteriaBuilder(),
|
||||
getName()
|
||||
) {
|
||||
@Override
|
||||
public TableGroup convertToSqlAst(
|
||||
NavigablePath navigablePath,
|
||||
String identifierVariable,
|
||||
boolean lateral,
|
||||
boolean canUseInnerJoins,
|
||||
boolean withOrdinality,
|
||||
SqmToSqlAstConverter walker) {
|
||||
// Register a transformer that adds a join predicate "start+(step*(ordinal-1))<=stop"
|
||||
final FunctionTableGroup functionTableGroup = (FunctionTableGroup) super.convertToSqlAst(
|
||||
navigablePath,
|
||||
identifierVariable,
|
||||
lateral,
|
||||
canUseInnerJoins,
|
||||
withOrdinality,
|
||||
walker
|
||||
);
|
||||
final ModelPart elementPart = functionTableGroup.getModelPart()
|
||||
.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null );
|
||||
final boolean isTemporal = elementPart.getSingleJdbcMapping().getJdbcType().isTemporal();
|
||||
// Only do this transformation if ordinality is requested
|
||||
// or the result is a temporal (SQL Server's generate_series only supports numeric arguments)
|
||||
if ( withOrdinality || isTemporal ) {
|
||||
// Register a query transformer to register a join predicate
|
||||
walker.registerQueryTransformer( new NumberSeriesQueryTransformer(
|
||||
functionTableGroup,
|
||||
functionTableGroup,
|
||||
"value",
|
||||
coerceToTimestamp
|
||||
) );
|
||||
}
|
||||
return functionTableGroup;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static class SQLServerGenerateSeriesSetReturningFunctionTypeResolver extends NumberSeriesGenerateSeriesSetReturningFunctionTypeResolver {
|
||||
|
||||
public SQLServerGenerateSeriesSetReturningFunctionTypeResolver() {
|
||||
super( "value", "value" );
|
||||
}
|
||||
|
||||
@Override
|
||||
public SelectableMapping[] resolveFunctionReturnType(
|
||||
List<? extends SqlAstNode> arguments,
|
||||
String tableIdentifierVariable,
|
||||
boolean lateral,
|
||||
boolean withOrdinality,
|
||||
SqmToSqlAstConverter converter) {
|
||||
final Expression start = (Expression) arguments.get( 0 );
|
||||
final Expression stop = (Expression) arguments.get( 1 );
|
||||
final JdbcMappingContainer expressionType = NullnessHelper.coalesce(
|
||||
start.getExpressionType(),
|
||||
stop.getExpressionType()
|
||||
);
|
||||
final JdbcMapping type = expressionType.getSingleJdbcMapping();
|
||||
if ( type == null ) {
|
||||
throw new IllegalArgumentException( "Couldn't determine types of arguments to function 'generate_series'" );
|
||||
}
|
||||
|
||||
if ( withOrdinality || type.getJdbcType().isTemporal() ) {
|
||||
return resolveIterationVariableBasedFunctionReturnType( arguments, tableIdentifierVariable, lateral, withOrdinality, converter );
|
||||
}
|
||||
else {
|
||||
return super.resolveFunctionReturnType( arguments, tableIdentifierVariable, lateral, withOrdinality, converter );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void renderGenerateSeries(
|
||||
SqlAppender sqlAppender,
|
||||
Expression start,
|
||||
Expression stop,
|
||||
@Nullable Expression step,
|
||||
AnonymousTupleTableGroupProducer tupleType,
|
||||
String tableIdentifierVariable,
|
||||
SqlAstTranslator<?> walker) {
|
||||
final ModelPart elementPart = tupleType.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null );
|
||||
final ModelPart ordinalityPart = tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
|
||||
final boolean isTemporal = elementPart.getSingleJdbcMapping().getJdbcType().isTemporal();
|
||||
|
||||
if ( ordinalityPart != null || isTemporal ) {
|
||||
final boolean startNeedsEmulation = needsVariable( start );
|
||||
final boolean stepNeedsEmulation = step != null && needsVariable( step );
|
||||
if ( startNeedsEmulation || stepNeedsEmulation ) {
|
||||
sqlAppender.appendSql( "((values " );
|
||||
char separator = '(';
|
||||
if ( startNeedsEmulation ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
start.accept( walker );
|
||||
separator = ',';
|
||||
}
|
||||
if ( stepNeedsEmulation ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
if ( step instanceof Duration duration ) {
|
||||
duration.getMagnitude().accept( walker );
|
||||
}
|
||||
else {
|
||||
step.accept( walker );
|
||||
}
|
||||
}
|
||||
sqlAppender.appendSql( ")) " );
|
||||
sqlAppender.appendSql( tableIdentifierVariable );
|
||||
sqlAppender.appendSql( "_" );
|
||||
separator = '(';
|
||||
if ( startNeedsEmulation ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
sqlAppender.appendSql( "b" );
|
||||
separator = ',';
|
||||
}
|
||||
if ( stepNeedsEmulation ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
sqlAppender.appendSql( "s" );
|
||||
}
|
||||
sqlAppender.appendSql( ") join " );
|
||||
}
|
||||
sqlAppender.appendSql( "generate_series(1," );
|
||||
sqlAppender.appendSql( maxSeriesSize );
|
||||
sqlAppender.appendSql( ") " );
|
||||
sqlAppender.appendSql( tableIdentifierVariable );
|
||||
if ( startNeedsEmulation || stepNeedsEmulation ) {
|
||||
sqlAppender.appendSql( " on 1=1)" );
|
||||
}
|
||||
}
|
||||
else {
|
||||
sqlAppender.appendSql( "generate_series(" );
|
||||
start.accept( walker );
|
||||
sqlAppender.appendSql( ',' );
|
||||
stop.accept( walker );
|
||||
if ( step != null ) {
|
||||
sqlAppender.appendSql( ',' );
|
||||
step.accept( walker );
|
||||
}
|
||||
sqlAppender.appendSql( ") " );
|
||||
sqlAppender.appendSql( tableIdentifierVariable );
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@@ -0,0 +1,166 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
* Copyright Red Hat Inc. and Hibernate Authors
|
||||
*/
|
||||
package org.hibernate.dialect.function;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.metamodel.mapping.SelectableMapping;
|
||||
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
|
||||
import org.hibernate.query.derived.AnonymousTupleType;
|
||||
import org.hibernate.query.spi.QueryEngine;
|
||||
import org.hibernate.query.sqm.function.SelfRenderingSqmSetReturningFunction;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.spi.NavigablePath;
|
||||
import org.hibernate.sql.ast.SqlAstTranslator;
|
||||
import org.hibernate.sql.ast.spi.SqlAppender;
|
||||
import org.hibernate.sql.ast.tree.SqlAstNode;
|
||||
import org.hibernate.sql.ast.tree.expression.Duration;
|
||||
import org.hibernate.sql.ast.tree.expression.Expression;
|
||||
import org.hibernate.sql.ast.tree.from.FunctionTableGroup;
|
||||
import org.hibernate.sql.ast.tree.from.TableGroup;
|
||||
import org.hibernate.type.SqlTypes;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
* Sybase ASE generate_series function.
*
* This implementation first replicates an XML tag a specified maximum number of times using the {@code replicate} function
* and then uses {@code xmltable} to produce rows for every generated element.
* References to the value are replaced with expressions of the form {@code start + step * iterationVariable}
* and a condition is added either to the query or join where the function is used to ensure that the value is
* less than or equal to the stop value.
*/
public class SybaseASEGenerateSeriesFunction extends NumberSeriesGenerateSeriesFunction {
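// Illustrative sketch (simplified, assumed aliases and placeholder values) of the emulated SQL shape:
//   select b + s * (t_.i - 1)
//   from ((select <start>, <step>) v_(b, s)
//         join xmltable('/r/a' passing '<r>'+replicate('<a/>', <maxSeriesSize>)+'</r>'
//                       columns i bigint for ordinality, v varchar(255) path '.') t_ on 1=1)
//   where b + s * (t_.i - 1) <= <stop>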
|
||||
|
||||
public SybaseASEGenerateSeriesFunction(int maxSeriesSize, TypeConfiguration typeConfiguration) {
|
||||
super(
|
||||
new SybaseASEGenerateSeriesSetReturningFunctionTypeResolver(),
|
||||
// Treat durations like intervals to avoid conversions
|
||||
typeConfiguration.getBasicTypeRegistry().resolve( java.time.Duration.class, SqlTypes.DURATION ),
|
||||
maxSeriesSize
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean rendersIdentifierVariable(List<SqlAstNode> arguments, SessionFactoryImplementor sessionFactory) {
|
||||
// To make our lives simpler during emulation
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected <T> SelfRenderingSqmSetReturningFunction<T> generateSqmSetReturningFunctionExpression(List<? extends SqmTypedNode<?>> arguments, QueryEngine queryEngine) {
|
||||
//noinspection unchecked
|
||||
return new SelfRenderingSqmSetReturningFunction<>(
|
||||
this,
|
||||
this,
|
||||
arguments,
|
||||
getArgumentsValidator(),
|
||||
getSetReturningTypeResolver(),
|
||||
(AnonymousTupleType<T>) getSetReturningTypeResolver().resolveTupleType( arguments, queryEngine.getTypeConfiguration() ),
|
||||
queryEngine.getCriteriaBuilder(),
|
||||
getName()
|
||||
) {
|
||||
@Override
|
||||
public TableGroup convertToSqlAst(
|
||||
NavigablePath navigablePath,
|
||||
String identifierVariable,
|
||||
boolean lateral,
|
||||
boolean canUseInnerJoins,
|
||||
boolean withOrdinality,
|
||||
SqmToSqlAstConverter walker) {
|
||||
// Register a transformer that adds a join predicate "start+(step*(ordinal-1))<=stop"
|
||||
final FunctionTableGroup functionTableGroup = (FunctionTableGroup) super.convertToSqlAst(
|
||||
navigablePath,
|
||||
identifierVariable,
|
||||
lateral,
|
||||
canUseInnerJoins,
|
||||
withOrdinality,
|
||||
walker
|
||||
);
|
||||
// Register a query transformer to register a join predicate
|
||||
walker.registerQueryTransformer( new NumberSeriesQueryTransformer(
|
||||
functionTableGroup,
|
||||
functionTableGroup,
|
||||
"i",
|
||||
coerceToTimestamp
|
||||
) );
|
||||
return functionTableGroup;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static class SybaseASEGenerateSeriesSetReturningFunctionTypeResolver extends NumberSeriesGenerateSeriesSetReturningFunctionTypeResolver {
|
||||
|
||||
public SybaseASEGenerateSeriesSetReturningFunctionTypeResolver() {
|
||||
super( "v", "i" );
|
||||
}
|
||||
|
||||
@Override
|
||||
public SelectableMapping[] resolveFunctionReturnType(
|
||||
List<? extends SqlAstNode> arguments,
|
||||
String tableIdentifierVariable,
|
||||
boolean lateral,
|
||||
boolean withOrdinality,
|
||||
SqmToSqlAstConverter converter) {
|
||||
return resolveIterationVariableBasedFunctionReturnType( arguments, tableIdentifierVariable, lateral, withOrdinality, converter );
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void renderGenerateSeries(
|
||||
SqlAppender sqlAppender,
|
||||
Expression start,
|
||||
Expression stop,
|
||||
@Nullable Expression step,
|
||||
AnonymousTupleTableGroupProducer tupleType,
|
||||
String tableIdentifierVariable,
|
||||
SqlAstTranslator<?> walker) {
|
||||
final boolean startNeedsEmulation = needsVariable( start );
|
||||
final boolean stepNeedsEmulation = step != null && needsVariable( step );
|
||||
if ( startNeedsEmulation || stepNeedsEmulation ) {
|
||||
sqlAppender.appendSql( "((select" );
|
||||
char separator = ' ';
|
||||
if ( startNeedsEmulation ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
start.accept( walker );
|
||||
separator = ',';
|
||||
}
|
||||
if ( stepNeedsEmulation ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
if ( step instanceof Duration duration ) {
|
||||
duration.getMagnitude().accept( walker );
|
||||
}
|
||||
else {
|
||||
step.accept( walker );
|
||||
}
|
||||
}
|
||||
sqlAppender.appendSql( ") " );
|
||||
sqlAppender.appendSql( tableIdentifierVariable );
|
||||
sqlAppender.appendSql( "_" );
|
||||
separator = '(';
|
||||
if ( startNeedsEmulation ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
sqlAppender.appendSql( "b" );
|
||||
separator = ',';
|
||||
}
|
||||
if ( stepNeedsEmulation ) {
|
||||
sqlAppender.appendSql( separator );
|
||||
sqlAppender.appendSql( "s" );
|
||||
}
|
||||
sqlAppender.appendSql( ") join " );
|
||||
}
|
||||
sqlAppender.appendSql( "xmltable('/r/a' passing '<r>'+replicate('<a/>'," );
|
||||
sqlAppender.appendSql( maxSeriesSize );
|
||||
sqlAppender.appendSql( ")+'</r>' columns i bigint for ordinality, v varchar(255) path '.') " );
|
||||
sqlAppender.appendSql( tableIdentifierVariable );
|
||||
if ( startNeedsEmulation || stepNeedsEmulation ) {
|
||||
sqlAppender.appendSql( " on 1=1)" );
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@@ -20,6 +20,7 @@ import org.hibernate.metamodel.mapping.internal.SelectableMappingImpl;
|
|||
import org.hibernate.query.derived.AnonymousTupleType;
|
||||
import org.hibernate.query.sqm.SqmExpressible;
|
||||
import org.hibernate.query.sqm.produce.function.SetReturningFunctionTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.sql.ast.tree.SqlAstNode;
|
||||
import org.hibernate.sql.ast.tree.expression.Expression;
|
||||
|
@@ -84,8 +85,9 @@ public class UnnestSetReturningFunctionTypeResolver implements SetReturningFunct
|
|||
public SelectableMapping[] resolveFunctionReturnType(
|
||||
List<? extends SqlAstNode> arguments,
|
||||
String tableIdentifierVariable,
|
||||
boolean lateral,
|
||||
boolean withOrdinality,
|
||||
TypeConfiguration typeConfiguration) {
|
||||
SqmToSqlAstConverter converter) {
|
||||
final Expression expression = (Expression) arguments.get( 0 );
|
||||
final JdbcMappingContainer expressionType = expression.getExpressionType();
|
||||
if ( expressionType == null ) {
|
||||
|
@@ -112,7 +114,7 @@ public class UnnestSetReturningFunctionTypeResolver implements SetReturningFunct
|
|||
false,
|
||||
false,
|
||||
false,
|
||||
typeConfiguration.getBasicTypeForJavaType( Long.class )
|
||||
converter.getCreationContext().getTypeConfiguration().getBasicTypeForJavaType( Long.class )
|
||||
) : null;
|
||||
|
||||
final BasicType<?> elementType = pluralType.getElementType();
|
||||
|
|
|
@@ -4,15 +4,19 @@
|
|||
*/
|
||||
package org.hibernate.dialect.function.array;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.metamodel.mapping.MappingModelExpressible;
|
||||
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
|
||||
import org.hibernate.query.sqm.produce.function.ArgumentTypesValidator;
|
||||
import org.hibernate.query.sqm.produce.function.FunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.produce.function.FunctionParameterType;
|
||||
import org.hibernate.query.sqm.produce.function.internal.AbstractFunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmFunction;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.type.BasicPluralType;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Encapsulates the validator, return type and argument type resolvers for the array_contains function.
|
||||
* Subclasses only have to implement the rendering.
|
||||
|
@@ -35,7 +39,7 @@ public abstract class AbstractArrayFillFunction extends AbstractSqmSelfRendering
|
|||
return "(OBJECT element, INTEGER elementCount)";
|
||||
}
|
||||
|
||||
private static class ArrayFillArgumentsValidator implements FunctionArgumentTypeResolver {
|
||||
private static class ArrayFillArgumentsValidator extends AbstractFunctionArgumentTypeResolver {
|
||||
|
||||
public static final FunctionArgumentTypeResolver INSTANCE = new ArrayFillArgumentsValidator();
|
||||
|
||||
|
@@ -43,10 +47,7 @@ public abstract class AbstractArrayFillFunction extends AbstractSqmSelfRendering
|
|||
}
|
||||
|
||||
@Override
|
||||
public MappingModelExpressible<?> resolveFunctionArgumentType(
|
||||
SqmFunction<?> function,
|
||||
int argumentIndex,
|
||||
SqmToSqlAstConverter converter) {
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
if ( argumentIndex == 0 ) {
|
||||
final MappingModelExpressible<?> impliedReturnType = converter.resolveFunctionImpliedReturnType();
|
||||
return impliedReturnType instanceof BasicPluralType<?, ?>
|
||||
|
|
|
@@ -4,13 +4,20 @@
|
|||
*/
|
||||
package org.hibernate.dialect.function.array;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.metamodel.mapping.MappingModelExpressible;
|
||||
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
|
||||
import org.hibernate.query.sqm.produce.function.ArgumentTypesValidator;
|
||||
import org.hibernate.query.sqm.produce.function.FunctionParameterType;
|
||||
import org.hibernate.query.sqm.produce.function.StandardArgumentsValidators;
|
||||
import org.hibernate.query.sqm.produce.function.StandardFunctionReturnTypeResolvers;
|
||||
import org.hibernate.query.sqm.produce.function.internal.AbstractFunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Encapsulates the validator, return type and argument type resolvers for the array_position functions.
|
||||
* Subclasses only have to implement the rendering.
|
||||
|
@@ -30,19 +37,22 @@ public abstract class AbstractArrayPositionFunction extends AbstractSqmSelfRende
|
|||
FunctionParameterType.INTEGER
|
||||
),
|
||||
StandardFunctionReturnTypeResolvers.invariant( typeConfiguration.standardBasicTypeForJavaType( Integer.class ) ),
|
||||
(function, argumentIndex, converter) -> {
|
||||
if ( argumentIndex == 2 ) {
|
||||
return converter.getCreationContext()
|
||||
.getSessionFactory()
|
||||
.getTypeConfiguration()
|
||||
.standardBasicTypeForJavaType( Integer.class );
|
||||
}
|
||||
else {
|
||||
return ArrayAndElementArgumentTypeResolver.DEFAULT_INSTANCE.resolveFunctionArgumentType(
|
||||
function,
|
||||
argumentIndex,
|
||||
converter
|
||||
);
|
||||
new AbstractFunctionArgumentTypeResolver() {
|
||||
@Override
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
if ( argumentIndex == 2 ) {
|
||||
return converter.getCreationContext()
|
||||
.getSessionFactory()
|
||||
.getTypeConfiguration()
|
||||
.standardBasicTypeForJavaType( Integer.class );
|
||||
}
|
||||
else {
|
||||
return ArrayAndElementArgumentTypeResolver.DEFAULT_INSTANCE.resolveFunctionArgumentType(
|
||||
arguments,
|
||||
argumentIndex,
|
||||
converter
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
|
|
|
@@ -4,21 +4,24 @@
|
|||
*/
|
||||
package org.hibernate.dialect.function.array;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.internal.util.collections.ArrayHelper;
|
||||
import org.hibernate.metamodel.mapping.MappingModelExpressible;
|
||||
import org.hibernate.metamodel.model.domain.DomainType;
|
||||
import org.hibernate.query.sqm.produce.function.FunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.produce.function.internal.AbstractFunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmExpression;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmFunction;
|
||||
import org.hibernate.type.BasicPluralType;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* A {@link FunctionArgumentTypeResolver} that resolves the array argument type based on the element argument type
|
||||
* or the element argument type based on the array argument type.
|
||||
*/
|
||||
public class ArrayAndElementArgumentTypeResolver implements FunctionArgumentTypeResolver {
|
||||
public class ArrayAndElementArgumentTypeResolver extends AbstractFunctionArgumentTypeResolver {
|
||||
|
||||
public static final FunctionArgumentTypeResolver DEFAULT_INSTANCE = new ArrayAndElementArgumentTypeResolver( 0, 1 );
|
||||
|
||||
|
@@ -31,13 +34,10 @@ public class ArrayAndElementArgumentTypeResolver implements FunctionArgumentType
|
|||
}
|
||||
|
||||
@Override
|
||||
public MappingModelExpressible<?> resolveFunctionArgumentType(
|
||||
SqmFunction<?> function,
|
||||
int argumentIndex,
|
||||
SqmToSqlAstConverter converter) {
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
if ( argumentIndex == arrayIndex ) {
|
||||
for ( int elementIndex : elementIndexes ) {
|
||||
final SqmTypedNode<?> node = function.getArguments().get( elementIndex );
|
||||
final SqmTypedNode<?> node = arguments.get( elementIndex );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
final MappingModelExpressible<?> expressible = converter.determineValueMapping( (SqmExpression<?>) node );
|
||||
if ( expressible != null ) {
|
||||
|
@@ -50,7 +50,7 @@ public class ArrayAndElementArgumentTypeResolver implements FunctionArgumentType
|
|||
}
|
||||
}
|
||||
else if ( ArrayHelper.contains( elementIndexes, argumentIndex ) ) {
|
||||
final SqmTypedNode<?> node = function.getArguments().get( arrayIndex );
|
||||
final SqmTypedNode<?> node = arguments.get( arrayIndex );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
final MappingModelExpressible<?> expressible = converter.determineValueMapping( (SqmExpression<?>) node );
|
||||
if ( expressible != null ) {
|
||||
|
|
|
@@ -4,29 +4,29 @@
|
|||
*/
|
||||
package org.hibernate.dialect.function.array;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.metamodel.mapping.MappingModelExpressible;
|
||||
import org.hibernate.metamodel.model.domain.DomainType;
|
||||
import org.hibernate.query.sqm.produce.function.FunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.produce.function.internal.AbstractFunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmExpression;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmFunction;
|
||||
import org.hibernate.type.BasicPluralType;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* A {@link FunctionArgumentTypeResolver} that resolves the argument types for the {@code array_contains} function.
|
||||
*/
|
||||
public class ArrayContainsArgumentTypeResolver implements FunctionArgumentTypeResolver {
|
||||
public class ArrayContainsArgumentTypeResolver extends AbstractFunctionArgumentTypeResolver {
|
||||
|
||||
public static final FunctionArgumentTypeResolver INSTANCE = new ArrayContainsArgumentTypeResolver();
|
||||
|
||||
@Override
|
||||
public MappingModelExpressible<?> resolveFunctionArgumentType(
|
||||
SqmFunction<?> function,
|
||||
int argumentIndex,
|
||||
SqmToSqlAstConverter converter) {
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
if ( argumentIndex == 0 ) {
|
||||
final SqmTypedNode<?> node = function.getArguments().get( 1 );
|
||||
final SqmTypedNode<?> node = arguments.get( 1 );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
final MappingModelExpressible<?> expressible = converter.determineValueMapping( (SqmExpression<?>) node );
|
||||
if ( expressible != null ) {
|
||||
|
@@ -43,7 +43,7 @@ public class ArrayContainsArgumentTypeResolver implements FunctionArgumentTypeRe
|
|||
}
|
||||
}
|
||||
else if ( argumentIndex == 1 ) {
|
||||
final SqmTypedNode<?> node = function.getArguments().get( 0 );
|
||||
final SqmTypedNode<?> node = arguments.get( 0 );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
final MappingModelExpressible<?> expressible = converter.determineValueMapping( (SqmExpression<?>) node );
|
||||
if ( expressible != null ) {
|
||||
|
|
|
@@ -4,33 +4,33 @@
|
|||
*/
|
||||
package org.hibernate.dialect.function.array;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.metamodel.mapping.MappingModelExpressible;
|
||||
import org.hibernate.query.sqm.produce.function.FunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.produce.function.internal.AbstractFunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmExpression;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmFunction;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* A {@link FunctionArgumentTypeResolver} that resolves the argument types for the {@code array_includes} function.
|
||||
*/
|
||||
public class ArrayIncludesArgumentTypeResolver implements FunctionArgumentTypeResolver {
|
||||
public class ArrayIncludesArgumentTypeResolver extends AbstractFunctionArgumentTypeResolver {
|
||||
|
||||
public static final FunctionArgumentTypeResolver INSTANCE = new ArrayIncludesArgumentTypeResolver();
|
||||
|
||||
@Override
|
||||
public MappingModelExpressible<?> resolveFunctionArgumentType(
|
||||
SqmFunction<?> function,
|
||||
int argumentIndex,
|
||||
SqmToSqlAstConverter converter) {
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
if ( argumentIndex == 0 ) {
|
||||
final SqmTypedNode<?> node = function.getArguments().get( 1 );
|
||||
final SqmTypedNode<?> node = arguments.get( 1 );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
return converter.determineValueMapping( (SqmExpression<?>) node );
|
||||
}
|
||||
}
|
||||
else if ( argumentIndex == 1 ) {
|
||||
final SqmTypedNode<?> node = function.getArguments().get( 0 );
|
||||
final SqmTypedNode<?> node = arguments.get( 0 );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
return converter.determineValueMapping( (SqmExpression<?>) node );
|
||||
}
|
||||
|
|
|
@@ -41,7 +41,6 @@ import org.hibernate.type.BasicType;
|
|||
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
||||
/**
|
||||
* H2 unnest function.
|
||||
|
@@ -193,8 +192,9 @@ public class H2UnnestFunction extends UnnestFunction {
|
|||
public SelectableMapping[] resolveFunctionReturnType(
|
||||
List<? extends SqlAstNode> arguments,
|
||||
String tableIdentifierVariable,
|
||||
boolean lateral,
|
||||
boolean withOrdinality,
|
||||
TypeConfiguration typeConfiguration) {
|
||||
SqmToSqlAstConverter converter) {
|
||||
final Expression expression = (Expression) arguments.get( 0 );
|
||||
final JdbcMappingContainer expressionType = expression.getExpressionType();
|
||||
if ( expressionType == null ) {
|
||||
|
@@ -221,7 +221,7 @@ public class H2UnnestFunction extends UnnestFunction {
|
|||
false,
|
||||
false,
|
||||
false,
|
||||
typeConfiguration.getBasicTypeForJavaType( Long.class )
|
||||
converter.getCreationContext().getTypeConfiguration().getBasicTypeForJavaType( Long.class )
|
||||
) : null;
|
||||
|
||||
final BasicType<?> elementType = pluralType.getElementType();
|
||||
|
|
|
@@ -36,7 +36,7 @@ public class UnnestFunction extends AbstractSqmSelfRenderingSetReturningFunction
|
|||
protected UnnestFunction(SetReturningFunctionTypeResolver setReturningFunctionTypeResolver) {
|
||||
super(
|
||||
"unnest",
|
||||
null,
|
||||
ArrayArgumentValidator.DEFAULT_INSTANCE,
|
||||
setReturningFunctionTypeResolver,
|
||||
null
|
||||
);
|
||||
|
|
|
@@ -13,6 +13,7 @@ import java.time.Duration;
|
|||
import java.time.Instant;
|
||||
import java.time.temporal.Temporal;
|
||||
import java.time.temporal.TemporalAccessor;
|
||||
import java.time.temporal.TemporalAmount;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@@ -4224,6 +4225,206 @@ public interface HibernateCriteriaBuilder extends CriteriaBuilder {
|
|||
@Incubating
|
||||
<E> JpaSetReturningFunction<E> unnestCollection(Expression<? extends Collection<E>> collection);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Number> JpaSetReturningFunction<E> generateSeries(E start, E stop);
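// Usage sketch (hypothetical query, assuming a HibernateCriteriaBuilder instance "builder"):
//   var query = builder.createQuery( Integer.class );
//   var series = query.from( builder.generateSeries( 1, 10 ) );
//   query.select( series );   // one row per value 1..10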
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Number> JpaSetReturningFunction<E> generateSeries(E start, Expression<E> stop);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Number> JpaSetReturningFunction<E> generateSeries(Expression<E> start, E stop);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Number> JpaSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Number> JpaSetReturningFunction<E> generateSeries(E start, Expression<E> stop, Expression<E> step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Number> JpaSetReturningFunction<E> generateSeries(Expression<E> start, E stop, Expression<E> step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Number> JpaSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop, E step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Number> JpaSetReturningFunction<E> generateSeries(E start, Expression<E> stop, E step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Number> JpaSetReturningFunction<E> generateSeries(Expression<E> start, E stop, E step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Number> JpaSetReturningFunction<E> generateSeries(E start, E stop, Expression<E> step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Number> JpaSetReturningFunction<E> generateSeries(E start, E stop, E step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Number> JpaSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop, Expression<E> step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(E start, Expression<E> stop, Expression<? extends TemporalAmount> step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(Expression<E> start, E stop, Expression<? extends TemporalAmount> step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(E start, E stop, Expression<? extends TemporalAmount> step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(Expression<E> start, Expression<E> stop, TemporalAmount step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(Expression<E> start, E stop, TemporalAmount step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(E start, Expression<E> stop, TemporalAmount step);
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(E start, E stop, TemporalAmount step);
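// Usage sketch (hypothetical values, matching the overload above):
//   query.from( builder.generateTimeSeries(
//           LocalDateTime.of( 2024, 1, 1, 0, 0 ),
//           LocalDateTime.of( 2024, 1, 7, 0, 0 ),
//           Duration.ofDays( 1 ) ) );   // one row per day over the given range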
|
||||
|
||||
/**
|
||||
* Creates a {@code generate_series} function expression to generate a set of values as rows.
|
||||
*
|
||||
* @since 7.0
|
||||
* @see JpaSelectCriteria#from(JpaSetReturningFunction)
|
||||
* @see JpaFrom#join(JpaSetReturningFunction)
|
||||
*/
|
||||
@Incubating
|
||||
<E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(Expression<E> start, Expression<E> stop, Expression<? extends TemporalAmount> step);
|
||||
|
||||
@Override
|
||||
JpaPredicate and(List<Predicate> restrictions);
|
||||
|
||||
|
|
|
@@ -16,6 +16,7 @@ import java.time.LocalDateTime;
|
|||
import java.time.LocalTime;
|
||||
import java.time.temporal.Temporal;
|
||||
import java.time.temporal.TemporalAccessor;
|
||||
import java.time.temporal.TemporalAmount;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@@ -3753,4 +3754,124 @@ public class HibernateCriteriaBuilderDelegate implements HibernateCriteriaBuilde
|
|||
public <E> JpaSetReturningFunction<E> unnestCollection(Expression<? extends Collection<E>> collection) {
|
||||
return criteriaBuilder.unnestCollection( collection );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Number> JpaSetReturningFunction<E> generateSeries(E start, E stop) {
|
||||
return criteriaBuilder.generateSeries( start, stop );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Number> JpaSetReturningFunction<E> generateSeries(E start, Expression<E> stop) {
|
||||
return criteriaBuilder.generateSeries( start, stop );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Number> JpaSetReturningFunction<E> generateSeries(Expression<E> start, E stop) {
|
||||
return criteriaBuilder.generateSeries( start, stop );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Number> JpaSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop) {
|
||||
return criteriaBuilder.generateSeries( start, stop );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Number> JpaSetReturningFunction<E> generateSeries(E start, Expression<E> stop, Expression<E> step) {
|
||||
return criteriaBuilder.generateSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Number> JpaSetReturningFunction<E> generateSeries(Expression<E> start, E stop, Expression<E> step) {
|
||||
return criteriaBuilder.generateSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Number> JpaSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop, E step) {
|
||||
return criteriaBuilder.generateSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Number> JpaSetReturningFunction<E> generateSeries(E start, Expression<E> stop, E step) {
|
||||
return criteriaBuilder.generateSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Number> JpaSetReturningFunction<E> generateSeries(Expression<E> start, E stop, E step) {
|
||||
return criteriaBuilder.generateSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Number> JpaSetReturningFunction<E> generateSeries(E start, E stop, Expression<E> step) {
|
||||
return criteriaBuilder.generateSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Number> JpaSetReturningFunction<E> generateSeries(E start, E stop, E step) {
|
||||
return criteriaBuilder.generateSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Number> JpaSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop, Expression<E> step) {
|
||||
return criteriaBuilder.generateSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(E start, Expression<E> stop, Expression<? extends TemporalAmount> step) {
|
||||
return criteriaBuilder.generateTimeSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(Expression<E> start, E stop, Expression<? extends TemporalAmount> step) {
|
||||
return criteriaBuilder.generateTimeSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(E start, E stop, Expression<? extends TemporalAmount> step) {
|
||||
return criteriaBuilder.generateTimeSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(Expression<E> start, Expression<E> stop, TemporalAmount step) {
|
||||
return criteriaBuilder.generateTimeSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(Expression<E> start, E stop, TemporalAmount step) {
|
||||
return criteriaBuilder.generateTimeSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(E start, Expression<E> stop, TemporalAmount step) {
|
||||
return criteriaBuilder.generateTimeSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(E start, E stop, TemporalAmount step) {
|
||||
return criteriaBuilder.generateTimeSeries( start, stop, step );
|
||||
}
|
||||
|
||||
@Incubating
|
||||
@Override
|
||||
public <E extends Temporal> JpaSetReturningFunction<E> generateTimeSeries(Expression<E> start, Expression<E> stop, Expression<? extends TemporalAmount> step) {
|
||||
return criteriaBuilder.generateTimeSeries( start, stop, step );
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -12,6 +12,7 @@ import java.util.Map;
|
|||
import org.hibernate.Incubating;
|
||||
import org.hibernate.internal.util.collections.CollectionHelper;
|
||||
import org.hibernate.metamodel.UnsupportedMappingException;
|
||||
import org.hibernate.metamodel.mapping.CollectionPart;
|
||||
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
|
||||
import org.hibernate.metamodel.mapping.SqlTypedMapping;
|
||||
import org.hibernate.metamodel.mapping.internal.SqlTypedMappingImpl;
|
||||
|
@@ -43,7 +44,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
|
|||
@Incubating
|
||||
public class AnonymousTupleType<T> implements TupleType<T>, DomainType<T>, ReturnableType<T>, SqmPathSource<T> {
|
||||
|
||||
private final ObjectArrayJavaType javaTypeDescriptor;
|
||||
private final JavaType<T> javaTypeDescriptor;
|
||||
private final @Nullable NavigablePath[] componentSourcePaths;
|
||||
private final SqmExpressible<?>[] expressibles;
|
||||
private final String[] componentNames;
|
||||
|
@@ -65,7 +66,8 @@ public class AnonymousTupleType<T> implements TupleType<T>, DomainType<T>, Retur
|
|||
this.expressibles = expressibles;
|
||||
this.componentSourcePaths = componentSourcePaths;
|
||||
this.componentNames = new String[components.length];
|
||||
this.javaTypeDescriptor = new ObjectArrayJavaType( getTypeDescriptors( components ) );
|
||||
//noinspection unchecked
|
||||
this.javaTypeDescriptor = (JavaType<T>) new ObjectArrayJavaType( getTypeDescriptors( components ) );
|
||||
final Map<String, Integer> map = CollectionHelper.linkedMapOfSize( components.length );
|
||||
for ( int i = 0; i < components.length; i++ ) {
|
||||
final SqmSelectableNode<?> component = components[i];
|
||||
|
@@ -84,11 +86,23 @@ public class AnonymousTupleType<T> implements TupleType<T>, DomainType<T>, Retur
|
|||
this.componentSourcePaths = new NavigablePath[componentNames.length];
|
||||
this.expressibles = expressibles;
|
||||
this.componentNames = componentNames;
|
||||
this.javaTypeDescriptor = new ObjectArrayJavaType( getTypeDescriptors( expressibles ) );
|
||||
final Map<String, Integer> map = CollectionHelper.linkedMapOfSize( expressibles.length );
|
||||
int elementIndex = -1;
|
||||
for ( int i = 0; i < componentNames.length; i++ ) {
|
||||
if ( CollectionPart.Nature.ELEMENT.getName().equals( componentNames[i] ) ) {
|
||||
elementIndex = i;
|
||||
}
|
||||
map.put( componentNames[i], i );
|
||||
}
|
||||
// The expressible java type of this tuple type must be equal to the element type if it exists
|
||||
if ( elementIndex == -1 ) {
|
||||
//noinspection unchecked
|
||||
this.javaTypeDescriptor = (JavaType<T>) new ObjectArrayJavaType( getTypeDescriptors( expressibles ) );
|
||||
}
|
||||
else {
|
||||
//noinspection unchecked
|
||||
this.javaTypeDescriptor = (JavaType<T>) expressibles[elementIndex].getExpressibleJavaType();
|
||||
}
|
||||
this.componentIndexMap = map;
|
||||
}
|
||||
|
||||
|
|
|
@@ -10,6 +10,8 @@ import java.sql.Date;
|
|||
import java.sql.Time;
|
||||
import java.sql.Timestamp;
|
||||
import java.time.Instant;
|
||||
import java.time.temporal.Temporal;
|
||||
import java.time.temporal.TemporalAmount;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@@ -811,6 +813,66 @@ public interface NodeBuilder extends HibernateCriteriaBuilder, BindingContext {
|
|||
@Override
|
||||
<E> SqmSetReturningFunction<E> unnestCollection(Expression<? extends Collection<E>> collection);
|
||||
|
||||
@Override
|
||||
<E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(Expression<E> start, Expression<E> stop, Expression<? extends TemporalAmount> step);
|
||||
|
||||
@Override
|
||||
<E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(E start, E stop, TemporalAmount step);
|
||||
|
||||
@Override
|
||||
<E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(E start, Expression<E> stop, TemporalAmount step);
|
||||
|
||||
@Override
|
||||
<E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(Expression<E> start, E stop, TemporalAmount step);
|
||||
|
||||
@Override
|
||||
<E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(Expression<E> start, Expression<E> stop, TemporalAmount step);
|
||||
|
||||
@Override
|
||||
<E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(E start, E stop, Expression<? extends TemporalAmount> step);
|
||||
|
||||
@Override
|
||||
<E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(Expression<E> start, E stop, Expression<? extends TemporalAmount> step);
|
||||
|
||||
@Override
|
||||
<E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(E start, Expression<E> stop, Expression<? extends TemporalAmount> step);
|
||||
|
||||
@Override
|
||||
<E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop, Expression<E> step);
|
||||
|
||||
@Override
|
||||
<E extends Number> SqmSetReturningFunction<E> generateSeries(E start, E stop, E step);
|
||||
|
||||
@Override
|
||||
<E extends Number> SqmSetReturningFunction<E> generateSeries(E start, E stop, Expression<E> step);
|
||||
|
||||
@Override
|
||||
<E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, E stop, E step);
|
||||
|
||||
@Override
|
||||
<E extends Number> SqmSetReturningFunction<E> generateSeries(E start, Expression<E> stop, E step);
|
||||
|
||||
@Override
|
||||
<E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop, E step);
|
||||
|
||||
@Override
|
||||
<E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, E stop, Expression<E> step);
|
||||
|
||||
@Override
|
||||
<E extends Number> SqmSetReturningFunction<E> generateSeries(E start, Expression<E> stop, Expression<E> step);
|
||||
|
||||
@Override
|
||||
<E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop);
|
||||
|
||||
@Override
|
||||
<E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, E stop);
|
||||
|
||||
@Override
|
||||
<E extends Number> SqmSetReturningFunction<E> generateSeries(E start, Expression<E> stop);
|
||||
|
||||
@Override
|
||||
<E extends Number> SqmSetReturningFunction<E> generateSeries(E start, E stop);
|
||||
|
||||
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
// Covariant overrides
|
||||
|
||||
|
|
|
@@ -6,6 +6,7 @@ package org.hibernate.query.sqm.function;
|
|||
|
||||
import java.util.List;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.query.ReturnableType;
|
||||
import org.hibernate.query.spi.QueryEngine;
|
||||
import org.hibernate.query.sqm.produce.function.ArgumentsValidator;
|
||||
|
@@ -33,22 +34,22 @@ public abstract class AbstractSqmFunctionDescriptor implements SqmFunctionDescri
|
|||
|
||||
public AbstractSqmFunctionDescriptor(
|
||||
String name,
|
||||
ArgumentsValidator argumentsValidator) {
|
||||
@Nullable ArgumentsValidator argumentsValidator) {
|
||||
this( name, argumentsValidator, null, null );
|
||||
}
|
||||
|
||||
public AbstractSqmFunctionDescriptor(
|
||||
String name,
|
||||
ArgumentsValidator argumentsValidator,
|
||||
FunctionArgumentTypeResolver argumentTypeResolver) {
|
||||
@Nullable ArgumentsValidator argumentsValidator,
|
||||
@Nullable FunctionArgumentTypeResolver argumentTypeResolver) {
|
||||
this( name, argumentsValidator, null, argumentTypeResolver );
|
||||
}
|
||||
|
||||
public AbstractSqmFunctionDescriptor(
|
||||
String name,
|
||||
ArgumentsValidator argumentsValidator,
|
||||
FunctionReturnTypeResolver returnTypeResolver,
|
||||
FunctionArgumentTypeResolver argumentTypeResolver) {
|
||||
@Nullable ArgumentsValidator argumentsValidator,
|
||||
@Nullable FunctionReturnTypeResolver returnTypeResolver,
|
||||
@Nullable FunctionArgumentTypeResolver argumentTypeResolver) {
|
||||
this.name = name;
|
||||
this.argumentsValidator = argumentsValidator == null
|
||||
? StandardArgumentsValidators.NONE
|
||||
|
|
|
@@ -4,6 +4,7 @@
|
|||
*/
|
||||
package org.hibernate.query.sqm.function;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.query.ReturnableType;
|
||||
import org.hibernate.query.spi.QueryEngine;
|
||||
import org.hibernate.query.sqm.produce.function.ArgumentsValidator;
|
||||
|
@@ -28,9 +29,9 @@ public abstract class AbstractSqmSelfRenderingFunctionDescriptor
|
|||
|
||||
public AbstractSqmSelfRenderingFunctionDescriptor(
|
||||
String name,
|
||||
ArgumentsValidator argumentsValidator,
|
||||
FunctionReturnTypeResolver returnTypeResolver,
|
||||
FunctionArgumentTypeResolver argumentTypeResolver) {
|
||||
@Nullable ArgumentsValidator argumentsValidator,
|
||||
@Nullable FunctionReturnTypeResolver returnTypeResolver,
|
||||
@Nullable FunctionArgumentTypeResolver argumentTypeResolver) {
|
||||
super( name, argumentsValidator, returnTypeResolver, argumentTypeResolver );
|
||||
this.functionKind = FunctionKind.NORMAL;
|
||||
}
|
||||
|
@@ -38,9 +39,9 @@ public abstract class AbstractSqmSelfRenderingFunctionDescriptor
|
|||
public AbstractSqmSelfRenderingFunctionDescriptor(
|
||||
String name,
|
||||
FunctionKind functionKind,
|
||||
ArgumentsValidator argumentsValidator,
|
||||
FunctionReturnTypeResolver returnTypeResolver,
|
||||
FunctionArgumentTypeResolver argumentTypeResolver) {
|
||||
@Nullable ArgumentsValidator argumentsValidator,
|
||||
@Nullable FunctionReturnTypeResolver returnTypeResolver,
|
||||
@Nullable FunctionArgumentTypeResolver argumentTypeResolver) {
|
||||
super( name, argumentsValidator, returnTypeResolver, argumentTypeResolver );
|
||||
this.functionKind = functionKind;
|
||||
}
|
||||
|
|
|
@@ -4,6 +4,7 @@
|
|||
*/
|
||||
package org.hibernate.query.sqm.function;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.query.ReturnableType;
|
||||
import org.hibernate.query.sqm.produce.function.ArgumentsValidator;
|
||||
import org.hibernate.query.sqm.produce.function.FunctionArgumentTypeResolver;
|
||||
|
@@ -41,8 +42,8 @@ public class NamedSqmFunctionDescriptor
|
|||
public NamedSqmFunctionDescriptor(
|
||||
String functionName,
|
||||
boolean useParenthesesWhenNoArgs,
|
||||
ArgumentsValidator argumentsValidator,
|
||||
FunctionReturnTypeResolver returnTypeResolver) {
|
||||
@Nullable ArgumentsValidator argumentsValidator,
|
||||
@Nullable FunctionReturnTypeResolver returnTypeResolver) {
|
||||
this(
|
||||
functionName,
|
||||
useParenthesesWhenNoArgs,
|
||||
|
@@ -59,9 +60,9 @@ public class NamedSqmFunctionDescriptor
|
|||
public NamedSqmFunctionDescriptor(
|
||||
String functionName,
|
||||
boolean useParenthesesWhenNoArgs,
|
||||
ArgumentsValidator argumentsValidator,
|
||||
FunctionReturnTypeResolver returnTypeResolver,
|
||||
FunctionArgumentTypeResolver argumentTypeResolver) {
|
||||
@Nullable ArgumentsValidator argumentsValidator,
|
||||
@Nullable FunctionReturnTypeResolver returnTypeResolver,
|
||||
@Nullable FunctionArgumentTypeResolver argumentTypeResolver) {
|
||||
this(
|
||||
functionName,
|
||||
useParenthesesWhenNoArgs,
|
||||
|
@@ -78,9 +79,9 @@ public class NamedSqmFunctionDescriptor
|
|||
public NamedSqmFunctionDescriptor(
|
||||
String functionName,
|
||||
boolean useParenthesesWhenNoArgs,
|
||||
ArgumentsValidator argumentsValidator,
|
||||
FunctionReturnTypeResolver returnTypeResolver,
|
||||
FunctionArgumentTypeResolver argumentTypeResolver,
|
||||
@Nullable ArgumentsValidator argumentsValidator,
|
||||
@Nullable FunctionReturnTypeResolver returnTypeResolver,
|
||||
@Nullable FunctionArgumentTypeResolver argumentTypeResolver,
|
||||
String name,
|
||||
FunctionKind functionKind,
|
||||
String argumentListSignature,
|
||||
|
|
|
@@ -238,7 +238,7 @@ public class SelfRenderingSqmFunction<T> extends SqmFunction<T> {
|
|||
|
||||
@Override
|
||||
public MappingModelExpressible<?> get() {
|
||||
return argumentTypeResolver.resolveFunctionArgumentType( function, argumentIndex, converter );
|
||||
return argumentTypeResolver.resolveFunctionArgumentType( function.getArguments(), argumentIndex, converter );
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -6,15 +6,18 @@ package org.hibernate.query.sqm.function;
|
|||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import org.hibernate.Incubating;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
|
||||
import org.hibernate.metamodel.mapping.MappingModelExpressible;
|
||||
import org.hibernate.metamodel.mapping.SelectableMapping;
|
||||
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
|
||||
import org.hibernate.query.derived.AnonymousTupleType;
|
||||
import org.hibernate.query.sqm.NodeBuilder;
|
||||
import org.hibernate.query.sqm.produce.function.ArgumentsValidator;
|
||||
import org.hibernate.query.sqm.produce.function.FunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.produce.function.SetReturningFunctionTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmCopyContext;
|
||||
|
@ -98,13 +101,59 @@ public class SelfRenderingSqmSetReturningFunction<T> extends SqmSetReturningFunc
|
|||
if ( sqmArguments.isEmpty() ) {
|
||||
return emptyList();
|
||||
}
|
||||
final ArrayList<SqlAstNode> sqlAstArguments = new ArrayList<>( sqmArguments.size() );
|
||||
for ( int i = 0; i < sqmArguments.size(); i++ ) {
|
||||
sqlAstArguments.add(
|
||||
(SqlAstNode) sqmArguments.get( i ).accept( walker )
|
||||
);
|
||||
final FunctionArgumentTypeResolver argumentTypeResolver;
|
||||
if ( getFunctionDescriptor() instanceof AbstractSqmSetReturningFunctionDescriptor ) {
|
||||
argumentTypeResolver = ( (AbstractSqmSetReturningFunctionDescriptor) getFunctionDescriptor() ).getArgumentTypeResolver();
|
||||
}
|
||||
else {
|
||||
argumentTypeResolver = null;
|
||||
}
|
||||
if ( argumentTypeResolver == null ) {
|
||||
final ArrayList<SqlAstNode> sqlAstArguments = new ArrayList<>( sqmArguments.size() );
|
||||
for ( int i = 0; i < sqmArguments.size(); i++ ) {
|
||||
sqlAstArguments.add(
|
||||
(SqlAstNode) sqmArguments.get( i ).accept( walker )
|
||||
);
|
||||
}
|
||||
return sqlAstArguments;
|
||||
}
|
||||
else {
|
||||
final FunctionArgumentTypeResolverTypeAccess typeAccess = new FunctionArgumentTypeResolverTypeAccess(
|
||||
walker,
|
||||
this,
|
||||
argumentTypeResolver
|
||||
);
|
||||
final ArrayList<SqlAstNode> sqlAstArguments = new ArrayList<>( sqmArguments.size() );
|
||||
for ( int i = 0; i < sqmArguments.size(); i++ ) {
|
||||
typeAccess.argumentIndex = i;
|
||||
sqlAstArguments.add(
|
||||
(SqlAstNode) walker.visitWithInferredType( sqmArguments.get( i ), typeAccess )
|
||||
);
|
||||
}
|
||||
return sqlAstArguments;
|
||||
}
|
||||
}
|
||||
|
||||
private static class FunctionArgumentTypeResolverTypeAccess implements Supplier<MappingModelExpressible<?>> {
|
||||
|
||||
private final SqmToSqlAstConverter converter;
|
||||
private final SqmSetReturningFunction<?> function;
|
||||
private final FunctionArgumentTypeResolver argumentTypeResolver;
|
||||
private int argumentIndex;
|
||||
|
||||
public FunctionArgumentTypeResolverTypeAccess(
|
||||
SqmToSqlAstConverter converter,
|
||||
SqmSetReturningFunction<?> function,
|
||||
FunctionArgumentTypeResolver argumentTypeResolver) {
|
||||
this.converter = converter;
|
||||
this.function = function;
|
||||
this.argumentTypeResolver = argumentTypeResolver;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MappingModelExpressible<?> get() {
|
||||
return argumentTypeResolver.resolveFunctionArgumentType( function.getArguments(), argumentIndex, converter );
|
||||
}
|
||||
return sqlAstArguments;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -123,8 +172,9 @@ public class SelfRenderingSqmSetReturningFunction<T> extends SqmSetReturningFunc
|
|||
final SelectableMapping[] selectableMappings = getSetReturningTypeResolver().resolveFunctionReturnType(
|
||||
arguments,
|
||||
identifierVariable,
|
||||
lateral,
|
||||
withOrdinality,
|
||||
walker.getCreationContext().getTypeConfiguration()
|
||||
walker
|
||||
);
|
||||
final AnonymousTupleTableGroupProducer tableGroupProducer = getType().resolveTableGroupProducer(
|
||||
identifierVariable,
|
||||
|
|
|
@ -19,6 +19,7 @@ import java.time.LocalDateTime;
|
|||
import java.time.LocalTime;
|
||||
import java.time.temporal.Temporal;
|
||||
import java.time.temporal.TemporalAccessor;
|
||||
import java.time.temporal.TemporalAmount;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
|
@ -5842,4 +5843,113 @@ public class SqmCriteriaNodeBuilder implements NodeBuilder, Serializable {
|
|||
queryEngine
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(Expression<E> start, Expression<E> stop, Expression<? extends TemporalAmount> step) {
|
||||
return getSetReturningFunctionDescriptor( "generate_series" ).generateSqmExpression(
|
||||
asList( (SqmTypedNode<?>) start, (SqmTypedNode<?>) stop, (SqmTypedNode<?>) step ),
|
||||
queryEngine
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(E start, E stop, TemporalAmount step) {
|
||||
return generateTimeSeries( value( start ), value( stop ), value( step ) );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(E start, Expression<E> stop, TemporalAmount step) {
|
||||
return generateTimeSeries( value( start ), stop, value( step ) );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(Expression<E> start, E stop, TemporalAmount step) {
|
||||
return generateTimeSeries( start, value( stop ), value( step ) );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(Expression<E> start, Expression<E> stop, TemporalAmount step) {
|
||||
return generateTimeSeries( start, stop, value( step ) );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(E start, E stop, Expression<? extends TemporalAmount> step) {
|
||||
return generateTimeSeries( value( start ), value( stop ), step );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(Expression<E> start, E stop, Expression<? extends TemporalAmount> step) {
|
||||
return generateTimeSeries( start, value( stop ), step );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Temporal> SqmSetReturningFunction<E> generateTimeSeries(E start, Expression<E> stop, Expression<? extends TemporalAmount> step) {
|
||||
return generateTimeSeries( value( start ), stop, step );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop, Expression<E> step) {
|
||||
return getSetReturningFunctionDescriptor( "generate_series" ).generateSqmExpression(
|
||||
asList( (SqmTypedNode<?>) start, (SqmTypedNode<?>) stop, (SqmTypedNode<?>) step ),
|
||||
queryEngine
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Number> SqmSetReturningFunction<E> generateSeries(E start, E stop, E step) {
|
||||
return generateSeries( value( start ), value( stop ), value( step ) );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Number> SqmSetReturningFunction<E> generateSeries(E start, E stop, Expression<E> step) {
|
||||
return generateSeries( value( start ), value( stop ), step );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, E stop, E step) {
|
||||
return generateSeries( start, value( stop ), value( step ) );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Number> SqmSetReturningFunction<E> generateSeries(E start, Expression<E> stop, E step) {
|
||||
return generateSeries( value( start ), stop, value( step ) );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop, E step) {
|
||||
return generateSeries( start, stop, value( step ) );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, E stop, Expression<E> step) {
|
||||
return generateSeries( start, value( stop ), step );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Number> SqmSetReturningFunction<E> generateSeries(E start, Expression<E> stop, Expression<E> step) {
|
||||
return generateSeries( value( start ), stop, step );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, Expression<E> stop) {
|
||||
return getSetReturningFunctionDescriptor( "generate_series" ).generateSqmExpression(
|
||||
asList( (SqmTypedNode<?>) start, (SqmTypedNode<?>) stop ),
|
||||
queryEngine
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Number> SqmSetReturningFunction<E> generateSeries(Expression<E> start, E stop) {
|
||||
return generateSeries( start, value( stop ) );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Number> SqmSetReturningFunction<E> generateSeries(E start, Expression<E> stop) {
|
||||
return generateSeries( value( start ), stop );
|
||||
}
|
||||
|
||||
@Override
|
||||
public <E extends Number> SqmSetReturningFunction<E> generateSeries(E start, E stop) {
|
||||
return generateSeries( value( start ), value( stop ) );
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,9 +4,17 @@
|
|||
*/
|
||||
package org.hibernate.query.sqm.produce.function;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.metamodel.mapping.MappingModelExpressible;
|
||||
import org.hibernate.query.sqm.function.NamedSqmFunctionDescriptor;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmCopyContext;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmExpression;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmFunction;
|
||||
import org.hibernate.sql.ast.tree.expression.Expression;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Pluggable strategy for resolving a function argument type for a specific call.
|
||||
|
@ -22,9 +30,48 @@ public interface FunctionArgumentTypeResolver {
|
|||
* the implied type would be defined by the type of `something`.
|
||||
*
|
||||
* @return The resolved type.
|
||||
* @deprecated Use {@link #resolveFunctionArgumentType(List, int, SqmToSqlAstConverter)} instead
|
||||
*/
|
||||
MappingModelExpressible<?> resolveFunctionArgumentType(
|
||||
@Deprecated(forRemoval = true)
|
||||
@Nullable MappingModelExpressible<?> resolveFunctionArgumentType(
|
||||
SqmFunction<?> function,
|
||||
int argumentIndex,
|
||||
SqmToSqlAstConverter converter);
|
||||
|
||||
/**
|
||||
* Resolve the argument type for a function given its context-implied return type.
|
||||
* <p>
|
||||
* The <em>context-implied</em> type is the type implied by where the function
|
||||
* occurs in the query. E.g., for an equality predicate (`something = some_function`)
|
||||
* the implied type would be defined by the type of `something`.
|
||||
*
|
||||
* @return The resolved type.
|
||||
* @since 7.0
|
||||
*/
|
||||
default @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(
|
||||
List<? extends SqmTypedNode<?>> arguments,
|
||||
int argumentIndex,
|
||||
SqmToSqlAstConverter converter) {
|
||||
return resolveFunctionArgumentType(
|
||||
new SqmFunction<>(
|
||||
"",
|
||||
new NamedSqmFunctionDescriptor( "", false, null, null ),
|
||||
null,
|
||||
arguments,
|
||||
converter.getCreationContext().getSessionFactory().getNodeBuilder()
|
||||
) {
|
||||
@Override
|
||||
public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public SqmExpression<Object> copy(SqmCopyContext context) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
},
|
||||
argumentIndex,
|
||||
converter
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,6 +9,7 @@ import org.hibernate.metamodel.mapping.SelectableMapping;
|
|||
import org.hibernate.metamodel.mapping.SqlExpressible;
|
||||
import org.hibernate.query.derived.AnonymousTupleType;
|
||||
import org.hibernate.query.sqm.produce.function.internal.SetReturningFunctionTypeResolverBuilder;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.sql.ast.tree.SqlAstNode;
|
||||
import org.hibernate.type.BasicType;
|
||||
|
@ -40,8 +41,9 @@ public interface SetReturningFunctionTypeResolver {
|
|||
SelectableMapping[] resolveFunctionReturnType(
|
||||
List<? extends SqlAstNode> arguments,
|
||||
String tableIdentifierVariable,
|
||||
boolean lateral,
|
||||
boolean withOrdinality,
|
||||
TypeConfiguration typeConfiguration);
|
||||
SqmToSqlAstConverter converter);
|
||||
|
||||
/**
|
||||
* Creates a builder for a type resolver.
|
||||
|
|
|
@ -10,7 +10,10 @@ import java.sql.Time;
|
|||
import java.sql.Timestamp;
|
||||
import java.util.List;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.metamodel.mapping.MappingModelExpressible;
|
||||
import org.hibernate.query.sqm.produce.function.internal.AbstractFunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmExpression;
|
||||
import org.hibernate.type.spi.TypeConfiguration;
|
||||
|
@ -25,44 +28,59 @@ public final class StandardFunctionArgumentTypeResolvers {
|
|||
private StandardFunctionArgumentTypeResolvers() {
|
||||
}
|
||||
|
||||
public static final FunctionArgumentTypeResolver NULL = (function, argumentIndex, converter) -> {
|
||||
return null;
|
||||
public static final FunctionArgumentTypeResolver NULL = new AbstractFunctionArgumentTypeResolver() {
|
||||
@Override
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
public static final FunctionArgumentTypeResolver IMPLIED_RESULT_TYPE = (function, argumentIndex, converter) -> {
|
||||
return converter.resolveFunctionImpliedReturnType();
|
||||
public static final FunctionArgumentTypeResolver IMPLIED_RESULT_TYPE = new AbstractFunctionArgumentTypeResolver() {
|
||||
@Override
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
return converter.resolveFunctionImpliedReturnType();
|
||||
}
|
||||
};
|
||||
|
||||
public static final FunctionArgumentTypeResolver ARGUMENT_OR_IMPLIED_RESULT_TYPE = (function, argumentIndex, converter) -> {
|
||||
final List<? extends SqmTypedNode<?>> arguments = function.getArguments();
|
||||
final int argumentsSize = arguments.size();
|
||||
for ( int i = 0 ; i < argumentIndex; i++ ) {
|
||||
final SqmTypedNode<?> node = arguments.get( i );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
final MappingModelExpressible<?> expressible = converter.determineValueMapping( (SqmExpression<?>) node );
|
||||
if ( expressible != null ) {
|
||||
return expressible;
|
||||
public static final FunctionArgumentTypeResolver ARGUMENT_OR_IMPLIED_RESULT_TYPE = new AbstractFunctionArgumentTypeResolver() {
|
||||
@Override
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
final int argumentsSize = arguments.size();
|
||||
for ( int i = 0; i < argumentIndex; i++ ) {
|
||||
final SqmTypedNode<?> node = arguments.get( i );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
final MappingModelExpressible<?> expressible = converter.determineValueMapping(
|
||||
(SqmExpression<?>) node );
|
||||
if ( expressible != null ) {
|
||||
return expressible;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for ( int i = argumentIndex + 1 ; i < argumentsSize; i++ ) {
|
||||
final SqmTypedNode<?> node = arguments.get( i );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
final MappingModelExpressible<?> expressible = converter.determineValueMapping( (SqmExpression<?>) node );
|
||||
if ( expressible != null ) {
|
||||
return expressible;
|
||||
for ( int i = argumentIndex + 1; i < argumentsSize; i++ ) {
|
||||
final SqmTypedNode<?> node = arguments.get( i );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
final MappingModelExpressible<?> expressible = converter.determineValueMapping(
|
||||
(SqmExpression<?>) node );
|
||||
if ( expressible != null ) {
|
||||
return expressible;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return converter.resolveFunctionImpliedReturnType();
|
||||
return converter.resolveFunctionImpliedReturnType();
|
||||
}
|
||||
};
|
||||
|
||||
public static FunctionArgumentTypeResolver invariant(
|
||||
TypeConfiguration typeConfiguration,
|
||||
FunctionParameterType type) {
|
||||
final MappingModelExpressible<?> expressible = getMappingModelExpressible( typeConfiguration, type );
|
||||
return (function, argumentIndex, converter) -> expressible;
|
||||
return new AbstractFunctionArgumentTypeResolver() {
|
||||
@Override
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
return expressible;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public static FunctionArgumentTypeResolver invariant(
|
||||
|
@ -73,82 +91,105 @@ public final class StandardFunctionArgumentTypeResolvers {
|
|||
expressibles[i] = getMappingModelExpressible( typeConfiguration, types[i] );
|
||||
}
|
||||
|
||||
return (function, argumentIndex, converter) -> expressibles[argumentIndex];
|
||||
return new AbstractFunctionArgumentTypeResolver() {
|
||||
@Override
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
return expressibles[argumentIndex];
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public static FunctionArgumentTypeResolver invariant(FunctionParameterType... types) {
|
||||
return (function, argumentIndex, converter) -> getMappingModelExpressible(
|
||||
function.nodeBuilder().getTypeConfiguration(),
|
||||
types[argumentIndex]
|
||||
);
|
||||
return new AbstractFunctionArgumentTypeResolver() {
|
||||
@Override
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
return getMappingModelExpressible(
|
||||
converter.getCreationContext().getTypeConfiguration(),
|
||||
types[argumentIndex]
|
||||
);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public static FunctionArgumentTypeResolver impliedOrInvariant(
|
||||
TypeConfiguration typeConfiguration,
|
||||
FunctionParameterType type) {
|
||||
final MappingModelExpressible<?> expressible = getMappingModelExpressible( typeConfiguration, type );
|
||||
return (function, argumentIndex, converter) -> {
|
||||
final MappingModelExpressible<?> mappingModelExpressible = converter.resolveFunctionImpliedReturnType();
|
||||
if ( mappingModelExpressible != null ) {
|
||||
return mappingModelExpressible;
|
||||
return new AbstractFunctionArgumentTypeResolver() {
|
||||
@Override
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
final MappingModelExpressible<?> mappingModelExpressible = converter.resolveFunctionImpliedReturnType();
|
||||
if ( mappingModelExpressible != null ) {
|
||||
return mappingModelExpressible;
|
||||
}
|
||||
return expressible;
|
||||
}
|
||||
return expressible;
|
||||
};
|
||||
}
|
||||
|
||||
public static FunctionArgumentTypeResolver argumentsOrImplied(int... indices) {
|
||||
return (function, argumentIndex, converter) -> {
|
||||
final List<? extends SqmTypedNode<?>> arguments = function.getArguments();
|
||||
final int argumentsSize = arguments.size();
|
||||
for ( int index : indices ) {
|
||||
if ( index >= argumentIndex || index >= argumentsSize ) {
|
||||
break;
|
||||
}
|
||||
final SqmTypedNode<?> node = arguments.get( index );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
final MappingModelExpressible<?> expressible = converter.determineValueMapping( (SqmExpression<?>) node );
|
||||
if ( expressible != null ) {
|
||||
return expressible;
|
||||
return new AbstractFunctionArgumentTypeResolver() {
|
||||
@Override
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
final int argumentsSize = arguments.size();
|
||||
for ( int index : indices ) {
|
||||
if ( index >= argumentIndex || index >= argumentsSize ) {
|
||||
break;
|
||||
}
|
||||
final SqmTypedNode<?> node = arguments.get( index );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
final MappingModelExpressible<?> expressible = converter.determineValueMapping(
|
||||
(SqmExpression<?>) node );
|
||||
if ( expressible != null ) {
|
||||
return expressible;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for ( int index : indices ) {
|
||||
if ( index <= argumentIndex || index >= argumentsSize ) {
|
||||
break;
|
||||
}
|
||||
final SqmTypedNode<?> node = arguments.get( index );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
final MappingModelExpressible<?> expressible = converter.determineValueMapping( (SqmExpression<?>) node );
|
||||
if ( expressible != null ) {
|
||||
return expressible;
|
||||
for ( int index : indices ) {
|
||||
if ( index <= argumentIndex || index >= argumentsSize ) {
|
||||
break;
|
||||
}
|
||||
final SqmTypedNode<?> node = arguments.get( index );
|
||||
if ( node instanceof SqmExpression<?> ) {
|
||||
final MappingModelExpressible<?> expressible = converter.determineValueMapping(
|
||||
(SqmExpression<?>) node );
|
||||
if ( expressible != null ) {
|
||||
return expressible;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return converter.resolveFunctionImpliedReturnType();
|
||||
return converter.resolveFunctionImpliedReturnType();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public static FunctionArgumentTypeResolver composite(FunctionArgumentTypeResolver... resolvers) {
|
||||
return (function, argumentIndex, converter) -> {
|
||||
for ( FunctionArgumentTypeResolver resolver : resolvers ) {
|
||||
final MappingModelExpressible<?> result = resolver.resolveFunctionArgumentType(
|
||||
function,
|
||||
argumentIndex,
|
||||
converter
|
||||
);
|
||||
if ( result != null ) {
|
||||
return result;
|
||||
return new AbstractFunctionArgumentTypeResolver() {
|
||||
@Override
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
for ( FunctionArgumentTypeResolver resolver : resolvers ) {
|
||||
final MappingModelExpressible<?> result = resolver.resolveFunctionArgumentType(
|
||||
arguments,
|
||||
argumentIndex,
|
||||
converter
|
||||
);
|
||||
if ( result != null ) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
return null;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
public static FunctionArgumentTypeResolver byArgument(FunctionArgumentTypeResolver... resolvers) {
|
||||
return (function, argumentIndex, converter) -> {
|
||||
return resolvers[argumentIndex].resolveFunctionArgumentType( function, argumentIndex, converter );
|
||||
return new AbstractFunctionArgumentTypeResolver() {
|
||||
@Override
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
return resolvers[argumentIndex].resolveFunctionArgumentType( arguments, argumentIndex, converter );
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,25 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
* Copyright Red Hat Inc. and Hibernate Authors
|
||||
*/
|
||||
package org.hibernate.query.sqm.produce.function.internal;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.metamodel.mapping.MappingModelExpressible;
|
||||
import org.hibernate.query.sqm.produce.function.FunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmFunction;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
public abstract class AbstractFunctionArgumentTypeResolver implements FunctionArgumentTypeResolver {
|
||||
@Override
|
||||
@SuppressWarnings("removal")
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(SqmFunction<?> function, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
return resolveFunctionArgumentType( function.getArguments(), argumentIndex, converter );
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter);
|
||||
}
|
|
@ -13,6 +13,7 @@ import org.hibernate.metamodel.mapping.internal.SelectableMappingImpl;
|
|||
import org.hibernate.query.derived.AnonymousTupleType;
|
||||
import org.hibernate.query.sqm.SqmExpressible;
|
||||
import org.hibernate.query.sqm.produce.function.SetReturningFunctionTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.sql.ast.tree.SqlAstNode;
|
||||
import org.hibernate.sql.ast.tree.expression.Expression;
|
||||
|
@ -116,12 +117,13 @@ public class SetReturningFunctionTypeResolverBuilder implements SetReturningFunc
|
|||
public SelectableMapping[] resolveFunctionReturnType(
|
||||
List<? extends SqlAstNode> arguments,
|
||||
String tableIdentifierVariable,
|
||||
boolean lateral,
|
||||
boolean withOrdinality,
|
||||
TypeConfiguration typeConfiguration) {
|
||||
SqmToSqlAstConverter converter) {
|
||||
final SelectableMapping[] selectableMappings = new SelectableMapping[typeResolvers.length + (withOrdinality ? 1 : 0)];
|
||||
int i = 0;
|
||||
for ( TypeResolver typeResolver : typeResolvers ) {
|
||||
final JdbcMapping jdbcMapping = typeResolver.resolveFunctionReturnType( arguments, typeConfiguration );
|
||||
final JdbcMapping jdbcMapping = typeResolver.resolveFunctionReturnType( arguments, converter );
|
||||
selectableMappings[i] = new SelectableMappingImpl(
|
||||
"",
|
||||
typeResolver.selectionExpression(),
|
||||
|
@ -146,7 +148,7 @@ public class SetReturningFunctionTypeResolverBuilder implements SetReturningFunc
|
|||
if ( withOrdinality ) {
|
||||
selectableMappings[i] = new SelectableMappingImpl(
|
||||
"",
|
||||
determineIndexSelectionExpression( selectableMappings, tableIdentifierVariable, typeConfiguration ),
|
||||
determineIndexSelectionExpression( selectableMappings, tableIdentifierVariable, converter ),
|
||||
new SelectablePath( CollectionPart.Nature.INDEX.getName() ),
|
||||
null,
|
||||
null,
|
||||
|
@ -161,14 +163,15 @@ public class SetReturningFunctionTypeResolverBuilder implements SetReturningFunc
|
|||
false,
|
||||
false,
|
||||
false,
|
||||
typeConfiguration.getBasicTypeForJavaType( Long.class )
|
||||
converter.getCreationContext().getTypeConfiguration().getBasicTypeForJavaType( Long.class )
|
||||
);
|
||||
}
|
||||
return selectableMappings;
|
||||
}
|
||||
|
||||
private String determineIndexSelectionExpression(SelectableMapping[] selectableMappings, String tableIdentifierVariable, TypeConfiguration typeConfiguration) {
|
||||
final String defaultOrdinalityColumnName = typeConfiguration.getSessionFactory().getJdbcServices()
|
||||
private String determineIndexSelectionExpression(SelectableMapping[] selectableMappings, String tableIdentifierVariable, SqmToSqlAstConverter walker) {
|
||||
final String defaultOrdinalityColumnName = walker.getCreationContext().getSessionFactory()
|
||||
.getJdbcServices()
|
||||
.getDialect()
|
||||
.getDefaultOrdinalityColumnName();
|
||||
String name = defaultOrdinalityColumnName == null ? "i" : defaultOrdinalityColumnName;
|
||||
|
@ -195,7 +198,7 @@ public class SetReturningFunctionTypeResolverBuilder implements SetReturningFunc
|
|||
|
||||
SqmExpressible<?> resolveTupleType(List<? extends SqmTypedNode<?>> arguments, TypeConfiguration typeConfiguration);
|
||||
|
||||
JdbcMapping resolveFunctionReturnType(List<? extends SqlAstNode> arguments, TypeConfiguration typeConfiguration);
|
||||
JdbcMapping resolveFunctionReturnType(List<? extends SqlAstNode> arguments, SqmToSqlAstConverter walker);
|
||||
}
|
||||
|
||||
private record BasicTypeReferenceTypeResolver(
|
||||
|
@ -210,8 +213,8 @@ public class SetReturningFunctionTypeResolverBuilder implements SetReturningFunc
|
|||
}
|
||||
|
||||
@Override
|
||||
public JdbcMapping resolveFunctionReturnType(List<? extends SqlAstNode> arguments, TypeConfiguration typeConfiguration) {
|
||||
return typeConfiguration.getBasicTypeRegistry().resolve( basicTypeReference );
|
||||
public JdbcMapping resolveFunctionReturnType(List<? extends SqlAstNode> arguments, SqmToSqlAstConverter walker) {
|
||||
return walker.getCreationContext().getTypeConfiguration().getBasicTypeRegistry().resolve( basicTypeReference );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -227,7 +230,7 @@ public class SetReturningFunctionTypeResolverBuilder implements SetReturningFunc
|
|||
}
|
||||
|
||||
@Override
|
||||
public JdbcMapping resolveFunctionReturnType(List<? extends SqlAstNode> arguments, TypeConfiguration typeConfiguration) {
|
||||
public JdbcMapping resolveFunctionReturnType(List<? extends SqlAstNode> arguments, SqmToSqlAstConverter walker) {
|
||||
return basicType;
|
||||
}
|
||||
}
|
||||
|
@ -244,7 +247,7 @@ public class SetReturningFunctionTypeResolverBuilder implements SetReturningFunc
|
|||
}
|
||||
|
||||
@Override
|
||||
public JdbcMapping resolveFunctionReturnType(List<? extends SqlAstNode> arguments, TypeConfiguration typeConfiguration) {
|
||||
public JdbcMapping resolveFunctionReturnType(List<? extends SqlAstNode> arguments, SqmToSqlAstConverter walker) {
|
||||
return ((Expression) arguments.get( argPosition )).getExpressionType().getSingleJdbcMapping();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -283,8 +283,11 @@ public abstract class BaseSemanticQueryWalker implements SemanticQueryWalker<Obj
|
|||
}
|
||||
|
||||
protected void consumeFromClauseRoot(SqmRoot<?> sqmRoot) {
|
||||
if ( sqmRoot instanceof SqmDerivedRoot<?> ) {
|
||||
( (SqmDerivedRoot<?>) sqmRoot ).getQueryPart().accept( this );
|
||||
if ( sqmRoot instanceof SqmDerivedRoot<?> derivedRoot ) {
|
||||
derivedRoot.getQueryPart().accept( this );
|
||||
}
|
||||
else if ( sqmRoot instanceof SqmFunctionRoot<?> functionRoot ) {
|
||||
functionRoot.getFunction().accept( this );
|
||||
}
|
||||
consumeJoins( sqmRoot );
|
||||
}
|
||||
|
@ -416,7 +419,7 @@ public abstract class BaseSemanticQueryWalker implements SemanticQueryWalker<Obj
|
|||
}
|
||||
|
||||
@Override
|
||||
public Object visitRootFunction(SqmFunctionRoot<?>sqmRoot) {
|
||||
public Object visitRootFunction(SqmFunctionRoot<?> sqmRoot) {
|
||||
return sqmRoot;
|
||||
}
|
||||
|
||||
|
|
|
@ -7085,7 +7085,10 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
|
|||
);
|
||||
}
|
||||
else {
|
||||
BasicValuedMapping durationType = (BasicValuedMapping) toDuration.getNodeType();
|
||||
final MappingModelExpressible<?> inferredValueMapping = getInferredValueMapping();
|
||||
final BasicValuedMapping durationType = inferredValueMapping != null
|
||||
? (BasicValuedMapping) inferredValueMapping
|
||||
: (BasicValuedMapping) toDuration.getNodeType();
|
||||
Duration duration;
|
||||
if ( scaledMagnitude.getExpressionType().getSingleJdbcMapping().getJdbcType().isInterval() ) {
|
||||
duration = new Duration( extractEpoch( scaledMagnitude ), SECOND, durationType );
|
||||
|
|
|
@ -53,7 +53,7 @@ public interface SqmToSqlAstConverter extends SemanticQueryWalker<Object>, SqlAs
|
|||
* Returns the function return type implied from the context within which it is used.
|
||||
* If there is no current function being processed or no context implied type, the return is <code>null</code>.
|
||||
*/
|
||||
MappingModelExpressible<?> resolveFunctionImpliedReturnType();
|
||||
@Nullable MappingModelExpressible<?> resolveFunctionImpliedReturnType();
|
||||
|
||||
MappingModelExpressible<?> determineValueMapping(SqmExpression<?> sqmExpression);
|
||||
|
||||
|
|
|
@ -76,7 +76,9 @@ public class SqmFunctionRoot<E> extends SqmRoot<E> implements JpaFunctionRoot<E>
|
|||
|
||||
@Override
|
||||
public SqmPath<Long> index() {
|
||||
return get( CollectionPart.Nature.INDEX.getName() );
|
||||
//noinspection unchecked
|
||||
final SqmPathSource<Long> indexPathSource = (SqmPathSource<Long>) function.getType().getSubPathSource( CollectionPart.Nature.INDEX.getName() );
|
||||
return resolvePath( indexPathSource.getPathName(), indexPathSource );
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -129,7 +129,9 @@ public class SqmFunctionJoin<E> extends AbstractSqmJoin<Object, E> implements Jp
|
|||
|
||||
@Override
|
||||
public SqmPath<Long> index() {
|
||||
return get( CollectionPart.Nature.INDEX.getName() );
|
||||
//noinspection unchecked
|
||||
final SqmPathSource<Long> indexPathSource = (SqmPathSource<Long>) function.getType().getSubPathSource( CollectionPart.Nature.INDEX.getName() );
|
||||
return resolvePath( indexPathSource.getPathName(), indexPathSource );
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -6,6 +6,7 @@ package org.hibernate.sql.ast.spi;
|
|||
|
||||
import java.sql.PreparedStatement;
|
||||
import java.sql.SQLException;
|
||||
import java.time.Period;
|
||||
import java.util.ArrayList;
|
||||
import java.util.BitSet;
|
||||
import java.util.Collection;
|
||||
|
@ -59,6 +60,7 @@ import org.hibernate.persister.internal.SqlFragmentPredicate;
|
|||
import org.hibernate.query.IllegalQueryOperationException;
|
||||
import org.hibernate.query.ReturnableType;
|
||||
import org.hibernate.query.SortDirection;
|
||||
import org.hibernate.query.common.TemporalUnit;
|
||||
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
|
||||
import org.hibernate.query.internal.NullPrecedenceHelper;
|
||||
import org.hibernate.query.spi.Limit;
|
||||
|
@ -230,7 +232,10 @@ import jakarta.persistence.criteria.Nulls;
|
|||
|
||||
import static org.hibernate.persister.entity.DiscriminatorHelper.jdbcLiteral;
|
||||
import static org.hibernate.query.sqm.BinaryArithmeticOperator.DIVIDE_PORTABLE;
|
||||
import static org.hibernate.query.common.TemporalUnit.DAY;
|
||||
import static org.hibernate.query.common.TemporalUnit.MONTH;
|
||||
import static org.hibernate.query.common.TemporalUnit.NANOSECOND;
|
||||
import static org.hibernate.query.common.TemporalUnit.SECOND;
|
||||
import static org.hibernate.sql.ast.SqlTreePrinter.logSqlAst;
|
||||
import static org.hibernate.sql.results.graph.DomainResultGraphPrinter.logDomainResultGraph;
|
||||
|
||||
|
@ -7299,8 +7304,16 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
|
|||
|
||||
@Override
|
||||
public void visitDuration(Duration duration) {
|
||||
duration.getMagnitude().accept( this );
|
||||
if ( !duration.getExpressionType().getJdbcMapping().getJdbcType().isInterval() ) {
|
||||
if ( duration.getExpressionType().getJdbcMapping().getJdbcType().isInterval() ) {
|
||||
if ( duration.getMagnitude() instanceof Literal literal ) {
|
||||
renderIntervalLiteral( literal, duration.getUnit() );
|
||||
}
|
||||
else {
|
||||
renderInterval( duration );
|
||||
}
|
||||
}
|
||||
else {
|
||||
duration.getMagnitude().accept( this );
|
||||
// Convert to NANOSECOND because DurationJavaType requires values in that unit
|
||||
appendSql(
|
||||
duration.getUnit().conversionFactor( NANOSECOND, dialect )
|
||||
|
@ -7308,6 +7321,43 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
|
|||
}
|
||||
}
|
||||
|
||||
protected void renderInterval(Duration duration) {
|
||||
final TemporalUnit unit = duration.getUnit();
|
||||
appendSql( "(interval '1' " );
|
||||
final TemporalUnit targetResolution = switch ( unit ) {
|
||||
case NANOSECOND -> SECOND;
|
||||
case SECOND, MINUTE, HOUR, DAY, MONTH, YEAR -> unit;
|
||||
case WEEK -> DAY;
|
||||
case QUARTER -> MONTH;
|
||||
case DATE, TIME, EPOCH, DAY_OF_MONTH, DAY_OF_WEEK, DAY_OF_YEAR, WEEK_OF_MONTH, WEEK_OF_YEAR, OFFSET,
|
||||
TIMEZONE_HOUR, TIMEZONE_MINUTE, NATIVE ->
|
||||
throw new IllegalArgumentException( "Invalid duration unit: " + unit );
|
||||
};
|
||||
appendSql( targetResolution.toString() );
|
||||
appendSql( '*' );
|
||||
duration.getMagnitude().accept( this );
|
||||
appendSql( duration.getUnit().conversionFactor( targetResolution, dialect ) );
|
||||
appendSql( ')' );
|
||||
}
|
||||
|
||||
protected void renderIntervalLiteral(Literal literal, TemporalUnit unit) {
|
||||
final Number value = (Number) literal.getLiteralValue();
|
||||
dialect.appendIntervalLiteral( this, switch ( unit ) {
|
||||
case NANOSECOND -> java.time.Duration.ofNanos( value.longValue() );
|
||||
case SECOND -> java.time.Duration.ofSeconds( value.longValue() );
|
||||
case MINUTE -> java.time.Duration.ofMinutes( value.longValue() );
|
||||
case HOUR -> java.time.Duration.ofHours( value.longValue() );
|
||||
case DAY -> Period.ofDays( value.intValue() );
|
||||
case WEEK -> Period.ofWeeks( value.intValue() );
|
||||
case MONTH -> Period.ofMonths( value.intValue() );
|
||||
case YEAR -> Period.ofYears( value.intValue() );
|
||||
case QUARTER -> Period.ofMonths( value.intValue() * 3 );
|
||||
case DATE, TIME, EPOCH, DAY_OF_MONTH, DAY_OF_WEEK, DAY_OF_YEAR, WEEK_OF_MONTH, WEEK_OF_YEAR, OFFSET,
|
||||
TIMEZONE_HOUR, TIMEZONE_MINUTE, NATIVE ->
|
||||
throw new IllegalArgumentException( "Invalid duration unit: " + unit );
|
||||
} );
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visitConversion(Conversion conversion) {
|
||||
final Duration duration = conversion.getDuration();
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
*/
|
||||
package org.hibernate.sql.ast.tree.expression;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
|
||||
import org.hibernate.sql.ast.SqlAstTranslator;
|
||||
|
@ -16,9 +17,15 @@ import org.hibernate.sql.ast.spi.SqlAppender;
|
|||
*/
|
||||
public class SelfRenderingSqlFragmentExpression implements SelfRenderingExpression {
|
||||
private final String expression;
|
||||
private final @Nullable JdbcMappingContainer expressionType;
|
||||
|
||||
public SelfRenderingSqlFragmentExpression(String expression) {
|
||||
this( expression, null );
|
||||
}
|
||||
|
||||
public SelfRenderingSqlFragmentExpression(String expression, @Nullable JdbcMappingContainer expressionType) {
|
||||
this.expression = expression;
|
||||
this.expressionType = expressionType;
|
||||
}
|
||||
|
||||
public String getExpression() {
|
||||
|
@ -27,7 +34,7 @@ public class SelfRenderingSqlFragmentExpression implements SelfRenderingExpressi
|
|||
|
||||
@Override
|
||||
public JdbcMappingContainer getExpressionType() {
|
||||
return null;
|
||||
return expressionType;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -0,0 +1,189 @@
|
|||
/*
|
||||
* SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
* Copyright Red Hat Inc. and Hibernate Authors
|
||||
*/
|
||||
package org.hibernate.orm.test.function.srf;
|
||||
|
||||
import jakarta.persistence.Tuple;
|
||||
import org.hibernate.dialect.SybaseASEDialect;
|
||||
import org.hibernate.query.criteria.JpaCriteriaQuery;
|
||||
import org.hibernate.query.criteria.JpaFunctionRoot;
|
||||
import org.hibernate.query.sqm.NodeBuilder;
|
||||
import org.hibernate.testing.orm.domain.StandardDomainModel;
|
||||
import org.hibernate.testing.orm.domain.library.Book;
|
||||
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
|
||||
import org.hibernate.testing.orm.junit.DomainModel;
|
||||
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
|
||||
import org.hibernate.testing.orm.junit.SessionFactory;
|
||||
import org.hibernate.testing.orm.junit.SessionFactoryScope;
|
||||
import org.hibernate.testing.orm.junit.SkipForDialect;
|
||||
import org.junit.jupiter.api.AfterAll;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.time.LocalDate;
|
||||
import java.time.Month;
|
||||
import java.util.List;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
/**
|
||||
* @author Christian Beikov
|
||||
*/
|
||||
@DomainModel(standardModels = StandardDomainModel.LIBRARY)
|
||||
@SessionFactory
|
||||
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsGenerateSeries.class)
|
||||
public class GenerateSeriesTest {
|
||||
|
||||
@BeforeAll
|
||||
public void setup(SessionFactoryScope scope) {
|
||||
scope.inTransaction( session -> {
|
||||
session.persist( new Book(2, "Test") );
|
||||
} );
|
||||
}
|
||||
|
||||
@AfterAll
|
||||
public void cleanup(SessionFactoryScope scope) {
|
||||
scope.inTransaction( session -> {
|
||||
session.createMutationQuery( "delete Book" ).executeUpdate();
|
||||
} );
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGenerateSeries(SessionFactoryScope scope) {
|
||||
scope.inSession( em -> {
|
||||
//tag::hql-set-returning-function-generate-series-example[]
|
||||
List<Integer> resultList = em.createQuery( "select e from generate_series(1, 2) e order by e", Integer.class )
|
||||
.getResultList();
|
||||
//end::hql-set-returning-function-generate-series-example[]
|
||||
|
||||
assertEquals( 2, resultList.size() );
|
||||
assertEquals( 1, resultList.get( 0 ) );
|
||||
assertEquals( 2, resultList.get( 1 ) );
|
||||
} );
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNodeBuilderGenerateSeries(SessionFactoryScope scope) {
|
||||
scope.inSession( em -> {
|
||||
final NodeBuilder cb = (NodeBuilder) em.getCriteriaBuilder();
|
||||
final JpaCriteriaQuery<Integer> cq = cb.createQuery(Integer.class);
|
||||
final JpaFunctionRoot<Integer> root = cq.from( cb.generateSeries( 1, 2 ) );
|
||||
cq.select( root );
|
||||
cq.orderBy( cb.asc( root ) );
|
||||
List<Integer> resultList = em.createQuery( cq ).getResultList();
|
||||
|
||||
assertEquals( 2, resultList.size() );
|
||||
assertEquals( 1, resultList.get( 0 ) );
|
||||
assertEquals( 2, resultList.get( 1 ) );
|
||||
} );
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGenerateSeriesOrdinality(SessionFactoryScope scope) {
|
||||
scope.inSession( em -> {
|
||||
//tag::hql-set-returning-function-generate-series-ordinality-example[]
|
||||
List<Tuple> resultList = em.createQuery(
|
||||
"select index(e), e from generate_series(2, 3, 1) e order by index(e)",
|
||||
Tuple.class
|
||||
)
|
||||
.getResultList();
|
||||
//end::hql-set-returning-function-generate-series-ordinality-example[]
|
||||
|
||||
assertEquals( 2, resultList.size() );
|
||||
assertEquals( 1L, resultList.get( 0 ).get( 0 ) );
|
||||
assertEquals( 2, resultList.get( 0 ).get( 1 ) );
|
||||
assertEquals( 2L, resultList.get( 1 ).get( 0 ) );
|
||||
assertEquals( 3, resultList.get( 1 ).get( 1 ) );
|
||||
} );
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNodeBuilderGenerateSeriesOrdinality(SessionFactoryScope scope) {
|
||||
scope.inSession( em -> {
|
||||
final NodeBuilder cb = (NodeBuilder) em.getCriteriaBuilder();
|
||||
final JpaCriteriaQuery<Tuple> cq = cb.createTupleQuery();
|
||||
final JpaFunctionRoot<Integer> root = cq.from( cb.generateSeries( 2, 3, 1 ) );
|
||||
cq.multiselect( root.index(), root );
|
||||
cq.orderBy( cb.asc( root.index() ) );
|
||||
List<Tuple> resultList = em.createQuery( cq ).getResultList();
|
||||
|
||||
assertEquals( 2, resultList.size() );
|
||||
assertEquals( 1L, resultList.get( 0 ).get( 0 ) );
|
||||
assertEquals( 2, resultList.get( 0 ).get( 1 ) );
|
||||
assertEquals( 2L, resultList.get( 1 ).get( 0 ) );
|
||||
assertEquals( 3, resultList.get( 1 ).get( 1 ) );
|
||||
} );
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGenerateTimeSeries(SessionFactoryScope scope) {
|
||||
scope.inSession( em -> {
|
||||
//tag::hql-set-returning-function-generate-series-temporal-example[]
|
||||
List<LocalDate> resultList = em.createQuery( "select e from generate_series(local date 2020-01-31, local date 2020-01-01, -1 day) e order by e", LocalDate.class )
|
||||
.getResultList();
|
||||
//end::hql-set-returning-function-generate-series-temporal-example[]
|
||||
|
||||
assertEquals( 31, resultList.size() );
|
||||
for ( int i = 0; i < resultList.size(); i++ ) {
|
||||
assertEquals( LocalDate.of( 2020, Month.JANUARY, i + 1 ), resultList.get( i ) );
|
||||
}
|
||||
} );
|
||||
}
|
||||
|
||||
@Test
|
||||
@SkipForDialect(dialectClass = SybaseASEDialect.class, reason = "Sybase bug?")
|
||||
public void testGenerateSeriesCorrelation(SessionFactoryScope scope) {
|
||||
scope.inSession( em -> {
|
||||
List<Integer> resultList = em.createQuery(
|
||||
"select e from Book b join lateral generate_series(1,b.id) e order by e", Integer.class )
|
||||
.getResultList();
|
||||
|
||||
assertEquals( 2, resultList.size() );
|
||||
} );
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGenerateSeriesNegative(SessionFactoryScope scope) {
|
||||
scope.inSession( em -> {
|
||||
List<Integer> resultList = em.createQuery( "select e from generate_series(2, 1, -1) e order by e", Integer.class )
|
||||
.getResultList();
|
||||
|
||||
assertEquals( 2, resultList.size() );
|
||||
assertEquals( 1, resultList.get( 0 ) );
|
||||
assertEquals( 2, resultList.get( 1 ) );
|
||||
} );
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGenerateSeriesNoProgression(SessionFactoryScope scope) {
|
||||
scope.inSession( em -> {
|
||||
List<Integer> resultList = em.createQuery( "select e from generate_series(2, 1, 1) e", Integer.class )
|
||||
.getResultList();
|
||||
|
||||
assertEquals( 0, resultList.size() );
|
||||
} );
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGenerateSeriesNoProgressionOrdinality(SessionFactoryScope scope) {
|
||||
scope.inSession( em -> {
|
||||
List<Tuple> resultList = em.createQuery( "select index(e), e from generate_series(2, 1, 1) e", Tuple.class )
|
||||
.getResultList();
|
||||
|
||||
assertEquals( 0, resultList.size() );
|
||||
} );
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGenerateSeriesSameBounds(SessionFactoryScope scope) {
|
||||
scope.inSession( em -> {
|
||||
List<Integer> resultList = em.createQuery( "select e from generate_series(2, 2, 1) e", Integer.class )
|
||||
.getResultList();
|
||||
|
||||
assertEquals( 1, resultList.size() );
|
||||
assertEquals( 2, resultList.get( 0 ) );
|
||||
} );
|
||||
}
|
||||
|
||||
}
|
|
@ -26,7 +26,6 @@ import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
|
|||
import org.hibernate.tool.schema.spi.ContributableMatcher;
|
||||
import org.hibernate.tool.schema.spi.ExceptionHandler;
|
||||
import org.hibernate.tool.schema.spi.ExecutionOptions;
|
||||
import org.hibernate.tool.schema.spi.SchemaFilter;
|
||||
import org.hibernate.tool.schema.spi.SchemaManagementTool;
|
||||
import org.hibernate.tool.schema.spi.ScriptSourceInput;
|
||||
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
|
||||
|
|
|
@ -25,7 +25,6 @@ import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
|
|||
import org.hibernate.tool.schema.spi.ContributableMatcher;
|
||||
import org.hibernate.tool.schema.spi.ExceptionHandler;
|
||||
import org.hibernate.tool.schema.spi.ExecutionOptions;
|
||||
import org.hibernate.tool.schema.spi.SchemaFilter;
|
||||
import org.hibernate.tool.schema.spi.SchemaManagementTool;
|
||||
import org.hibernate.tool.schema.spi.ScriptSourceInput;
|
||||
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
|
||||
|
|
|
@ -24,7 +24,6 @@ import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
|
|||
import org.hibernate.tool.schema.spi.ContributableMatcher;
|
||||
import org.hibernate.tool.schema.spi.ExceptionHandler;
|
||||
import org.hibernate.tool.schema.spi.ExecutionOptions;
|
||||
import org.hibernate.tool.schema.spi.SchemaFilter;
|
||||
import org.hibernate.tool.schema.spi.SchemaManagementTool;
|
||||
import org.hibernate.tool.schema.spi.ScriptSourceInput;
|
||||
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
|
||||
|
|
|
@ -27,7 +27,6 @@ import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
|
|||
import org.hibernate.tool.schema.spi.ContributableMatcher;
|
||||
import org.hibernate.tool.schema.spi.ExceptionHandler;
|
||||
import org.hibernate.tool.schema.spi.ExecutionOptions;
|
||||
import org.hibernate.tool.schema.spi.SchemaFilter;
|
||||
import org.hibernate.tool.schema.spi.SchemaManagementTool;
|
||||
import org.hibernate.tool.schema.spi.ScriptSourceInput;
|
||||
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
|
||||
|
|
|
@ -835,6 +835,12 @@ abstract public class DialectFeatureChecks {
|
|||
}
|
||||
}
|
||||
|
||||
public static class SupportsGenerateSeries implements DialectFeatureCheck {
|
||||
public boolean apply(Dialect dialect) {
|
||||
return definesSetReturningFunction( dialect, "generate_series" );
|
||||
}
|
||||
}
|
||||
|
||||
public static class SupportsArrayAgg implements DialectFeatureCheck {
|
||||
public boolean apply(Dialect dialect) {
|
||||
return definesFunction( dialect, "array_agg" );
|
||||
|
|
|
@ -6,28 +6,25 @@ package org.hibernate.vector;
|
|||
|
||||
import java.util.List;
|
||||
|
||||
import org.checkerframework.checker.nullness.qual.Nullable;
|
||||
import org.hibernate.metamodel.mapping.MappingModelExpressible;
|
||||
import org.hibernate.query.sqm.produce.function.FunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.produce.function.internal.AbstractFunctionArgumentTypeResolver;
|
||||
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
|
||||
import org.hibernate.query.sqm.tree.SqmTypedNode;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmExpression;
|
||||
import org.hibernate.query.sqm.tree.expression.SqmFunction;
|
||||
import org.hibernate.type.SqlTypes;
|
||||
import org.hibernate.type.StandardBasicTypes;
|
||||
|
||||
/**
|
||||
* A {@link FunctionArgumentTypeResolver} for {@link SqlTypes#VECTOR} functions.
|
||||
*/
|
||||
public class VectorArgumentTypeResolver implements FunctionArgumentTypeResolver {
|
||||
public class VectorArgumentTypeResolver extends AbstractFunctionArgumentTypeResolver {
|
||||
|
||||
public static final FunctionArgumentTypeResolver INSTANCE = new VectorArgumentTypeResolver();
|
||||
|
||||
@Override
|
||||
public MappingModelExpressible<?> resolveFunctionArgumentType(
|
||||
SqmFunction<?> function,
|
||||
int argumentIndex,
|
||||
SqmToSqlAstConverter converter) {
|
||||
final List<? extends SqmTypedNode<?>> arguments = function.getArguments();
|
||||
public @Nullable MappingModelExpressible<?> resolveFunctionArgumentType(List<? extends SqmTypedNode<?>> arguments, int argumentIndex, SqmToSqlAstConverter converter) {
|
||||
for ( int i = 0; i < arguments.size(); i++ ) {
|
||||
if ( i != argumentIndex ) {
|
||||
final SqmTypedNode<?> node = arguments.get( i );
|
||||
|
|
|
@ -83,7 +83,8 @@ A set-returning function is a new type of function that can return rows and is e
|
|||
The concept is known in many different database SQL dialects and is sometimes referred to as table valued function or table function.
|
||||
|
||||
Custom set-returning functions can be registered via a `FunctionContributor` and Hibernate ORM
|
||||
also comes with out-of-the-box support for the set-returning function `unnest()`, which allows to turn an array into rows.
|
||||
also comes with out-of-the-box support for the set-returning functions `unnest()`, which allows to turn an array into rows,
|
||||
and `generate_series()`, which can be used to create a series of values as rows.
|
||||
|
||||
[[cleanup]]
|
||||
== Clean-up
|
||||
|
|
Loading…
Reference in New Issue