HHH-16780 Add array_agg and array constructor function

Christian Beikov 2023-09-28 14:52:48 +02:00
parent d8bad73f58
commit 2cb4652688
87 changed files with 2554 additions and 321 deletions
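As a usage sketch (not part of the commit): the new functions are exposed in HQL as array(...) and array_agg(...). The entity, attribute names, and configuration below are invented for illustration.

import java.util.List;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

public class ArrayFunctionSketch {

	// toy entity, invented for this sketch
	@Entity(name = "Book")
	static class Book {
		@Id
		Long id;
		String title;
		String isbn;
		String publisher;
	}

	public static void main(String[] args) {
		// assumes JDBC settings are supplied via hibernate.properties on the classpath
		SessionFactory sessionFactory = new Configuration()
				.addAnnotatedClass( Book.class )
				.buildSessionFactory();
		sessionFactory.inSession( session -> {
			// array constructor: one String[] per row
			List<String[]> pairs = session
					.createSelectionQuery( "select array(b.title, b.isbn) from Book b", String[].class )
					.getResultList();
			// array aggregate: all titles of each publisher collected into a single array
			List<Object[]> perPublisher = session
					.createSelectionQuery(
							"select b.publisher, array_agg(b.title) from Book b group by b.publisher",
							Object[].class )
					.getResultList();
			System.out.println( pairs.size() + " rows, " + perPublisher.size() + " publishers" );
		} );
		sessionFactory.close();
	}
}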

View File

@@ -461,6 +461,8 @@ public class CockroachLegacyDialect extends Dialect {
 functionFactory.listagg_stringAgg( "string" );
 functionFactory.inverseDistributionOrderedSetAggregates();
 functionFactory.hypotheticalOrderedSetAggregates_windowEmulation();
+functionFactory.array_casting();
+functionFactory.arrayAggregate();
 functionContributions.getFunctionRegistry().register(
 "trunc",

View File

@@ -44,6 +44,7 @@ import org.hibernate.dialect.sequence.SequenceSupport;
 import org.hibernate.dialect.unique.AlterTableUniqueIndexDelegate;
 import org.hibernate.dialect.unique.SkipNullableUniqueDelegate;
 import org.hibernate.dialect.unique.UniqueDelegate;
+import org.hibernate.engine.jdbc.Size;
 import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
 import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
 import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
@@ -260,7 +261,8 @@ public class DB2LegacyDialect extends Dialect {
 public void initializeFunctionRegistry(FunctionContributions functionContributions) {
 super.initializeFunctionRegistry(functionContributions);
-CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
+final DdlTypeRegistry ddlTypeRegistry = functionContributions.getTypeConfiguration().getDdlTypeRegistry();
+final CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
 // AVG by default uses the input type, so we possibly need to cast the argument type, hence a special function
 functionFactory.avg_castingNonDoubleArguments( this, SqlAstNodeRenderingMode.DEFAULT );
@@ -362,14 +364,13 @@ public class DB2LegacyDialect extends Dialect {
 functionContributions.getTypeConfiguration(),
 SqlAstNodeRenderingMode.DEFAULT,
 "||",
-functionContributions.getTypeConfiguration().getDdlTypeRegistry().getDescriptor( VARCHAR )
-.getCastTypeName(
-functionContributions.getTypeConfiguration()
-.getBasicTypeRegistry()
-.resolve( StandardBasicTypes.STRING ),
-null,
-null,
-null
+ddlTypeRegistry.getDescriptor( VARCHAR )
+.getCastTypeName(
+Size.nil(),
+functionContributions.getTypeConfiguration()
+.getBasicTypeRegistry()
+.resolve( StandardBasicTypes.STRING ),
+ddlTypeRegistry
 ),
 true
 )

View File

@@ -34,6 +34,7 @@ import org.hibernate.dialect.sequence.DerbySequenceSupport;
 import org.hibernate.dialect.sequence.SequenceSupport;
 import org.hibernate.dialect.temptable.TemporaryTable;
 import org.hibernate.dialect.temptable.TemporaryTableKind;
+import org.hibernate.engine.jdbc.Size;
 import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
 import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
 import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
@@ -319,8 +320,8 @@ public class DerbyLegacyDialect extends Dialect {
 final BasicTypeRegistry basicTypeRegistry = functionContributions.getTypeConfiguration().getBasicTypeRegistry();
 final BasicType<String> stringType = basicTypeRegistry.resolve( StandardBasicTypes.STRING );
-CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
+final DdlTypeRegistry ddlTypeRegistry = functionContributions.getTypeConfiguration().getDdlTypeRegistry();
+final CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
 // Derby needs an actual argument type for aggregates like SUM, AVG, MIN, MAX to determine the result type
 functionFactory.aggregates( this, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
@@ -331,8 +332,8 @@ public class DerbyLegacyDialect extends Dialect {
 functionContributions.getTypeConfiguration(),
 SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER,
 "||",
-functionContributions.getTypeConfiguration().getDdlTypeRegistry().getDescriptor( VARCHAR )
-.getCastTypeName( stringType, null, null, null ),
+ddlTypeRegistry.getDescriptor( VARCHAR )
+.getCastTypeName( Size.nil(), stringType, ddlTypeRegistry ),
 true
 )
 );

View File

@@ -366,10 +366,12 @@ public class H2LegacyDialect extends Dialect {
 functionFactory.rownum();
 if ( getVersion().isSameOrAfter( 1, 4, 200 ) ) {
 functionFactory.windowFunctions();
-if ( getVersion().isSameOrAfter( 2 ) ) {
-functionFactory.listagg( null );
 functionFactory.inverseDistributionOrderedSetAggregates();
 functionFactory.hypotheticalOrderedSetAggregates();
+if ( getVersion().isSameOrAfter( 2 ) ) {
+functionFactory.listagg( null );
+functionFactory.array();
+functionFactory.arrayAggregate();
 }
 else {
 // Use group_concat until 2.x as listagg was buggy

View File

@@ -247,6 +247,8 @@ public class HSQLLegacyDialect extends Dialect {
 functionFactory.rownum();
 }
 functionFactory.listagg_groupConcat();
+functionFactory.array();
+functionFactory.arrayAggregate();
 }
 @Override

View File

@@ -105,12 +105,14 @@ import org.hibernate.type.descriptor.jdbc.OracleJsonBlobJdbcType;
 import org.hibernate.type.descriptor.jdbc.NullJdbcType;
 import org.hibernate.type.descriptor.jdbc.ObjectNullAsNullTypeJdbcType;
 import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
+import org.hibernate.type.descriptor.sql.internal.ArrayDdlTypeImpl;
 import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
 import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
 import org.hibernate.type.spi.TypeConfiguration;
 import jakarta.persistence.TemporalType;
+import static java.util.regex.Pattern.CASE_INSENSITIVE;
 import static org.hibernate.exception.spi.TemplatedViolatedConstraintNameExtractor.extractUsingTemplate;
 import static org.hibernate.query.sqm.TemporalUnit.DAY;
 import static org.hibernate.query.sqm.TemporalUnit.HOUR;
@@ -124,6 +126,8 @@ import static org.hibernate.type.SqlTypes.BINARY;
 import static org.hibernate.type.SqlTypes.BOOLEAN;
 import static org.hibernate.type.SqlTypes.DATE;
 import static org.hibernate.type.SqlTypes.DECIMAL;
+import static org.hibernate.type.SqlTypes.DOUBLE;
+import static org.hibernate.type.SqlTypes.FLOAT;
 import static org.hibernate.type.SqlTypes.GEOMETRY;
 import static org.hibernate.type.SqlTypes.INTEGER;
 import static org.hibernate.type.SqlTypes.JSON;
@@ -133,6 +137,7 @@ import static org.hibernate.type.SqlTypes.REAL;
 import static org.hibernate.type.SqlTypes.SMALLINT;
 import static org.hibernate.type.SqlTypes.SQLXML;
 import static org.hibernate.type.SqlTypes.STRUCT;
+import static org.hibernate.type.SqlTypes.TABLE;
 import static org.hibernate.type.SqlTypes.TIME;
 import static org.hibernate.type.SqlTypes.TIMESTAMP;
 import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
@@ -150,17 +155,20 @@ import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithN
 */
 public class OracleLegacyDialect extends Dialect {
-private static final Pattern DISTINCT_KEYWORD_PATTERN = Pattern.compile( "\\bdistinct\\b" );
-private static final Pattern GROUP_BY_KEYWORD_PATTERN = Pattern.compile( "\\bgroup\\sby\\b" );
-private static final Pattern ORDER_BY_KEYWORD_PATTERN = Pattern.compile( "\\border\\sby\\b" );
-private static final Pattern UNION_KEYWORD_PATTERN = Pattern.compile( "\\bunion\\b" );
-private static final Pattern SQL_STATEMENT_TYPE_PATTERN = Pattern.compile("^(?:/\\*.*?\\*/)?\\s*(select|insert|update|delete)\\s+.*?", Pattern.CASE_INSENSITIVE);
+private static final Pattern DISTINCT_KEYWORD_PATTERN = Pattern.compile( "\\bdistinct\\b", CASE_INSENSITIVE );
+private static final Pattern GROUP_BY_KEYWORD_PATTERN = Pattern.compile( "\\bgroup\\s+by\\b", CASE_INSENSITIVE );
+private static final Pattern ORDER_BY_KEYWORD_PATTERN = Pattern.compile( "\\border\\s+by\\b", CASE_INSENSITIVE );
+private static final Pattern UNION_KEYWORD_PATTERN = Pattern.compile( "\\bunion\\b", CASE_INSENSITIVE );
+private static final Pattern SQL_STATEMENT_TYPE_PATTERN =
+Pattern.compile( "^(?:/\\*.*?\\*/)?\\s*(select|insert|update|delete)\\s+.*?", CASE_INSENSITIVE );
 private static final int PARAM_LIST_SIZE_LIMIT = 1000;
 public static final String PREFER_LONG_RAW = "hibernate.dialect.oracle.prefer_long_raw";
 private static final String yqmSelect =
-"( TRUNC(%2$s, 'MONTH') + NUMTOYMINTERVAL(%1$s, 'MONTH') + ( LEAST( EXTRACT( DAY FROM %2$s ), EXTRACT( DAY FROM LAST_DAY( TRUNC(%2$s, 'MONTH') + NUMTOYMINTERVAL(%1$s, 'MONTH') ) ) ) - 1 ) )";
+"(trunc(%2$s, 'MONTH') + numtoyminterval(%1$s, 'MONTH') + (least(extract(day from %2$s), extract(day from last_day(trunc(%2$s, 'MONTH') + numtoyminterval(%1$s, 'MONTH')))) - 1))";
 private static final String ADD_YEAR_EXPRESSION = String.format( yqmSelect, "?2*12", "?3" );
 private static final String ADD_QUARTER_EXPRESSION = String.format( yqmSelect, "?2*3", "?3" );
@@ -220,6 +228,7 @@ public class OracleLegacyDialect extends Dialect {
 functionFactory.addMonths();
 functionFactory.monthsBetween();
 functionFactory.everyAny_minMaxCase();
+functionFactory.repeat_rpad();
 functionFactory.radians_acos();
 functionFactory.degrees_acos();
@@ -273,6 +282,9 @@ public class OracleLegacyDialect extends Dialect {
 new OracleTruncFunction( functionContributions.getTypeConfiguration() )
 );
 functionContributions.getFunctionRegistry().registerAlternateKey( "truncate", "trunc" );
+functionFactory.array_oracle();
+functionFactory.arrayAggregate_jsonArrayagg();
 }
 @Override
@@ -625,6 +637,10 @@ public class OracleLegacyDialect extends Dialect {
 case REAL:
 // Oracle's 'real' type is actually double precision
 return "float(24)";
+case DOUBLE:
+// Oracle's 'double precision' means float(126), and
+// we never need 126 bits (38 decimal digits)
+return "float(53)";
 case NUMERIC:
 case DECIMAL:
@@ -670,6 +686,9 @@ public class OracleLegacyDialect extends Dialect {
 ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "blob", this ) );
 }
 }
+ddlTypeRegistry.addDescriptor( new ArrayDdlTypeImpl( this, false ) );
+ddlTypeRegistry.addDescriptor( TABLE, new ArrayDdlTypeImpl( this, false ) );
 }
 @Override
@@ -736,13 +755,20 @@ public class OracleLegacyDialect extends Dialect {
 }
 break;
 case Types.NUMERIC:
-if ( scale == -127 ) {
-// For some reason, the Oracle JDBC driver reports FLOAT
-// as NUMERIC with scale -127
-if ( precision <= getFloatPrecision() ) {
-return jdbcTypeRegistry.getDescriptor( Types.FLOAT );
+if ( precision > 8 // precision of 0 means something funny
+// For some reason, the Oracle JDBC driver reports
+// FLOAT or DOUBLE as NUMERIC with scale -127
+// (but note that expressions with unknown type
+// also get reported this way, so take care)
+&& scale == -127 ) {
+if ( precision <= 24 ) {
+// Can be represented as a Java float
+return jdbcTypeRegistry.getDescriptor( FLOAT );
+}
+else if ( precision <= 53 ) {
+// Can be represented as a Java double
+return jdbcTypeRegistry.getDescriptor( DOUBLE );
 }
-return jdbcTypeRegistry.getDescriptor( Types.DOUBLE );
 }
 //intentional fall-through:
 case Types.DECIMAL:
@@ -825,6 +851,7 @@ public class OracleLegacyDialect extends Dialect {
 if ( OracleJdbcHelper.isUsable( serviceRegistry ) ) {
 typeContributions.contributeJdbcTypeConstructor( OracleJdbcHelper.getArrayJdbcTypeConstructor( serviceRegistry ) );
+typeContributions.contributeJdbcTypeConstructor( OracleJdbcHelper.getNestedTableJdbcTypeConstructor( serviceRegistry ) );
 }
 else {
 typeContributions.contributeJdbcType( OracleReflectionStructJdbcType.INSTANCE );
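The NUMERIC branch in the type-resolution hunk above encodes a small rule: the Oracle driver reports binary FLOAT/DOUBLE columns as NUMERIC with scale -127, and the dialect then picks the JDBC type from the reported binary precision. A standalone restatement of that rule, for illustration only:

import java.sql.Types;

// Mirrors the resolution logic shown above: NUMERIC with scale -127 is really a binary float;
// up to 24 bits of precision fits a Java float, up to 53 bits fits a Java double.
final class OracleFloatResolutionSketch {

	static int resolveJdbcTypeCode(int reportedTypeCode, int precision, int scale) {
		if ( reportedTypeCode == Types.NUMERIC
				&& scale == -127
				&& precision > 8 ) { // precision of 0 means something funny
			if ( precision <= 24 ) {
				return Types.FLOAT;
			}
			if ( precision <= 53 ) {
				return Types.DOUBLE;
			}
		}
		// everything else keeps the ordinary NUMERIC/DECIMAL handling
		return Types.NUMERIC;
	}
}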

View File

@@ -9,6 +9,7 @@ package org.hibernate.community.dialect;
 import java.util.ArrayList;
 import java.util.List;
+import org.hibernate.dialect.OracleArrayJdbcType;
 import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.internal.util.collections.Stack;
 import org.hibernate.metamodel.mapping.EmbeddableValuedModelPart;
@@ -53,6 +54,7 @@ import org.hibernate.sql.ast.tree.update.Assignment;
 import org.hibernate.sql.exec.spi.JdbcOperation;
 import org.hibernate.sql.results.internal.SqlSelectionImpl;
 import org.hibernate.type.SqlTypes;
+import org.hibernate.type.descriptor.jdbc.JdbcType;
 /**
 * A SQL AST translator for Oracle.
@@ -454,7 +456,8 @@ public class OracleLegacySqlAstTranslator<T extends JdbcOperation> extends Abstr
 renderComparisonEmulateDecode( lhs, operator, rhs );
 return;
 }
-switch ( lhsExpressionType.getSingleJdbcMapping().getJdbcType().getDdlTypeCode() ) {
+final JdbcType jdbcType = lhsExpressionType.getSingleJdbcMapping().getJdbcType();
+switch ( jdbcType.getDdlTypeCode() ) {
 case SqlTypes.SQLXML:
 // In Oracle, XMLTYPE is not "comparable", so we have to use the xmldiff function for this purpose
 switch ( operator ) {
@@ -499,25 +502,51 @@ public class OracleLegacySqlAstTranslator<T extends JdbcOperation> extends Abstr
 appendSql( ')' );
 break;
 case SqlTypes.ARRAY:
+final String arrayTypeName = ( (OracleArrayJdbcType) jdbcType ).getTypeName();
 switch ( operator ) {
 case DISTINCT_FROM:
-appendSql( "decode(" );
-arrayToString( lhs );
-appendSql( ',' );
-arrayToString( rhs );
-appendSql( ",0,1)=1" );
-break;
 case NOT_DISTINCT_FROM:
-appendSql( "decode(" );
-arrayToString( lhs );
-appendSql( ',' );
-arrayToString( rhs );
-appendSql( ",0,1)=0" );
+appendSql( arrayTypeName );
+appendSql( "_distinct(" );
+visitSqlSelectExpression( lhs );
+appendSql( ',' );
+visitSqlSelectExpression( rhs );
+appendSql( ")" );
 break;
 default:
-arrayToString( lhs );
-appendSql( operator.sqlText() );
-arrayToString( rhs );
+appendSql( arrayTypeName );
+appendSql( "_cmp(" );
+visitSqlSelectExpression( lhs );
+appendSql( ',' );
+visitSqlSelectExpression( rhs );
+appendSql( ")" );
+break;
+}
+switch ( operator ) {
+case DISTINCT_FROM:
+appendSql( "=1" );
+break;
+case NOT_DISTINCT_FROM:
+appendSql( "=0" );
+break;
+case EQUAL:
+appendSql( "=0" );
+break;
+case NOT_EQUAL:
+appendSql( "<>0" );
+break;
+case LESS_THAN:
+appendSql( "=-1" );
+break;
+case GREATER_THAN:
+appendSql( "=1" );
+break;
+case LESS_THAN_OR_EQUAL:
+appendSql( "<=0" );
+break;
+case GREATER_THAN_OR_EQUAL:
+appendSql( ">=0" );
+break;
 }
 break;
 default:
@@ -526,19 +555,6 @@ public class OracleLegacySqlAstTranslator<T extends JdbcOperation> extends Abstr
 }
 }
-private void arrayToString(Expression expression) {
-appendSql("case when ");
-expression.accept( this );
-appendSql(" is not null then (select listagg(column_value||',')");
-if ( !getDialect().getVersion().isSameOrAfter( 18 ) ) {
-// The within group clause became optional in 18
-appendSql(" within group(order by rownum)");
-}
-appendSql("||';' from table(");
-expression.accept( this );
-appendSql(")) else null end");
-}
 @Override
 protected void renderSelectTupleComparison(
 List<SqlSelection> lhsExpressions,

View File

@@ -84,6 +84,7 @@ import org.hibernate.type.descriptor.jdbc.ObjectNullAsBinaryTypeJdbcType;
 import org.hibernate.type.descriptor.jdbc.UUIDJdbcType;
 import org.hibernate.type.descriptor.jdbc.XmlJdbcType;
 import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
+import org.hibernate.type.descriptor.sql.internal.ArrayDdlTypeImpl;
 import org.hibernate.type.descriptor.sql.internal.CapacityDependentDdlType;
 import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
 import org.hibernate.type.descriptor.sql.internal.Scale6IntervalSecondDdlType;
@@ -228,6 +229,9 @@ public class PostgreSQLLegacyDialect extends Dialect {
 super.registerColumnTypes( typeContributions, serviceRegistry );
 final DdlTypeRegistry ddlTypeRegistry = typeContributions.getTypeConfiguration().getDdlTypeRegistry();
+// We need to configure that the array type uses the raw element type for casts
+ddlTypeRegistry.addDescriptor( new ArrayDdlTypeImpl( this, true ) );
 // Register this type to be able to support Float[]
 // The issue is that the JDBC driver can't handle createArrayOf( "float(24)", ... )
 // It requires the use of "real" or "float4"
@@ -577,6 +581,8 @@ public class PostgreSQLLegacyDialect extends Dialect {
 functionFactory.locate_positionSubstring();
 functionFactory.windowFunctions();
 functionFactory.listagg_stringAgg( "varchar" );
+functionFactory.array_casting();
+functionFactory.arrayAggregate();
 if ( getVersion().isSameOrAfter( 9, 4 ) ) {
 functionFactory.makeDateTimeTimestamp();

View File

@@ -448,6 +448,8 @@ public class CockroachDialect extends Dialect {
 functionFactory.listagg_stringAgg( "string" );
 functionFactory.inverseDistributionOrderedSetAggregates();
 functionFactory.hypotheticalOrderedSetAggregates_windowEmulation();
+functionFactory.array_casting();
+functionFactory.arrayAggregate();
 functionContributions.getFunctionRegistry().register(
 "trunc",

View File

@@ -37,6 +37,7 @@ import org.hibernate.dialect.sequence.DB2SequenceSupport;
 import org.hibernate.dialect.sequence.SequenceSupport;
 import org.hibernate.dialect.unique.AlterTableUniqueIndexDelegate;
 import org.hibernate.dialect.unique.UniqueDelegate;
+import org.hibernate.engine.jdbc.Size;
 import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
 import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
 import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
@@ -249,7 +250,8 @@ public class DB2Dialect extends Dialect {
 public void initializeFunctionRegistry(FunctionContributions functionContributions) {
 super.initializeFunctionRegistry(functionContributions);
-CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
+final DdlTypeRegistry ddlTypeRegistry = functionContributions.getTypeConfiguration().getDdlTypeRegistry();
+final CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
 // AVG by default uses the input type, so we possibly need to cast the argument type, hence a special function
 functionFactory.avg_castingNonDoubleArguments( this, SqlAstNodeRenderingMode.DEFAULT );
@@ -351,14 +353,13 @@ public class DB2Dialect extends Dialect {
 functionContributions.getTypeConfiguration(),
 SqlAstNodeRenderingMode.DEFAULT,
 "||",
-functionContributions.getTypeConfiguration().getDdlTypeRegistry().getDescriptor( VARCHAR )
-.getCastTypeName(
-functionContributions.getTypeConfiguration()
-.getBasicTypeRegistry()
-.resolve( StandardBasicTypes.STRING ),
-null,
-null,
-null
+ddlTypeRegistry.getDescriptor( VARCHAR )
+.getCastTypeName(
+Size.nil(),
+functionContributions.getTypeConfiguration()
+.getBasicTypeRegistry()
+.resolve( StandardBasicTypes.STRING ),
+ddlTypeRegistry
 ),
 true
 )

View File

@@ -30,6 +30,7 @@ import org.hibernate.dialect.temptable.TemporaryTable;
 import org.hibernate.dialect.temptable.TemporaryTableKind;
 import org.hibernate.dialect.unique.CreateTableUniqueDelegate;
 import org.hibernate.dialect.unique.UniqueDelegate;
+import org.hibernate.engine.jdbc.Size;
 import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
 import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
 import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
@@ -309,8 +310,8 @@ public class DerbyDialect extends Dialect {
 final BasicTypeRegistry basicTypeRegistry = functionContributions.getTypeConfiguration().getBasicTypeRegistry();
 final BasicType<String> stringType = basicTypeRegistry.resolve( StandardBasicTypes.STRING );
-CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
+final DdlTypeRegistry ddlTypeRegistry = functionContributions.getTypeConfiguration().getDdlTypeRegistry();
+final CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
 // Derby needs an actual argument type for aggregates like SUM, AVG, MIN, MAX to determine the result type
 functionFactory.aggregates( this, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
@@ -321,8 +322,8 @@ public class DerbyDialect extends Dialect {
 functionContributions.getTypeConfiguration(),
 SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER,
 "||",
-functionContributions.getTypeConfiguration().getDdlTypeRegistry().getDescriptor( VARCHAR )
-.getCastTypeName( stringType, null, null, null ),
+ddlTypeRegistry.getDescriptor( VARCHAR )
+.getCastTypeName( Size.nil(), stringType, ddlTypeRegistry ),
 true
 )
 );

View File

@@ -444,7 +444,7 @@ public abstract class Dialect implements ConversionContext, TypeContributor, Fun
 ddlTypeRegistry.addDescriptor( simpleSqlType( LONG32VARBINARY ) );
 if ( supportsStandardArrays() ) {
-ddlTypeRegistry.addDescriptor( new ArrayDdlTypeImpl( this ) );
+ddlTypeRegistry.addDescriptor( new ArrayDdlTypeImpl( this, false ) );
 }
 if ( rowId( null ) != null ) {
 ddlTypeRegistry.addDescriptor( simpleSqlType( ROWID ) );

View File

@@ -310,6 +310,8 @@ public class H2Dialect extends Dialect {
 functionFactory.listagg( null );
 functionFactory.inverseDistributionOrderedSetAggregates();
 functionFactory.hypotheticalOrderedSetAggregates();
+functionFactory.array();
+functionFactory.arrayAggregate();
 }
 @Override

View File

@@ -187,6 +187,8 @@ public class HSQLDialect extends Dialect {
 // from v. 2.2.0 ROWNUM() is supported in all modes as the equivalent of Oracle ROWNUM
 functionFactory.rownum();
 functionFactory.listagg_groupConcat();
+functionFactory.array();
+functionFactory.arrayAggregate();
 }
 @Override

View File

@@ -61,6 +61,10 @@ public class OracleArrayJdbcType implements JdbcType {
 return elementJdbcType;
 }
+public String getTypeName() {
+return typeName;
+}
 @Override
 public <T> JavaType<T> getJdbcRecommendedJavaTypeMapping(
 Integer precision,
@@ -184,7 +188,7 @@ public class OracleArrayJdbcType implements JdbcType {
 final Dialect dialect = database.getDialect();
 final BasicPluralJavaType<?> pluralJavaType = (BasicPluralJavaType<?>) javaType;
 final JavaType<?> elementJavaType = pluralJavaType.getElementJavaType();
-final String elementTypeName = typeName==null ? getTypeName( elementJavaType, dialect ) : typeName;
+final String arrayTypeName = typeName == null ? getTypeName( elementJavaType, dialect ) : typeName;
 final String elementType =
 typeConfiguration.getDdlTypeRegistry().getTypeName(
 getElementJdbcType().getDdlTypeCode(),
@@ -200,25 +204,59 @@ public class OracleArrayJdbcType implements JdbcType {
 int arrayLength = columnSize.getArrayLength() == null ? 127 : columnSize.getArrayLength();
 database.addAuxiliaryDatabaseObject(
 new NamedAuxiliaryDatabaseObject(
-elementTypeName,
+arrayTypeName,
 database.getDefaultNamespace(),
-getCreateArrayTypeCommand( elementTypeName, arrayLength, elementType ),
-getDropArrayTypeCommand( elementTypeName ),
+new String[]{
+"create or replace type " + arrayTypeName
++ " as varying array(" + arrayLength + ") of " + elementType
+},
+new String[] { "drop type " + arrayTypeName + " force" },
 emptySet(),
 true
 )
 );
-}
-String[] getCreateArrayTypeCommand(String elementTypeName, int length, String elementType) {
-return new String[]{
-"create or replace type " + elementTypeName
-+ " as varying array(" + length + ") of " + elementType
-};
-}
-String[] getDropArrayTypeCommand(String elementTypeName) {
-return EMPTY_STRING_ARRAY; //new String[] { "drop type " + elementTypeName + " force" };
+database.addAuxiliaryDatabaseObject(
+new NamedAuxiliaryDatabaseObject(
+arrayTypeName + "_cmp",
+database.getDefaultNamespace(),
+new String[]{
+"create or replace function " + arrayTypeName + "_cmp(a in " + arrayTypeName +
+", b in " + arrayTypeName + ") return number deterministic is begin " +
+"if a is null or b is null then return null; end if; " +
+"for i in 1 .. least(a.count,b.count) loop " +
+"if a(i) is null or b(i) is null then return null;" +
+"elsif a(i)>b(i) then return 1;" +
+"elsif a(i)<b(i) then return -1; " +
+"end if; " +
+"end loop; " +
+"if a.count=b.count then return 0; elsif a.count>b.count then return 1; else return -1; end if; " +
+"end;"
+},
+new String[] { "drop function " + arrayTypeName + "_cmp" },
+emptySet(),
+false
+)
+);
+database.addAuxiliaryDatabaseObject(
+new NamedAuxiliaryDatabaseObject(
+arrayTypeName + "_distinct",
+database.getDefaultNamespace(),
+new String[]{
+"create or replace function " + arrayTypeName + "_distinct(a in " + arrayTypeName +
+", b in " + arrayTypeName + ") return number deterministic is begin " +
+"if a is null and b is null then return 0; end if; " +
+"if a is null or b is null or a.count <> b.count then return 1; end if; " +
+"for i in 1 .. a.count loop " +
+"if (a(i) is null)<>(b(i) is null) or a(i)<>b(i) then return 1; end if; " +
+"end loop; " +
+"return 0; " +
+"end;"
+},
+new String[] { "drop function " + arrayTypeName + "_distinct" },
+emptySet(),
+false
+)
+);
 }
 @Override
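The two generated helpers encode the comparison semantics the Oracle translators rely on: the "_cmp" function is a lexicographic compare returning -1/0/1 and propagating null for null arrays or null elements, while "_distinct" returns 0 only when both arrays are null or element-wise identical (treating null as matching null). A plain-Java restatement of those rules, included purely to clarify the intent of the generated PL/SQL above:

// Not part of the commit: Java equivalents of the generated <type>_cmp and <type>_distinct helpers.
final class OracleArrayHelperSemantics {

	// <type>_cmp: lexicographic compare; null array or null element yields null (SQL "unknown")
	static Integer cmp(Integer[] a, Integer[] b) {
		if ( a == null || b == null ) {
			return null;
		}
		for ( int i = 0; i < Math.min( a.length, b.length ); i++ ) {
			if ( a[i] == null || b[i] == null ) {
				return null;
			}
			if ( a[i] > b[i] ) {
				return 1;
			}
			if ( a[i] < b[i] ) {
				return -1;
			}
		}
		return Integer.compare( a.length, b.length );
	}

	// <type>_distinct: 0 when both are null or element-wise identical, 1 otherwise
	static int distinct(Integer[] a, Integer[] b) {
		if ( a == null && b == null ) {
			return 0;
		}
		if ( a == null || b == null || a.length != b.length ) {
			return 1;
		}
		for ( int i = 0; i < a.length; i++ ) {
			if ( ( a[i] == null ) != ( b[i] == null ) ) {
				return 1;
			}
			if ( a[i] != null && !a[i].equals( b[i] ) ) {
				return 1;
			}
		}
		return 0;
	}
}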

View File

@@ -27,7 +27,7 @@ public class OracleArrayJdbcTypeConstructor implements JdbcTypeConstructor {
 TypeConfiguration typeConfiguration,
 Dialect dialect, BasicType<?> elementType,
 ColumnTypeInformation columnTypeInformation) {
-String typeName = columnTypeInformation.getTypeName();
+String typeName = columnTypeInformation == null ? null : columnTypeInformation.getTypeName();
 if ( typeName == null || typeName.isBlank() ) {
 typeName = OracleArrayJdbcType.getTypeName( elementType.getJavaTypeDescriptor(), dialect );
 }
@@ -46,7 +46,10 @@ public class OracleArrayJdbcTypeConstructor implements JdbcTypeConstructor {
 JdbcType elementType,
 ColumnTypeInformation columnTypeInformation) {
 // a bit wrong, since columnTypeInformation.getTypeName() is typically null!
-return new OracleArrayJdbcType( elementType, columnTypeInformation.getTypeName() );
+return new OracleArrayJdbcType(
+elementType,
+columnTypeInformation == null ? null : columnTypeInformation.getTypeName()
+);
 }
 @Override

View File

@@ -311,6 +311,9 @@ public class OracleDialect extends Dialect {
 new OracleTruncFunction( functionContributions.getTypeConfiguration() )
 );
 functionContributions.getFunctionRegistry().registerAlternateKey( "truncate", "trunc" );
+functionFactory.array_oracle();
+functionFactory.arrayAggregate_jsonArrayagg();
 }
 @Override
@@ -708,8 +711,8 @@ public class OracleDialect extends Dialect {
 ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "blob", this ) );
 }
-ddlTypeRegistry.addDescriptor( new ArrayDdlTypeImpl( this ) );
-ddlTypeRegistry.addDescriptor( TABLE, new ArrayDdlTypeImpl( this ) );
+ddlTypeRegistry.addDescriptor( new ArrayDdlTypeImpl( this, false ) );
+ddlTypeRegistry.addDescriptor( TABLE, new ArrayDdlTypeImpl( this, false ) );
 }
 @Override

View File

@@ -193,7 +193,7 @@ public class OracleNestedTableJdbcType implements JdbcType {
 final Dialect dialect = database.getDialect();
 final BasicPluralJavaType<?> pluralJavaType = (BasicPluralJavaType<?>) javaType;
 final JavaType<?> elementJavaType = pluralJavaType.getElementJavaType();
-final String elementTypeName = typeName==null ? getTypeName( elementJavaType, dialect ) : typeName;
+final String arrayTypeName = typeName==null ? getTypeName( elementJavaType, dialect ) : typeName;
 final String elementType =
 typeConfiguration.getDdlTypeRegistry().getTypeName(
 getElementJdbcType().getDdlTypeCode(),
@@ -208,27 +208,19 @@ public class OracleNestedTableJdbcType implements JdbcType {
 );
 database.addAuxiliaryDatabaseObject(
 new NamedAuxiliaryDatabaseObject(
-elementTypeName,
+arrayTypeName,
 database.getDefaultNamespace(),
-getCreateArrayTypeCommand( elementTypeName, elementType ),
-getDropArrayTypeCommand( elementTypeName ),
+new String[]{
+"create or replace type " + arrayTypeName
++ " as table of " + elementType
+},
+new String[] { "drop type " + arrayTypeName + " force" },
 emptySet(),
 true
 )
 );
 }
-String[] getCreateArrayTypeCommand(String elementTypeName, String elementType) {
-return new String[]{
-"create or replace type " + elementTypeName
-+ " as table of " + elementType
-};
-}
-String[] getDropArrayTypeCommand(String elementTypeName) {
-return EMPTY_STRING_ARRAY; //new String[] { "drop type " + elementTypeName + " force" };
-}
 @Override
 public String getExtraCreateTableInfo(JavaType<?> javaType, String columnName, String tableName, Database database) {
 final Dialect dialect = database.getDialect();

View File

@@ -26,7 +26,7 @@ public class OracleNestedTableJdbcTypeConstructor implements JdbcTypeConstructor
 TypeConfiguration typeConfiguration,
 Dialect dialect, BasicType<?> elementType,
 ColumnTypeInformation columnTypeInformation) {
-String typeName = columnTypeInformation.getTypeName();
+String typeName = columnTypeInformation == null ? null : columnTypeInformation.getTypeName();
 if ( typeName == null || typeName.isBlank() ) {
 typeName = OracleArrayJdbcType.getTypeName( elementType.getJavaTypeDescriptor(), dialect );
 }
@@ -40,7 +40,10 @@ public class OracleNestedTableJdbcTypeConstructor implements JdbcTypeConstructor
 JdbcType elementType,
 ColumnTypeInformation columnTypeInformation) {
 // a bit wrong, since columnTypeInformation.getTypeName() is typically null!
-return new OracleNestedTableJdbcType( elementType, columnTypeInformation.getTypeName() );
+return new OracleNestedTableJdbcType(
+elementType,
+columnTypeInformation == null ? null : columnTypeInformation.getTypeName()
+);
 }
 @Override

View File

@@ -19,6 +19,7 @@ import org.hibernate.query.sqm.ComparisonOperator;
 import org.hibernate.query.sqm.FetchClauseType;
 import org.hibernate.query.sqm.FrameExclusion;
 import org.hibernate.query.sqm.FrameKind;
+import org.hibernate.sql.ast.Clause;
 import org.hibernate.sql.ast.spi.SqlSelection;
 import org.hibernate.sql.ast.tree.Statement;
 import org.hibernate.sql.ast.tree.cte.CteMaterialization;
@@ -50,6 +51,8 @@ import org.hibernate.sql.model.ast.ColumnValueBinding;
 import org.hibernate.sql.model.internal.OptionalTableUpdate;
 import org.hibernate.sql.results.internal.SqlSelectionImpl;
 import org.hibernate.type.SqlTypes;
+import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
+import org.hibernate.type.descriptor.jdbc.JdbcType;
 /**
 * A SQL AST translator for Oracle.
@@ -402,7 +405,8 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends SqlAstTrans
 renderComparisonEmulateDecode( lhs, operator, rhs );
 return;
 }
-switch ( lhsExpressionType.getSingleJdbcMapping().getJdbcType().getDdlTypeCode() ) {
+final JdbcType jdbcType = lhsExpressionType.getSingleJdbcMapping().getJdbcType();
+switch ( jdbcType.getDdlTypeCode() ) {
 case SqlTypes.SQLXML:
 // In Oracle, XMLTYPE is not "comparable", so we have to use the xmldiff function for this purpose
 switch ( operator ) {
@@ -447,25 +451,51 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends SqlAstTrans
 appendSql( ')' );
 break;
 case SqlTypes.ARRAY:
+final String arrayTypeName = ( (OracleArrayJdbcType) jdbcType ).getTypeName();
 switch ( operator ) {
 case DISTINCT_FROM:
-appendSql( "decode(" );
-arrayToString( lhs );
-appendSql( ',' );
-arrayToString( rhs );
-appendSql( ",0,1)=1" );
-break;
 case NOT_DISTINCT_FROM:
-appendSql( "decode(" );
-arrayToString( lhs );
-appendSql( ',' );
-arrayToString( rhs );
-appendSql( ",0,1)=0" );
+appendSql( arrayTypeName );
+appendSql( "_distinct(" );
+visitSqlSelectExpression( lhs );
+appendSql( ',' );
+visitSqlSelectExpression( rhs );
+appendSql( ")" );
 break;
 default:
-arrayToString( lhs );
-appendSql( operator.sqlText() );
-arrayToString( rhs );
+appendSql( arrayTypeName );
+appendSql( "_cmp(" );
+visitSqlSelectExpression( lhs );
+appendSql( ',' );
+visitSqlSelectExpression( rhs );
+appendSql( ")" );
+break;
+}
+switch ( operator ) {
+case DISTINCT_FROM:
+appendSql( "=1" );
+break;
+case NOT_DISTINCT_FROM:
+appendSql( "=0" );
+break;
+case EQUAL:
+appendSql( "=0" );
+break;
+case NOT_EQUAL:
+appendSql( "<>0" );
+break;
+case LESS_THAN:
+appendSql( "=-1" );
+break;
+case GREATER_THAN:
+appendSql( "=1" );
+break;
+case LESS_THAN_OR_EQUAL:
+appendSql( "<=0" );
+break;
+case GREATER_THAN_OR_EQUAL:
+appendSql( ">=0" );
+break;
 }
 break;
 default:
@@ -474,15 +504,6 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends SqlAstTrans
 }
 }
-private void arrayToString(Expression expression) {
-appendSql("case when ");
-expression.accept( this );
-appendSql(" is not null then (select listagg(column_value||',')");
-appendSql("||';' from table(");
-expression.accept( this );
-appendSql(")) else null end");
-}
 @Override
 protected void renderSelectTupleComparison(
 List<SqlSelection> lhsExpressions,

View File

@@ -86,6 +86,7 @@ import org.hibernate.type.descriptor.jdbc.ObjectNullAsBinaryTypeJdbcType;
 import org.hibernate.type.descriptor.jdbc.UUIDJdbcType;
 import org.hibernate.type.descriptor.jdbc.XmlJdbcType;
 import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
+import org.hibernate.type.descriptor.sql.internal.ArrayDdlTypeImpl;
 import org.hibernate.type.descriptor.sql.internal.CapacityDependentDdlType;
 import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
 import org.hibernate.type.descriptor.sql.internal.NamedNativeEnumDdlTypeImpl;
@@ -254,6 +255,9 @@ public class PostgreSQLDialect extends Dialect {
 super.registerColumnTypes( typeContributions, serviceRegistry );
 final DdlTypeRegistry ddlTypeRegistry = typeContributions.getTypeConfiguration().getDdlTypeRegistry();
+// We need to configure that the array type uses the raw element type for casts
+ddlTypeRegistry.addDescriptor( new ArrayDdlTypeImpl( this, true ) );
 // Register this type to be able to support Float[]
 // The issue is that the JDBC driver can't handle createArrayOf( "float(24)", ... )
 // It requires the use of "real" or "float4"
@@ -625,6 +629,8 @@ public class PostgreSQLDialect extends Dialect {
 functionFactory.locate_positionSubstring();
 functionFactory.windowFunctions();
 functionFactory.listagg_stringAgg( "varchar" );
+functionFactory.array_casting();
+functionFactory.arrayAggregate();
 functionFactory.makeDateTimeTimestamp();
 // Note that PostgreSQL doesn't support the OVER clause for ordered set-aggregate functions

View File

@@ -479,6 +479,7 @@ public class SpannerDialect extends Dialect {
 functionFactory.listagg_stringAgg( "string" );
 functionFactory.inverseDistributionOrderedSetAggregates();
 functionFactory.hypotheticalOrderedSetAggregates();
+functionFactory.array_withoutKeyword();
 }
 @Override

View File

@@ -20,11 +20,13 @@ import org.hibernate.boot.model.relational.Namespace;
 import org.hibernate.dialect.DB2Dialect;
 import org.hibernate.dialect.DB2StructJdbcType;
 import org.hibernate.dialect.XmlHelper;
+import org.hibernate.engine.jdbc.Size;
 import org.hibernate.mapping.AggregateColumn;
 import org.hibernate.mapping.Column;
 import org.hibernate.metamodel.mapping.EmbeddableMappingType;
 import org.hibernate.metamodel.mapping.SelectableMapping;
 import org.hibernate.metamodel.mapping.SelectablePath;
+import org.hibernate.metamodel.mapping.SqlExpressible;
 import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
 import org.hibernate.sql.ast.SqlAstTranslator;
 import org.hibernate.sql.ast.spi.SqlAppender;
@@ -143,12 +145,14 @@ public class DB2AggregateSupport extends AggregateSupportImpl {
 final DdlType ddlType = typeConfiguration.getDdlTypeRegistry().getDescriptor(
 column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode()
 );
+final Size size = new Size();
+size.setLength( column.getLength() );
+size.setPrecision( column.getPrecision() );
+size.setScale( column.getScale() );
 return ddlType.getCastTypeName(
-column.getJdbcMapping().getJdbcType(),
-column.getJdbcMapping().getJavaTypeDescriptor(),
-column.getLength(),
-column.getPrecision(),
-column.getScale()
+size,
+(SqlExpressible) column.getJdbcMapping(),
+typeConfiguration.getDdlTypeRegistry()
 );
 }
 else{

View File

@@ -10,6 +10,7 @@ import java.util.Collections;
 import java.util.List;
 import org.hibernate.dialect.Dialect;
+import org.hibernate.engine.jdbc.Size;
 import org.hibernate.metamodel.mapping.JdbcMapping;
 import org.hibernate.query.sqm.CastType;
 import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
@@ -56,10 +57,9 @@ public class CastingConcatFunction extends AbstractSqmSelfRenderingFunctionDescr
 this.argumentRenderingMode = argumentRenderingMode;
 this.concatArgumentCastType = typeConfiguration.getDdlTypeRegistry().getDescriptor( SqlTypes.VARCHAR )
 .getCastTypeName(
+Size.nil(),
 typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.STRING ),
-null,
-null,
-null
+typeConfiguration.getDdlTypeRegistry()
 );
 }

View File

@@ -12,6 +12,11 @@ import java.util.Arrays;
 import org.hibernate.boot.model.FunctionContributions;
 import org.hibernate.dialect.Dialect;
+import org.hibernate.dialect.function.array.ArrayAggFunction;
+import org.hibernate.dialect.function.array.ArrayConstructorFunction;
+import org.hibernate.dialect.function.array.CastingArrayConstructorFunction;
+import org.hibernate.dialect.function.array.OracleArrayAggEmulation;
+import org.hibernate.dialect.function.array.OracleArrayConstructorFunction;
 import org.hibernate.query.sqm.function.SqmFunctionRegistry;
 import org.hibernate.query.sqm.produce.function.StandardFunctionArgumentTypeResolvers;
 import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
@@ -2533,4 +2538,46 @@ public class CommonFunctionFactory {
 .setArgumentListSignature( "(TEMPORAL_UNIT field, TEMPORAL datetime)" )
 .register();
 }
+/**
+* H2, HSQL array() constructor function
+*/
+public void array() {
+functionRegistry.register( "array", new ArrayConstructorFunction( true ) );
+}
+/**
+* CockroachDB and PostgreSQL array() constructor function
+*/
+public void array_casting() {
+functionRegistry.register( "array", new CastingArrayConstructorFunction() );
+}
+/**
+* Google Spanner array() constructor function
+*/
+public void array_withoutKeyword() {
+functionRegistry.register( "array", new ArrayConstructorFunction( false ) );
+}
+/**
+* Oracle array() constructor function
+*/
+public void array_oracle() {
+functionRegistry.register( "array", new OracleArrayConstructorFunction() );
+}
+/**
+* H2, HSQL, CockroachDB and PostgreSQL array_agg() function
+*/
+public void arrayAggregate() {
+functionRegistry.register( ArrayAggFunction.FUNCTION_NAME, new ArrayAggFunction( "array_agg", false, true ) );
+}
+/**
+* Oracle array_agg() function
+*/
+public void arrayAggregate_jsonArrayagg() {
+functionRegistry.register( ArrayAggFunction.FUNCTION_NAME, new OracleArrayAggEmulation() );
+}
 }
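A custom dialect can opt into the same functions by calling the matching factory methods from initializeFunctionRegistry(), exactly as the built-in dialects above do. A minimal sketch; the base class and the choice of array_casting()/arrayAggregate() are assumptions that depend on the target database's SQL support:

import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.dialect.function.CommonFunctionFactory;

// Illustrative only: PostgreSQLDialect already registers these after this commit;
// the point is merely to show where a third-party dialect would hook in.
public class MyArrayCapableDialect extends PostgreSQLDialect {

	@Override
	public void initializeFunctionRegistry(FunctionContributions functionContributions) {
		super.initializeFunctionRegistry( functionContributions );
		final CommonFunctionFactory functionFactory = new CommonFunctionFactory( functionContributions );
		functionFactory.array_casting();   // array() constructor for PostgreSQL/CockroachDB-style databases
		functionFactory.arrayAggregate();  // plain array_agg() aggregate
	}
}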

View File

@@ -197,9 +197,7 @@ public class FormatFunction extends AbstractSqmFunctionDescriptor implements Fun
 @Override
 public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
 final List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
-final ReturnableType<?> resultType = resolveResultType(
-walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()
-);
+final ReturnableType<?> resultType = resolveResultType( walker );
 final MappingModelExpressible<?> mappingModelExpressible = resultType == null ? null : getMappingModelExpressible(
 walker,
 resultType,

View File

@@ -72,9 +72,7 @@ public class HypotheticalSetWindowEmulation extends HypotheticalSetFunction {
 else if ( currentClause != Clause.SELECT ) {
 throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported" );
 }
-final ReturnableType<?> resultType = resolveResultType(
-walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()
-);
+final ReturnableType<?> resultType = resolveResultType( walker );
 List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
 ArgumentsValidator argumentsValidator = getArgumentsValidator();

View File

@@ -8,6 +8,7 @@ package org.hibernate.dialect.function;
 import java.util.Collections;
 import java.util.List;
+import java.util.function.Supplier;
 import org.hibernate.metamodel.mapping.BasicValuedMapping;
 import org.hibernate.metamodel.mapping.JdbcMappingContainer;
@@ -154,7 +155,9 @@ public class InverseDistributionFunction extends AbstractSqmSelfRenderingFunctio
 }
 @Override
-protected ReturnableType<?> resolveResultType(TypeConfiguration typeConfiguration) {
+protected ReturnableType<?> resolveResultType(
+Supplier<MappingModelExpressible<?>> inferredTypeSupplier,
+TypeConfiguration typeConfiguration) {
 return (ReturnableType<?>)
 getWithinGroup().getSortSpecifications().get( 0 )
 .getSortExpression()

View File

@@ -67,9 +67,7 @@ public class InverseDistributionWindowEmulation extends InverseDistributionFunct
 else if ( currentClause != Clause.SELECT ) {
 throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported" );
 }
-final ReturnableType<?> resultType = resolveResultType(
-walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()
-);
+final ReturnableType<?> resultType = resolveResultType( walker );
 List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
 ArgumentsValidator argumentsValidator = getArgumentsValidator();

View File

@@ -0,0 +1,121 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.dialect.function.array;
import java.util.Collections;
import java.util.List;
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
import org.hibernate.query.sqm.function.FunctionKind;
import org.hibernate.query.sqm.produce.function.StandardArgumentsValidators;
import org.hibernate.query.sqm.produce.function.StandardFunctionArgumentTypeResolvers;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.expression.Distinct;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.ast.tree.select.SortSpecification;
/**
* @author Christian Beikov
*/
public class ArrayAggFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
public static final String FUNCTION_NAME = "array_agg";
private final String functionName;
private final boolean withinGroupClause;
private final boolean supportsFilter;
public ArrayAggFunction(String functionName, boolean withinGroupClause, boolean supportsFilter) {
super(
FUNCTION_NAME,
FunctionKind.ORDERED_SET_AGGREGATE,
StandardArgumentsValidators.exactly( 1 ),
ArrayViaElementArgumentReturnTypeResolver.INSTANCE,
StandardFunctionArgumentTypeResolvers.NULL
);
this.functionName = functionName;
this.withinGroupClause = withinGroupClause;
this.supportsFilter = supportsFilter;
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
SqlAstTranslator<?> walker) {
render( sqlAppender, sqlAstArguments, null, Collections.emptyList(), walker );
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
Predicate filter,
SqlAstTranslator<?> walker) {
render( sqlAppender, sqlAstArguments, filter, Collections.emptyList(), walker );
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
Predicate filter,
List<SortSpecification> withinGroup,
SqlAstTranslator<?> translator) {
final boolean caseWrapper = filter != null && ( !supportsFilter || !translator.supportsFilterClause() );
sqlAppender.appendSql( functionName );
sqlAppender.appendSql( '(' );
final SqlAstNode firstArg = sqlAstArguments.get( 0 );
final Expression arg;
if ( firstArg instanceof Distinct ) {
sqlAppender.appendSql( "distinct " );
arg = ( (Distinct) firstArg ).getExpression();
}
else {
arg = (Expression) firstArg;
}
if ( caseWrapper ) {
translator.getCurrentClauseStack().push( Clause.WHERE );
sqlAppender.appendSql( "case when " );
filter.accept( translator );
sqlAppender.appendSql( " then " );
arg.accept( translator );
sqlAppender.appendSql( " else null end" );
translator.getCurrentClauseStack().pop();
}
else {
arg.accept( translator );
}
if ( withinGroup != null && !withinGroup.isEmpty() ) {
if ( withinGroupClause ) {
sqlAppender.appendSql( ')' );
sqlAppender.appendSql( " within group (" );
}
translator.getCurrentClauseStack().push( Clause.WITHIN_GROUP );
sqlAppender.appendSql( " order by " );
withinGroup.get( 0 ).accept( translator );
for ( int i = 1; i < withinGroup.size(); i++ ) {
sqlAppender.appendSql( ',' );
withinGroup.get( i ).accept( translator );
}
translator.getCurrentClauseStack().pop();
}
sqlAppender.appendSql( ')' );
if ( !caseWrapper && filter != null ) {
translator.getCurrentClauseStack().push( Clause.WHERE );
sqlAppender.appendSql( " filter (where " );
filter.accept( translator );
sqlAppender.appendSql( ')' );
translator.getCurrentClauseStack().pop();
}
}
}
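For orientation, a minimal usage sketch of the array_agg function registered through this descriptor, assuming a mapped Book entity with title and author attributes (both hypothetical, not part of this change set):

import org.hibernate.Session;

public class ArrayAggUsageSketch {
	// Aggregates the titles of one author's books into a single array value.
	public static String[] titlesByAuthor(Session session, String author) {
		return session.createQuery(
						"select array_agg(b.title) from Book b where b.author = :author",
						String[].class )
				.setParameter( "author", author )
				.getSingleResult();
	}
}

Mapping the result to String[] here relies on the element-based return type resolution the descriptor is configured with.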

View File

@ -0,0 +1,120 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.dialect.function.array;
import java.util.List;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.query.sqm.SqmExpressible;
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
import org.hibernate.query.sqm.internal.TypecheckUtil;
import org.hibernate.query.sqm.produce.function.ArgumentsValidator;
import org.hibernate.query.sqm.produce.function.FunctionArgumentException;
import org.hibernate.query.sqm.produce.function.StandardFunctionArgumentTypeResolvers;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.BottomType;
import org.hibernate.type.spi.TypeConfiguration;
public class ArrayConstructorFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
private final boolean withKeyword;
public ArrayConstructorFunction(boolean withKeyword) {
super(
"array",
ArrayConstructorArgumentsValidator.INSTANCE,
ArrayViaElementArgumentReturnTypeResolver.INSTANCE,
StandardFunctionArgumentTypeResolvers.NULL
);
this.withKeyword = withKeyword;
}
@Override
public void render(SqlAppender sqlAppender, List<? extends SqlAstNode> arguments, SqlAstTranslator<?> walker) {
if ( withKeyword ) {
sqlAppender.append( "array" );
}
final int size = arguments.size();
if ( size == 0 ) {
sqlAppender.append( '[' );
}
else {
char separator = '[';
for ( int i = 0; i < size; i++ ) {
SqlAstNode argument = arguments.get( i );
sqlAppender.append( separator );
argument.accept( walker );
separator = ',';
}
}
sqlAppender.append( ']' );
}
private static class ArrayConstructorArgumentsValidator implements ArgumentsValidator {
public static final ArgumentsValidator INSTANCE = new ArrayConstructorArgumentsValidator();
private ArrayConstructorArgumentsValidator() {
}
@Override
public void validate(
List<? extends SqmTypedNode<?>> arguments,
String functionName,
TypeConfiguration typeConfiguration) {
final SessionFactoryImplementor sessionFactory = typeConfiguration.getSessionFactory();
final int size = arguments.size();
SqmExpressible<?> firstType = null;
for ( int i = 0; i < size; i++ ) {
final SqmExpressible<?> argument = arguments.get( i ).getExpressible();
if ( firstType == null ) {
firstType = argument;
}
else if ( !TypecheckUtil.areTypesComparable( firstType, argument, sessionFactory ) ) {
throw new FunctionArgumentException(
String.format(
"All array arguments must have a compatible type compatible to the first argument type [%s], but argument %d has type '%s'",
firstType.getTypeName(),
i + 1,
argument.getTypeName()
)
);
}
}
}
@Override
public void validateSqlTypes(List<? extends SqlAstNode> arguments, String functionName) {
final int size = arguments.size();
JdbcMappingContainer firstType = null;
for ( int i = 0; i < size; i++ ) {
final JdbcMappingContainer argumentType = ( (Expression) arguments.get( i ) ).getExpressionType();
if ( argumentType != null && !( argumentType instanceof BottomType ) ) {
if ( firstType == null ) {
firstType = argumentType;
}
else if ( firstType != argumentType ) {
throw new FunctionArgumentException(
String.format(
"All array arguments must have a type compatible to the first argument type [%s], but argument %d has type '%s'",
firstType,
i + 1,
argumentType
)
);
}
}
}
}
}
}
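As a counterpart, a small sketch of calling the array constructor function from HQL; the Employee entity and its attributes are illustrative only:

import java.util.List;
import org.hibernate.Session;

public class ArrayConstructorUsageSketch {
	// Builds a two-element array per row from two string attributes.
	public static List<String[]> namePairs(Session session) {
		return session.createQuery(
						"select array(e.firstName, e.lastName) from Employee e",
						String[].class )
				.getResultList();
	}
}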

View File

@ -0,0 +1,85 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect.function.array;
import java.lang.reflect.Array;
import java.util.List;
import java.util.function.Supplier;
import org.hibernate.dialect.Dialect;
import org.hibernate.metamodel.mapping.BasicValuedMapping;
import org.hibernate.metamodel.mapping.MappingModelExpressible;
import org.hibernate.metamodel.model.domain.DomainType;
import org.hibernate.query.ReturnableType;
import org.hibernate.query.sqm.produce.function.FunctionReturnTypeResolver;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.type.BasicType;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.hibernate.type.spi.TypeConfiguration;
/**
* A {@link FunctionReturnTypeResolver} that resolves an array type based on the arguments,
* which are supposed to be of the element type. The inferred type and implied type have precedence though.
*/
public class ArrayViaElementArgumentReturnTypeResolver implements FunctionReturnTypeResolver {
public static final FunctionReturnTypeResolver INSTANCE = new ArrayViaElementArgumentReturnTypeResolver();
private ArrayViaElementArgumentReturnTypeResolver() {
}
@Override
public ReturnableType<?> resolveFunctionReturnType(
ReturnableType<?> impliedType,
Supplier<MappingModelExpressible<?>> inferredTypeSupplier,
List<? extends SqmTypedNode<?>> arguments,
TypeConfiguration typeConfiguration) {
final MappingModelExpressible<?> inferredType = inferredTypeSupplier.get();
if ( inferredType != null ) {
if ( inferredType instanceof ReturnableType<?> ) {
return (ReturnableType<?>) inferredType;
}
else if ( inferredType instanceof BasicValuedMapping ) {
return (ReturnableType<?>) ( (BasicValuedMapping) inferredType ).getJdbcMapping();
}
}
if ( impliedType != null ) {
return impliedType;
}
for ( SqmTypedNode<?> argument : arguments ) {
final DomainType<?> sqmType = argument.getExpressible().getSqmType();
if ( sqmType instanceof ReturnableType<?> ) {
return resolveArrayType( sqmType, typeConfiguration );
}
}
return null;
}
@Override
public BasicValuedMapping resolveFunctionReturnType(
Supplier<BasicValuedMapping> impliedTypeAccess,
List<? extends SqlAstNode> arguments) {
return null;
}
@SuppressWarnings("unchecked")
public static BasicType<?> resolveArrayType(DomainType<?> elementType, TypeConfiguration typeConfiguration) {
@SuppressWarnings("unchecked") final BasicPluralJavaType<Object> arrayJavaType = (BasicPluralJavaType<Object>) typeConfiguration.getJavaTypeRegistry()
.getDescriptor(
Array.newInstance( elementType.getBindableJavaType(), 0 ).getClass()
);
final Dialect dialect = typeConfiguration.getCurrentBaseSqlTypeIndicators().getDialect();
return arrayJavaType.resolveType(
typeConfiguration,
dialect,
(BasicType<Object>) elementType,
null,
typeConfiguration.getCurrentBaseSqlTypeIndicators()
);
}
}
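To restate the resolution order described in the Javadoc above as a compact sketch: the inferred type wins, then the implied type, and only then is an array type derived from the first typed element argument. The Type interface below is a placeholder, not the Hibernate SPI:

import java.util.List;
import java.util.function.Supplier;

final class ReturnTypePrecedenceSketch {
	interface Type {}

	static Type resolve(Type impliedType, Supplier<Type> inferredTypeSupplier, List<Type> elementTypes) {
		final Type inferred = inferredTypeSupplier.get();
		if ( inferred != null ) {
			// 1. a type inferred from the surrounding expression takes precedence
			return inferred;
		}
		if ( impliedType != null ) {
			// 2. then the context-implied type
			return impliedType;
		}
		for ( Type elementType : elementTypes ) {
			if ( elementType != null ) {
				// 3. otherwise the first typed element argument decides; the real resolver
				// wraps this element type into the corresponding array type
				return elementType;
			}
		}
		// nothing known yet; resolution stays open
		return null;
	}
}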

View File

@ -0,0 +1,128 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.dialect.function.array;
import java.util.List;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.model.domain.DomainType;
import org.hibernate.query.ReturnableType;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.NodeBuilder;
import org.hibernate.query.sqm.function.FunctionRenderingSupport;
import org.hibernate.query.sqm.function.SelfRenderingFunctionSqlAstExpression;
import org.hibernate.query.sqm.function.SelfRenderingSqmFunction;
import org.hibernate.query.sqm.produce.function.ArgumentsValidator;
import org.hibernate.query.sqm.produce.function.FunctionReturnTypeResolver;
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.descriptor.sql.DdlType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
/**
* Special array constructor function that also applies a cast to the array literal,
* based on the inferred result type. PostgreSQL needs this,
* because by default it assumes a {@code text[]}, which is not compatible with {@code varchar[]}.
*/
public class CastingArrayConstructorFunction extends ArrayConstructorFunction {
public CastingArrayConstructorFunction() {
super( true );
}
@Override
protected <T> SelfRenderingSqmFunction<T> generateSqmFunctionExpression(
List<? extends SqmTypedNode<?>> arguments,
ReturnableType<T> impliedResultType,
QueryEngine queryEngine) {
return new ArrayConstructorSqmFunction<>(
this,
this,
arguments,
impliedResultType,
getArgumentsValidator(),
getReturnTypeResolver(),
queryEngine.getCriteriaBuilder(),
getName()
);
}
protected static class ArrayConstructorSqmFunction<T> extends SelfRenderingSqmFunction<T> {
public ArrayConstructorSqmFunction(
CastingArrayConstructorFunction descriptor,
FunctionRenderingSupport renderingSupport,
List<? extends SqmTypedNode<?>> arguments,
ReturnableType<T> impliedResultType,
ArgumentsValidator argumentsValidator,
FunctionReturnTypeResolver returnTypeResolver,
NodeBuilder nodeBuilder,
String name) {
super(
descriptor,
renderingSupport,
arguments,
impliedResultType,
argumentsValidator,
returnTypeResolver,
nodeBuilder,
name
);
}
@Override
public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
final ReturnableType<?> resultType = resolveResultType( walker );
List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
if ( getArgumentsValidator() != null ) {
getArgumentsValidator().validateSqlTypes( arguments, getFunctionName() );
}
return new SelfRenderingFunctionSqlAstExpression(
getFunctionName(),
getRenderingSupport(),
arguments,
resultType,
resultType == null ? null : getMappingModelExpressible( walker, resultType, arguments )
) {
@Override
public void renderToSql(
SqlAppender sqlAppender,
SqlAstTranslator<?> walker,
SessionFactoryImplementor sessionFactory) {
String arrayTypeName = null;
if ( resultType != null ) {
final DomainType<?> type = resultType.getSqmType();
if ( type instanceof BasicPluralType<?, ?> ) {
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) type;
final TypeConfiguration typeConfiguration = sessionFactory.getTypeConfiguration();
final DdlTypeRegistry ddlTypeRegistry = typeConfiguration.getDdlTypeRegistry();
final DdlType ddlType = ddlTypeRegistry.getDescriptor(
pluralType.getJdbcType().getDdlTypeCode()
);
arrayTypeName = ddlType.getCastTypeName( Size.nil(), pluralType, ddlTypeRegistry );
sqlAppender.append( "cast(" );
}
}
super.renderToSql( sqlAppender, walker, sessionFactory );
if ( arrayTypeName != null ) {
sqlAppender.appendSql( " as " );
sqlAppender.appendSql( arrayTypeName );
sqlAppender.appendSql( ')' );
}
}
};
}
}
}
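The effect of the wrapping above is simply cast(<array constructor> as <array cast type>); a minimal sketch, where the concrete cast type name is a hypothetical example of what the DDL type registry might return for a string array:

public class CastingArraySketch {
	static String wrapInCast(String renderedArrayConstructor, String arrayCastTypeName) {
		return "cast(" + renderedArrayConstructor + " as " + arrayCastTypeName + ")";
	}

	public static void main(String[] args) {
		// prints: cast(array['a','b'] as varchar(255) array)
		System.out.println( wrapInCast( "array['a','b']", "varchar(255) array" ) );
	}
}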

View File

@ -0,0 +1,94 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect.function.array;
import java.lang.reflect.Array;
import java.util.List;
import java.util.function.Supplier;
import org.hibernate.dialect.Dialect;
import org.hibernate.metamodel.mapping.BasicValuedMapping;
import org.hibernate.metamodel.mapping.MappingModelExpressible;
import org.hibernate.metamodel.model.domain.DomainType;
import org.hibernate.query.ReturnableType;
import org.hibernate.query.sqm.produce.function.FunctionReturnTypeResolver;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.hibernate.type.descriptor.jdbc.DelegatingJdbcTypeIndicators;
import org.hibernate.type.descriptor.jdbc.JdbcTypeIndicators;
import org.hibernate.type.spi.TypeConfiguration;
/**
* A {@link FunctionReturnTypeResolver} that resolves a JSON encoded array type based on the arguments,
* which are supposed to be of the element type. The inferred type and implied type have precedence though.
*/
public class JsonArrayViaElementArgumentReturnTypeResolver implements FunctionReturnTypeResolver {
public static final FunctionReturnTypeResolver INSTANCE = new JsonArrayViaElementArgumentReturnTypeResolver();
private JsonArrayViaElementArgumentReturnTypeResolver() {
}
@Override
public ReturnableType<?> resolveFunctionReturnType(
ReturnableType<?> impliedType,
Supplier<MappingModelExpressible<?>> inferredTypeSupplier,
List<? extends SqmTypedNode<?>> arguments,
TypeConfiguration typeConfiguration) {
final MappingModelExpressible<?> inferredType = inferredTypeSupplier.get();
if ( inferredType != null ) {
if ( inferredType instanceof ReturnableType<?> ) {
return (ReturnableType<?>) inferredType;
}
else if ( inferredType instanceof BasicValuedMapping ) {
return (ReturnableType<?>) ( (BasicValuedMapping) inferredType ).getJdbcMapping();
}
}
if ( impliedType != null ) {
return impliedType;
}
for ( SqmTypedNode<?> argument : arguments ) {
final DomainType<?> sqmType = argument.getExpressible().getSqmType();
if ( sqmType instanceof ReturnableType<?> ) {
return resolveJsonArrayType( sqmType, typeConfiguration );
}
}
return null;
}
@Override
public BasicValuedMapping resolveFunctionReturnType(
Supplier<BasicValuedMapping> impliedTypeAccess,
List<? extends SqlAstNode> arguments) {
return null;
}
@SuppressWarnings("unchecked")
public static BasicType<?> resolveJsonArrayType(DomainType<?> elementType, TypeConfiguration typeConfiguration) {
@SuppressWarnings("unchecked") final BasicPluralJavaType<Object> arrayJavaType = (BasicPluralJavaType<Object>) typeConfiguration.getJavaTypeRegistry()
.getDescriptor(
Array.newInstance( elementType.getBindableJavaType(), 0 ).getClass()
);
final Dialect dialect = typeConfiguration.getCurrentBaseSqlTypeIndicators().getDialect();
final JdbcTypeIndicators jdbcTypeIndicators = new DelegatingJdbcTypeIndicators( typeConfiguration.getCurrentBaseSqlTypeIndicators() ) {
@Override
public Integer getExplicitJdbcTypeCode() {
return SqlTypes.JSON;
}
};
return arrayJavaType.resolveType(
typeConfiguration,
dialect,
(BasicType<Object>) elementType,
null,
jdbcTypeIndicators
);
}
}

View File

@ -0,0 +1,342 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.dialect.function.array;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.model.domain.DomainType;
import org.hibernate.query.ReturnableType;
import org.hibernate.query.SemanticException;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.NodeBuilder;
import org.hibernate.query.sqm.SqmExpressible;
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
import org.hibernate.query.sqm.function.FunctionKind;
import org.hibernate.query.sqm.function.FunctionRenderingSupport;
import org.hibernate.query.sqm.function.SelfRenderingOrderedSetAggregateFunctionSqlAstExpression;
import org.hibernate.query.sqm.function.SelfRenderingSqmOrderedSetAggregateFunction;
import org.hibernate.query.sqm.produce.function.ArgumentsValidator;
import org.hibernate.query.sqm.produce.function.FunctionReturnTypeResolver;
import org.hibernate.query.sqm.produce.function.StandardArgumentsValidators;
import org.hibernate.query.sqm.produce.function.StandardFunctionArgumentTypeResolvers;
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.query.sqm.tree.predicate.SqmPredicate;
import org.hibernate.query.sqm.tree.select.SqmOrderByClause;
import org.hibernate.query.sqm.tree.select.SqmSortSpecification;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.cte.CteContainer;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.cte.SelfRenderingCteObject;
import org.hibernate.sql.ast.tree.expression.Distinct;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.QueryTransformer;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SortSpecification;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.sql.DdlType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
/**
* @author Christian Beikov
*/
public class OracleArrayAggEmulation extends AbstractSqmSelfRenderingFunctionDescriptor {
public static final String FUNCTION_NAME = "array_agg";
public OracleArrayAggEmulation() {
super(
FUNCTION_NAME,
FunctionKind.ORDERED_SET_AGGREGATE,
StandardArgumentsValidators.exactly( 1 ),
JsonArrayViaElementArgumentReturnTypeResolver.INSTANCE,
StandardFunctionArgumentTypeResolvers.NULL
);
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
SqlAstTranslator<?> walker) {
render( sqlAppender, sqlAstArguments, null, Collections.emptyList(), walker );
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
Predicate filter,
SqlAstTranslator<?> walker) {
render( sqlAppender, sqlAstArguments, filter, Collections.emptyList(), walker );
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
Predicate filter,
List<SortSpecification> withinGroup,
SqlAstTranslator<?> translator) {
sqlAppender.appendSql( "json_arrayagg" );
sqlAppender.appendSql( '(' );
final SqlAstNode firstArg = sqlAstArguments.get( 0 );
final Expression arg;
if ( firstArg instanceof Distinct ) {
sqlAppender.appendSql( "distinct " );
arg = ( (Distinct) firstArg ).getExpression();
}
else {
arg = (Expression) firstArg;
}
arg.accept( translator );
if ( withinGroup != null && !withinGroup.isEmpty() ) {
translator.getCurrentClauseStack().push( Clause.WITHIN_GROUP );
sqlAppender.appendSql( " order by " );
withinGroup.get( 0 ).accept( translator );
for ( int i = 1; i < withinGroup.size(); i++ ) {
sqlAppender.appendSql( ',' );
withinGroup.get( i ).accept( translator );
}
translator.getCurrentClauseStack().pop();
}
sqlAppender.appendSql( " null on null returning " );
sqlAppender.appendSql(
translator.getSessionFactory().getTypeConfiguration().getDdlTypeRegistry()
.getTypeName( SqlTypes.JSON, translator.getSessionFactory().getJdbcServices().getDialect() )
);
sqlAppender.appendSql( ')' );
if ( filter != null ) {
translator.getCurrentClauseStack().push( Clause.WHERE );
sqlAppender.appendSql( " filter (where " );
filter.accept( translator );
sqlAppender.appendSql( ')' );
translator.getCurrentClauseStack().pop();
}
}
@Override
public <T> SelfRenderingSqmOrderedSetAggregateFunction<T> generateSqmOrderedSetAggregateFunctionExpression(
List<? extends SqmTypedNode<?>> arguments,
SqmPredicate filter,
SqmOrderByClause withinGroupClause,
ReturnableType<T> impliedResultType,
QueryEngine queryEngine) {
return new OracleArrayAggSqmFunction<>(
this,
this,
arguments,
filter,
withinGroupClause,
impliedResultType,
getArgumentsValidator(),
getReturnTypeResolver(),
queryEngine.getCriteriaBuilder(),
getName()
);
}
protected static class OracleArrayAggSqmFunction<T> extends SelfRenderingSqmOrderedSetAggregateFunction<T> {
public OracleArrayAggSqmFunction(
OracleArrayAggEmulation descriptor,
FunctionRenderingSupport renderingSupport,
List<? extends SqmTypedNode<?>> arguments,
SqmPredicate filter,
SqmOrderByClause withinGroupClause,
ReturnableType<T> impliedResultType,
ArgumentsValidator argumentsValidator,
FunctionReturnTypeResolver returnTypeResolver,
NodeBuilder nodeBuilder,
String name) {
super(
descriptor,
renderingSupport,
arguments,
filter,
withinGroupClause,
impliedResultType,
argumentsValidator,
returnTypeResolver,
nodeBuilder,
name
);
}
@Override
protected ReturnableType<?> resolveResultType(TypeConfiguration typeConfiguration) {
return getReturnTypeResolver().resolveFunctionReturnType(
getImpliedResultType(),
() -> null,
getArguments(),
nodeBuilder().getTypeConfiguration()
);
}
@Override
public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
final ReturnableType<?> resultType = resolveResultType( walker );
if ( resultType == null ) {
throw new SemanticException(
"Oracle array_agg emulation requires knowledge about the return type, but resolved return type could not be determined"
);
}
final DomainType<?> type = resultType.getSqmType();
if ( !( type instanceof BasicPluralType<?, ?> ) ) {
throw new SemanticException(
"Oracle array_agg emulation requires a basic plural return type, but resolved return type was: " + type
);
}
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) type;
if ( pluralType.getJdbcType().getDefaultSqlTypeCode() == SqlTypes.JSON ) {
// If we can return the result as JSON, we don't need further special handling
return super.convertToSqlAst( walker );
}
// If we have to return an array type, then we must apply some further magic to transform the json array
// into an array of the desired array type via a with-clause defined function
final TypeConfiguration typeConfiguration = walker.getCreationContext().getSessionFactory().getTypeConfiguration();
final DdlTypeRegistry ddlTypeRegistry = typeConfiguration.getDdlTypeRegistry();
final DdlType ddlType = ddlTypeRegistry.getDescriptor(
pluralType.getJdbcType().getDdlTypeCode()
);
final String arrayTypeName = ddlType.getCastTypeName( Size.nil(), pluralType, ddlTypeRegistry );
List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
if ( getArgumentsValidator() != null ) {
getArgumentsValidator().validateSqlTypes( arguments, getFunctionName() );
}
List<SortSpecification> withinGroup;
if ( getWithinGroup() == null ) {
withinGroup = Collections.emptyList();
}
else {
walker.getCurrentClauseStack().push( Clause.WITHIN_GROUP );
try {
final List<SqmSortSpecification> sortSpecifications = getWithinGroup().getSortSpecifications();
withinGroup = new ArrayList<>( sortSpecifications.size() );
for ( SqmSortSpecification sortSpecification : sortSpecifications ) {
final SortSpecification specification = (SortSpecification) walker.visitSortSpecification( sortSpecification );
if ( specification != null ) {
withinGroup.add( specification );
}
}
}
finally {
walker.getCurrentClauseStack().pop();
}
}
final OracleArrayAggEmulationSqlAstExpression expression = new OracleArrayAggEmulationSqlAstExpression(
getFunctionName(),
getRenderingSupport(),
arguments,
getFilter() == null ? null : walker.visitNestedTopLevelPredicate( getFilter() ),
withinGroup,
resultType,
getMappingModelExpressible( walker, resultType, arguments ),
arrayTypeName
);
walker.registerQueryTransformer( expression );
return expression;
}
private static class OracleArrayAggEmulationSqlAstExpression
extends SelfRenderingOrderedSetAggregateFunctionSqlAstExpression
implements QueryTransformer {
private final String arrayTypeName;
private final String functionName;
public OracleArrayAggEmulationSqlAstExpression(
String functionName,
FunctionRenderingSupport renderer,
List<? extends SqlAstNode> sqlAstArguments,
Predicate filter,
List<SortSpecification> withinGroup,
ReturnableType<?> type,
JdbcMappingContainer expressible,
String arrayTypeName) {
super(
functionName,
renderer,
sqlAstArguments,
filter,
withinGroup,
type,
expressible
);
this.arrayTypeName = arrayTypeName;
this.functionName = "json_to_" + arrayTypeName;
}
@Override
public QuerySpec transform(CteContainer cteContainer, QuerySpec querySpec, SqmToSqlAstConverter converter) {
if ( cteContainer.getCteStatement( functionName ) == null ) {
cteContainer.addCteObject(
new SelfRenderingCteObject() {
@Override
public String getName() {
return functionName;
}
@Override
public void render(
SqlAppender sqlAppender,
SqlAstTranslator<?> walker,
SessionFactoryImplementor sessionFactory) {
sqlAppender.appendSql( "function " );
sqlAppender.appendSql( functionName );
sqlAppender.appendSql( "(p_json_array in " );
sqlAppender.appendSql(
sessionFactory.getTypeConfiguration().getDdlTypeRegistry()
.getTypeName(
SqlTypes.JSON,
sessionFactory.getJdbcServices().getDialect()
)
);
sqlAppender.appendSql( ") return " );
sqlAppender.appendSql( arrayTypeName );
sqlAppender.appendSql( " is v_result " );
sqlAppender.appendSql( arrayTypeName );
sqlAppender.appendSql( "; begin select t.value bulk collect into v_result " );
sqlAppender.appendSql( "from json_table(p_json_array,'$[*]' columns (value path '$')) t;" );
sqlAppender.appendSql( "return v_result; end; " );
}
}
);
}
return querySpec;
}
@Override
public void renderToSql(
SqlAppender sqlAppender,
SqlAstTranslator<?> walker,
SessionFactoryImplementor sessionFactory) {
// Oracle doesn't have an array_agg function, so we must use the collect function,
// which requires that we cast the result to the array type.
// On empty results, we require that array_agg returns null,
// but Oracle rather returns an empty collection, so we have to handle that.
// Unfortunately, nullif doesn't work with collection types,
// so we have to render a case when expression instead
sqlAppender.append( functionName );
sqlAppender.append( '(' );
super.renderToSql( sqlAppender, walker, sessionFactory );
sqlAppender.appendSql( ')' );
}
}
}
}
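Putting the pieces together, the with-clause function registered above plus the json_arrayagg call should produce SQL roughly of the following shape; this is an illustrative reconstruction, not verbatim translator output, and StringArrayType stands in for whatever array DDL type the result maps to:

public class OracleArrayAggEmulationShape {
	// Approximate shape only: the JSON parameter type and the final select are assumptions.
	static final String APPROXIMATE_SQL =
			"with function json_to_StringArrayType(p_json_array in json) return StringArrayType "
					+ "is v_result StringArrayType; "
					+ "begin "
					+ "select t.value bulk collect into v_result "
					+ "from json_table(p_json_array,'$[*]' columns (value path '$')) t; "
					+ "return v_result; "
					+ "end; "
					+ "select json_to_StringArrayType(json_arrayagg(b.title null on null returning json)) "
					+ "from Book b";
}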

View File

@ -0,0 +1,137 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.dialect.function.array;
import java.util.List;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.model.domain.DomainType;
import org.hibernate.query.ReturnableType;
import org.hibernate.query.SemanticException;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.NodeBuilder;
import org.hibernate.query.sqm.function.FunctionRenderingSupport;
import org.hibernate.query.sqm.function.SelfRenderingFunctionSqlAstExpression;
import org.hibernate.query.sqm.function.SelfRenderingSqmFunction;
import org.hibernate.query.sqm.produce.function.ArgumentsValidator;
import org.hibernate.query.sqm.produce.function.FunctionReturnTypeResolver;
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.descriptor.sql.DdlType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
public class OracleArrayConstructorFunction extends ArrayConstructorFunction {
public OracleArrayConstructorFunction() {
super( false );
}
@Override
protected <T> SelfRenderingSqmFunction<T> generateSqmFunctionExpression(
List<? extends SqmTypedNode<?>> arguments,
ReturnableType<T> impliedResultType,
QueryEngine queryEngine) {
return new ArrayConstructorSqmFunction<>(
this,
this,
arguments,
impliedResultType,
getArgumentsValidator(),
getReturnTypeResolver(),
queryEngine.getCriteriaBuilder(),
getName()
);
}
protected static class ArrayConstructorSqmFunction<T> extends SelfRenderingSqmFunction<T> {
public ArrayConstructorSqmFunction(
OracleArrayConstructorFunction descriptor,
FunctionRenderingSupport renderingSupport,
List<? extends SqmTypedNode<?>> arguments,
ReturnableType<T> impliedResultType,
ArgumentsValidator argumentsValidator,
FunctionReturnTypeResolver returnTypeResolver,
NodeBuilder nodeBuilder,
String name) {
super(
descriptor,
renderingSupport,
arguments,
impliedResultType,
argumentsValidator,
returnTypeResolver,
nodeBuilder,
name
);
}
@Override
public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
final ReturnableType<?> resultType = resolveResultType( walker );
List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
if ( getArgumentsValidator() != null ) {
getArgumentsValidator().validateSqlTypes( arguments, getFunctionName() );
}
if ( resultType == null ) {
throw new SemanticException(
"Oracle array constructor emulation requires knowledge about the return type, but resolved return type could not be determined"
);
}
final DomainType<?> type = resultType.getSqmType();
if ( !( type instanceof BasicPluralType<?, ?> ) ) {
throw new SemanticException(
"Oracle array constructor emulation requires a basic plural return type, but resolved return type was: " + type
);
}
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) type;
final TypeConfiguration typeConfiguration = walker.getCreationContext().getSessionFactory().getTypeConfiguration();
final DdlTypeRegistry ddlTypeRegistry = typeConfiguration.getDdlTypeRegistry();
final DdlType ddlType = ddlTypeRegistry.getDescriptor(
pluralType.getJdbcType().getDdlTypeCode()
);
final String arrayTypeName = ddlType.getCastTypeName( Size.nil(), pluralType, ddlTypeRegistry );
return new SelfRenderingFunctionSqlAstExpression(
getFunctionName(),
getRenderingSupport(),
arguments,
resultType,
getMappingModelExpressible( walker, resultType, arguments )
) {
@Override
public void renderToSql(
SqlAppender sqlAppender,
SqlAstTranslator<?> walker,
SessionFactoryImplementor sessionFactory) {
sqlAppender.appendSql( arrayTypeName );
final List<? extends SqlAstNode> arguments = getArguments();
final int size = arguments.size();
if ( size == 0 ) {
sqlAppender.append( '(' );
}
else {
char separator = '(';
for ( int i = 0; i < size; i++ ) {
SqlAstNode argument = arguments.get( i );
sqlAppender.append( separator );
argument.accept( walker );
separator = ',';
}
}
sqlAppender.append( ')' );
}
};
}
}
}
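In contrast to dialects that accept the ANSI-style array[...] literal, this emulation renders the resolved collection type's constructor; a short sketch of the difference, with StringArrayType as a hypothetical type name:

public class OracleArrayConstructorShape {
	// ANSI-style rendering:    array['a','b','c']
	// Oracle-style rendering:  StringArrayType('a','b','c')
	static String renderOracleStyle(String arrayTypeName, String... elements) {
		return arrayTypeName + "(" + String.join( ",", elements ) + ")";
	}
}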

View File

@ -0,0 +1,217 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.dialect.function.array;
import java.util.List;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.model.domain.DomainType;
import org.hibernate.query.ReturnableType;
import org.hibernate.query.SemanticException;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.NodeBuilder;
import org.hibernate.query.sqm.function.FunctionRenderingSupport;
import org.hibernate.query.sqm.function.SelfRenderingFunctionSqlAstExpression;
import org.hibernate.query.sqm.function.SelfRenderingSqmOrderedSetAggregateFunction;
import org.hibernate.query.sqm.produce.function.ArgumentsValidator;
import org.hibernate.query.sqm.produce.function.FunctionReturnTypeResolver;
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.query.sqm.tree.expression.SqmDistinct;
import org.hibernate.query.sqm.tree.predicate.SqmPredicate;
import org.hibernate.query.sqm.tree.select.SqmOrderByClause;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.expression.Distinct;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.ast.tree.select.SortSpecification;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.descriptor.sql.DdlType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
/**
* @author Christian Beikov
*/
public class OracleCollectArrayAggEmulation extends ArrayAggFunction {
public OracleCollectArrayAggEmulation() {
super( "collect", false, false );
}
@Override
public <T> SelfRenderingSqmOrderedSetAggregateFunction<T> generateSqmOrderedSetAggregateFunctionExpression(
List<? extends SqmTypedNode<?>> arguments,
SqmPredicate filter,
SqmOrderByClause withinGroupClause,
ReturnableType<T> impliedResultType,
QueryEngine queryEngine) {
if ( arguments.get( 0 ) instanceof SqmDistinct<?> ) {
throw new SemanticException( "Can't emulate distinct clause for Oracle array_agg emulation" );
}
if ( filter != null ) {
throw new SemanticException( "Can't emulate filter clause for Oracle array_agg emulation" );
}
return super.generateSqmOrderedSetAggregateFunctionExpression(
arguments,
filter,
withinGroupClause,
impliedResultType,
queryEngine
);
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
Predicate filter,
List<SortSpecification> withinGroup,
SqlAstTranslator<?> translator) {
sqlAppender.appendSql( "json_arrayagg" );
sqlAppender.appendSql( '(' );
final SqlAstNode firstArg = sqlAstArguments.get( 0 );
final Expression arg;
if ( firstArg instanceof Distinct ) {
sqlAppender.appendSql( "distinct " );
arg = ( (Distinct) firstArg ).getExpression();
}
else {
arg = (Expression) firstArg;
}
arg.accept( translator );
if ( withinGroup != null && !withinGroup.isEmpty() ) {
translator.getCurrentClauseStack().push( Clause.WITHIN_GROUP );
sqlAppender.appendSql( " order by " );
withinGroup.get( 0 ).accept( translator );
for ( int i = 1; i < withinGroup.size(); i++ ) {
sqlAppender.appendSql( ',' );
withinGroup.get( i ).accept( translator );
}
translator.getCurrentClauseStack().pop();
}
sqlAppender.appendSql( " null on null returning clob" );
sqlAppender.appendSql( ')' );
if ( filter != null ) {
translator.getCurrentClauseStack().push( Clause.WHERE );
sqlAppender.appendSql( " filter (where " );
filter.accept( translator );
sqlAppender.appendSql( ')' );
translator.getCurrentClauseStack().pop();
}
}
// @Override
// public <T> SelfRenderingSqmOrderedSetAggregateFunction<T> generateSqmOrderedSetAggregateFunctionExpression(
// List<? extends SqmTypedNode<?>> arguments,
// SqmPredicate filter,
// SqmOrderByClause withinGroupClause,
// ReturnableType<T> impliedResultType,
// QueryEngine queryEngine) {
// return new OracleArrayAggSqmFunction<>(
// this,
// this,
// arguments,
// filter,
// withinGroupClause,
// impliedResultType,
// getArgumentsValidator(),
// getReturnTypeResolver(),
// queryEngine.getCriteriaBuilder(),
// getName()
// );
// }
protected static class OracleArrayAggSqmFunction<T> extends SelfRenderingSqmOrderedSetAggregateFunction<T> {
public OracleArrayAggSqmFunction(
OracleCollectArrayAggEmulation descriptor,
FunctionRenderingSupport renderingSupport,
List<? extends SqmTypedNode<?>> arguments,
SqmPredicate filter,
SqmOrderByClause withinGroupClause,
ReturnableType<T> impliedResultType,
ArgumentsValidator argumentsValidator,
FunctionReturnTypeResolver returnTypeResolver,
NodeBuilder nodeBuilder,
String name) {
super(
descriptor,
renderingSupport,
arguments,
filter,
withinGroupClause,
impliedResultType,
argumentsValidator,
returnTypeResolver,
nodeBuilder,
name
);
}
@Override
public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
final ReturnableType<?> resultType = resolveResultType( walker );
List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
if ( getArgumentsValidator() != null ) {
getArgumentsValidator().validateSqlTypes( arguments, getFunctionName() );
}
if ( resultType == null ) {
throw new SemanticException(
"Oracle array_agg emulation requires knowledge about the return type, but resolved return type could not be determined"
);
}
final DomainType<?> type = resultType.getSqmType();
if ( !( type instanceof BasicPluralType<?, ?> ) ) {
throw new SemanticException(
"Oracle array_agg emulation requires a basic plural return type, but resolved return type was: " + type
);
}
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) type;
final TypeConfiguration typeConfiguration = walker.getCreationContext().getSessionFactory().getTypeConfiguration();
final DdlTypeRegistry ddlTypeRegistry = typeConfiguration.getDdlTypeRegistry();
final DdlType ddlType = ddlTypeRegistry.getDescriptor(
pluralType.getJdbcType().getDdlTypeCode()
);
final String arrayTypeName = ddlType.getCastTypeName( Size.nil(), pluralType, ddlTypeRegistry );
return new SelfRenderingFunctionSqlAstExpression(
getFunctionName(),
getRenderingSupport(),
arguments,
resultType,
getMappingModelExpressible( walker, resultType, arguments )
) {
@Override
public void renderToSql(
SqlAppender sqlAppender,
SqlAstTranslator<?> walker,
SessionFactoryImplementor sessionFactory) {
// Oracle doesn't have an array_agg function, so we must use the collect function,
// which requires that we cast the result to the array type.
// On empty results, we require that array_agg returns null,
// but Oracle rather returns an empty collection, so we have to handle that.
// Unfortunately, nullif doesn't work with collection types,
// so we have to render a case when expression instead
sqlAppender.append( "case when cast(" );
super.renderToSql( sqlAppender, walker, sessionFactory );
sqlAppender.appendSql( " as " );
sqlAppender.appendSql( arrayTypeName );
sqlAppender.appendSql( ")=" );
sqlAppender.appendSql( arrayTypeName );
sqlAppender.appendSql( "() then null else cast(" );
super.renderToSql( sqlAppender, walker, sessionFactory );
sqlAppender.appendSql( " as " );
sqlAppender.appendSql( arrayTypeName );
sqlAppender.appendSql( ") end" );
}
};
}
}
}
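The case expression rendered above exists only to map Oracle's empty collection back to null, matching array_agg semantics; as a compact sketch of that wrapping (string composition only, StringArrayType being a placeholder):

public class OracleCollectWrappingSketch {
	static String wrapEmptyAsNull(String collectExpression, String arrayTypeName) {
		return "case when cast(" + collectExpression + " as " + arrayTypeName + ")="
				+ arrayTypeName + "() then null else cast(" + collectExpression + " as " + arrayTypeName + ") end";
	}
}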

View File

@ -74,9 +74,7 @@ public class SelfRenderingSqmAggregateFunction<T> extends SelfRenderingSqmFuncti
@Override
public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
-final ReturnableType<?> resultType = resolveResultType(
-walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()
-);
+final ReturnableType<?> resultType = resolveResultType( walker );
List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
ArgumentsValidator argumentsValidator = getArgumentsValidator();

View File

@ -136,9 +136,7 @@ public class SelfRenderingSqmFunction<T> extends SqmFunction<T> {
@Override
public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
-final ReturnableType<?> resultType = resolveResultType(
-walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()
-);
+final ReturnableType<?> resultType = resolveResultType( walker );
List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
if ( argumentsValidator != null ) {
@ -163,9 +161,26 @@ public class SelfRenderingSqmFunction<T> extends SqmFunction<T> {
}
protected ReturnableType<?> resolveResultType(TypeConfiguration typeConfiguration) {
+return resolveResultType( () -> null, typeConfiguration );
+}
+protected ReturnableType<?> resolveResultType(SqmToSqlAstConverter walker) {
+if ( resultType == null ) {
+return resolveResultType(
+walker::resolveFunctionImpliedReturnType,
+walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()
+);
+}
+return resultType;
+}
+protected ReturnableType<?> resolveResultType(
+Supplier<MappingModelExpressible<?>> inferredTypeSupplier,
+TypeConfiguration typeConfiguration) {
if ( resultType == null ) {
resultType = returnTypeResolver.resolveFunctionReturnType(
impliedResultType,
+inferredTypeSupplier,
getArguments(),
typeConfiguration
);

View File

@ -91,9 +91,7 @@ public class SelfRenderingSqmOrderedSetAggregateFunction<T> extends SelfRenderin
@Override
public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
-final ReturnableType<?> resultType = resolveResultType(
-walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()
-);
+final ReturnableType<?> resultType = resolveResultType( walker );
List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
ArgumentsValidator argumentsValidator = getArgumentsValidator();

View File

@ -82,9 +82,7 @@ public class SelfRenderingSqmWindowFunction<T> extends SelfRenderingSqmFunction<
@Override
public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
-final ReturnableType<?> resultType = resolveResultType(
-walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()
-);
+final ReturnableType<?> resultType = resolveResultType( walker );
List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
ArgumentsValidator argumentsValidator = getArgumentsValidator();

View File

@ -7,6 +7,7 @@
package org.hibernate.query.sqm.produce.function;
import org.hibernate.metamodel.mapping.BasicValuedMapping;
+import org.hibernate.metamodel.mapping.MappingModelExpressible;
import org.hibernate.query.ReturnableType;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.sql.ast.tree.SqlAstNode;
@ -31,11 +32,34 @@ public interface FunctionReturnTypeResolver {
* of `some_function`.
*
* @return The resolved type.
+* @deprecated Use {@link #resolveFunctionReturnType(ReturnableType, Supplier, List, TypeConfiguration)} instead
*/
-ReturnableType<?> resolveFunctionReturnType(
+@Deprecated(forRemoval = true)
+default ReturnableType<?> resolveFunctionReturnType(
ReturnableType<?> impliedType,
List<? extends SqmTypedNode<?>> arguments,
-TypeConfiguration typeConfiguration);
+TypeConfiguration typeConfiguration) {
+throw new UnsupportedOperationException( "Not implemented for " + getClass().getName() );
+}
+/**
+* Resolve the return type for a function given its context-implied type and
+* the arguments to this call.
+* <p>
+* The <em>context-implied</em> type is the type implied by where the function
+* occurs in the query. E.g., for an equality predicate (`something = some_function`)
+* the implied type of the return from `some_function` would be defined by the type
+* of `some_function`.
+*
+* @return The resolved type.
+*/
+default ReturnableType<?> resolveFunctionReturnType(
+ReturnableType<?> impliedType,
+Supplier<MappingModelExpressible<?>> inferredTypeSupplier,
+List<? extends SqmTypedNode<?>> arguments,
+TypeConfiguration typeConfiguration) {
+return resolveFunctionReturnType( impliedType, arguments, typeConfiguration );
+}
/**
* Resolve the return type for a function given its context-implied type and

View File

@ -283,6 +283,7 @@ import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteColumn;
import org.hibernate.sql.ast.tree.cte.CteContainer;
+import org.hibernate.sql.ast.tree.cte.CteObject;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.cte.CteTable;
import org.hibernate.sql.ast.tree.cte.CteTableGroup;
@ -8497,10 +8498,12 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
private static class CteContainerImpl implements CteContainer {
private final CteContainer parent;
private final Map<String, CteStatement> cteStatements;
+private final Map<String, CteObject> cteObjects;
public CteContainerImpl(CteContainer parent) {
this.parent = parent;
this.cteStatements = new LinkedHashMap<>();
+this.cteObjects = new LinkedHashMap<>();
}
@Override
@ -8519,7 +8522,30 @@
@Override
public void addCteStatement(CteStatement cteStatement) {
-cteStatements.put( cteStatement.getCteTable().getTableExpression(), cteStatement );
+if ( cteStatements.putIfAbsent( cteStatement.getCteTable().getTableExpression(), cteStatement ) != null ) {
+throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getTableExpression() + " already exists" );
+}
+}
+@Override
+public Map<String, CteObject> getCteObjects() {
+return cteObjects;
+}
+@Override
+public CteObject getCteObject(String cteObjectName) {
+final CteObject cteObject = cteObjects.get( cteObjectName );
+if ( cteObject == null && parent != null ) {
+return parent.getCteObject( cteObjectName );
+}
+return cteObject;
+}
+@Override
+public void addCteObject(CteObject cteObject) {
+if ( cteObjects.putIfAbsent( cteObject.getName(), cteObject ) != null ) {
+throw new IllegalArgumentException( "A CTE object with the name " + cteObject.getName() + " already exists" );
+}
}
}

View File

@ -32,6 +32,7 @@ import org.hibernate.dialect.DmlTargetColumnQualifierSupport;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.RowLockStrategy;
import org.hibernate.dialect.SelectItemReferenceStrategy;
+import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.AbstractDelegatingWrapperOptions;
import org.hibernate.engine.spi.SessionFactoryImplementor;
@ -95,10 +96,12 @@ import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteColumn;
import org.hibernate.sql.ast.tree.cte.CteContainer;
import org.hibernate.sql.ast.tree.cte.CteMaterialization;
+import org.hibernate.sql.ast.tree.cte.CteObject;
import org.hibernate.sql.ast.tree.cte.CteSearchClauseKind;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.cte.CteTableGroup;
import org.hibernate.sql.ast.tree.cte.SearchClauseSpecification;
+import org.hibernate.sql.ast.tree.cte.SelfRenderingCteObject;
import org.hibernate.sql.ast.tree.delete.DeleteStatement;
import org.hibernate.sql.ast.tree.expression.AggregateColumnWriteExpression;
import org.hibernate.sql.ast.tree.expression.Any;
@ -1596,11 +1599,12 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
else {
cteStatements = originalCteStatements;
}
-if ( cteStatements.isEmpty() ) {
+final Collection<CteObject> cteObjects = cteContainer.getCteObjects().values();
+if ( cteStatements.isEmpty() && cteObjects.isEmpty() ) {
return;
}
if ( !supportsWithClause() ) {
-if ( isRecursive( cteStatements ) ) {
+if ( isRecursive( cteStatements ) && cteObjects.isEmpty() ) {
throw new UnsupportedOperationException( "Can't emulate recursive CTEs!" );
}
// This should be unreachable, because #needsCteInlining() must return true if #supportsWithClause() returns false,
@ -1642,6 +1646,10 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
String mainSeparator = "";
if ( isTopLevel ) {
topLevelWithClauseIndex = sqlBuffer.length();
+for ( CteObject cte : cteObjects ) {
+visitCteObject( cte );
+topLevelWithClauseIndex = sqlBuffer.length();
+}
for ( CteStatement cte : cteStatements ) {
appendSql( mainSeparator );
visitCteStatement( cte );
@ -1688,6 +1696,10 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
// This is the case when there is an existing CTE, so we need a comma for the CTE that are about to render
mainSeparator = COMMA_SEPARATOR;
}
+for ( CteObject cte : cteObjects ) {
+visitCteObject( cte );
+topLevelWithClauseIndex = sqlBuffer.length();
+}
for ( CteStatement cte : cteStatements ) {
appendSql( mainSeparator );
visitCteStatement( cte );
@ -1702,6 +1714,9 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
sqlBuffer.append( temporaryRest );
}
else {
+for ( CteObject cte : cteObjects ) {
+visitCteObject( cte );
+}
for ( CteStatement cte : cteStatements ) {
appendSql( mainSeparator );
visitCteStatement( cte );
@ -1739,6 +1754,15 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
renderCycleClause( cte );
}
+protected void visitCteObject(CteObject cteObject) {
+if ( cteObject instanceof SelfRenderingCteObject ) {
+( (SelfRenderingCteObject) cteObject ).render( this, this, sessionFactory );
+}
+else {
+throw new IllegalArgumentException( "Can't render CTE object " + cteObject.getName() + ": " + cteObject );
+}
+}
private boolean isRecursive(Collection<CteStatement> cteStatements) {
for ( CteStatement cteStatement : cteStatements ) {
if ( cteStatement.isRecursive() ) {
@ -5763,7 +5787,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
queryParts.add( statement.getQueryPart() );
return new ExistsPredicate(
new SelectStatement(
-statement.getCteStatements(),
+statement,
new QueryGroup( false, SetOperator.INTERSECT, queryParts ),
Collections.emptyList()
),
@ -5817,7 +5841,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
return new ExistsPredicate(
new SelectStatement(
-statement.getCteStatements(),
+statement,
existsQuery,
Collections.emptyList()
),
@ -5868,7 +5892,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
final ExistsPredicate existsPredicate = new ExistsPredicate(
new SelectStatement(
-statement.getCteStatements(),
+statement,
existsQuery,
Collections.emptyList()
),
@ -5995,7 +6019,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
existsPredicate,
new BetweenPredicate(
new SelectStatement(
-statement.getCteStatements(),
+statement,
countQuery,
Collections.emptyList()
),
@ -6067,7 +6091,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
private SelectStatement stripToSelectClause(SelectStatement statement) {
return new SelectStatement(
-statement.getCteStatements(),
+statement,
stripToSelectClause( statement.getQueryPart() ),
Collections.emptyList()
);
@ -6247,18 +6271,18 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
return castTarget.getSqlType(); return castTarget.getSqlType();
} }
else { else {
final Size castTargetSize = castTarget.toSize();
final DdlTypeRegistry ddlTypeRegistry = factory.getTypeConfiguration().getDdlTypeRegistry();
final SqlExpressible expressionType = (SqlExpressible) castTarget.getExpressionType(); final SqlExpressible expressionType = (SqlExpressible) castTarget.getExpressionType();
if ( expressionType instanceof BasicPluralType<?, ?> ) { if ( expressionType instanceof BasicPluralType<?, ?> ) {
final BasicPluralType<?, ?> containerType = (BasicPluralType<?, ?>) expressionType; final BasicPluralType<?, ?> containerType = (BasicPluralType<?, ?>) expressionType;
final BasicPluralJavaType<?> javaTypeDescriptor = (BasicPluralJavaType<?>) containerType.getJavaTypeDescriptor(); final BasicPluralJavaType<?> javaTypeDescriptor = (BasicPluralJavaType<?>) containerType.getJavaTypeDescriptor();
final BasicType<?> elementType = containerType.getElementType(); final BasicType<?> elementType = containerType.getElementType();
final String elementTypeName = factory.getTypeConfiguration().getDdlTypeRegistry() final String elementTypeName = ddlTypeRegistry.getDescriptor( elementType.getJdbcType().getDdlTypeCode() )
.getDescriptor( elementType.getJdbcType().getDdlTypeCode() )
.getCastTypeName( .getCastTypeName(
castTargetSize,
elementType, elementType,
castTarget.getLength(), ddlTypeRegistry
castTarget.getPrecision(),
castTarget.getScale()
); );
final String arrayTypeName = factory.getJdbcServices().getDialect().getArrayTypeName( final String arrayTypeName = factory.getJdbcServices().getDialect().getArrayTypeName(
javaTypeDescriptor.getElementJavaType().getJavaTypeClass().getSimpleName(), javaTypeDescriptor.getElementJavaType().getJavaTypeClass().getSimpleName(),
@ -6269,7 +6293,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
return arrayTypeName; return arrayTypeName;
} }
} }
final DdlTypeRegistry ddlTypeRegistry = factory.getTypeConfiguration().getDdlTypeRegistry();
DdlType ddlType = ddlTypeRegistry DdlType ddlType = ddlTypeRegistry
.getDescriptor( expressionType.getJdbcMapping().getJdbcType().getDdlTypeCode() ); .getDescriptor( expressionType.getJdbcMapping().getJdbcType().getDdlTypeCode() );
if ( ddlType == null ) { if ( ddlType == null ) {
@ -6279,10 +6302,9 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
} }
return ddlType.getCastTypeName( return ddlType.getCastTypeName(
castTargetSize,
expressionType, expressionType,
castTarget.getLength(), ddlTypeRegistry
castTarget.getPrecision(),
castTarget.getScale()
); );
} }
} }

View File

@ -6,12 +6,9 @@
*/ */
package org.hibernate.sql.ast.tree; package org.hibernate.sql.ast.tree;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map;
import org.hibernate.sql.ast.tree.cte.CteStatement; import org.hibernate.sql.ast.tree.cte.CteContainer;
import org.hibernate.sql.ast.tree.expression.ColumnReference; import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference; import org.hibernate.sql.ast.tree.from.NamedTableReference;
@ -23,14 +20,11 @@ public abstract class AbstractMutationStatement extends AbstractStatement implem
private final NamedTableReference targetTable; private final NamedTableReference targetTable;
private final List<ColumnReference> returningColumns; private final List<ColumnReference> returningColumns;
public AbstractMutationStatement(NamedTableReference targetTable) { public AbstractMutationStatement(
super( new LinkedHashMap<>() ); CteContainer cteContainer,
this.targetTable = targetTable; NamedTableReference targetTable,
this.returningColumns = Collections.emptyList(); List<ColumnReference> returningColumns) {
} super( cteContainer );
public AbstractMutationStatement(Map<String, CteStatement> cteStatements, NamedTableReference targetTable, List<ColumnReference> returningColumns) {
super( cteStatements );
this.targetTable = targetTable; this.targetTable = targetTable;
this.returningColumns = returningColumns; this.returningColumns = returningColumns;
} }

View File

@ -6,9 +6,11 @@
*/ */
package org.hibernate.sql.ast.tree; package org.hibernate.sql.ast.tree;
import java.util.LinkedHashMap;
import java.util.Map; import java.util.Map;
import org.hibernate.sql.ast.tree.cte.CteContainer; import org.hibernate.sql.ast.tree.cte.CteContainer;
import org.hibernate.sql.ast.tree.cte.CteObject;
import org.hibernate.sql.ast.tree.cte.CteStatement; import org.hibernate.sql.ast.tree.cte.CteStatement;
/** /**
@ -17,9 +19,17 @@ import org.hibernate.sql.ast.tree.cte.CteStatement;
public abstract class AbstractStatement implements Statement, CteContainer { public abstract class AbstractStatement implements Statement, CteContainer {
private final Map<String, CteStatement> cteStatements; private final Map<String, CteStatement> cteStatements;
private final Map<String, CteObject> cteObjects;
public AbstractStatement(Map<String, CteStatement> cteStatements) { public AbstractStatement(CteContainer cteContainer) {
this.cteStatements = cteStatements; if ( cteContainer == null ) {
this.cteStatements = new LinkedHashMap<>();
this.cteObjects = new LinkedHashMap<>();
}
else {
this.cteStatements = cteContainer.getCteStatements();
this.cteObjects = cteContainer.getCteObjects();
}
} }
@Override @Override
@ -38,4 +48,21 @@ public abstract class AbstractStatement implements Statement, CteContainer {
throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getTableExpression() + " already exists" ); throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getTableExpression() + " already exists" );
} }
} }
@Override
public Map<String, CteObject> getCteObjects() {
return cteObjects;
}
@Override
public CteObject getCteObject(String cteObjectName) {
return cteObjects.get( cteObjectName );
}
@Override
public void addCteObject(CteObject cteObject) {
if ( cteObjects.putIfAbsent( cteObject.getName(), cteObject ) != null ) {
throw new IllegalArgumentException( "A CTE object with the name " + cteObject.getName() + " already exists" );
}
}
} }
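As a quick orientation for callers of the new registration API: addCteObject above rejects duplicate names with an IllegalArgumentException. A minimal sketch of a guard a caller could use, relying only on the CteContainer contract added in this commit; the helper class and method names are invented for illustration:

import org.hibernate.sql.ast.tree.cte.CteContainer;
import org.hibernate.sql.ast.tree.cte.CteObject;

final class CteObjectRegistration {
	// Registers the CTE object only if no object with the same name is present,
	// avoiding the IllegalArgumentException thrown by addCteObject on duplicates.
	static void registerCteObjectIfAbsent(CteContainer container, CteObject cteObject) {
		if ( container.getCteObject( cteObject.getName() ) == null ) {
			container.addCteObject( cteObject );
		}
	}

	private CteObjectRegistration() {
	}
}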

View File

@ -6,12 +6,10 @@
*/ */
package org.hibernate.sql.ast.tree; package org.hibernate.sql.ast.tree;
import java.util.LinkedHashMap; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map;
import org.hibernate.sql.ast.tree.cte.CteContainer; import org.hibernate.sql.ast.tree.cte.CteContainer;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.ColumnReference; import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.from.FromClause; import org.hibernate.sql.ast.tree.from.FromClause;
import org.hibernate.sql.ast.tree.from.NamedTableReference; import org.hibernate.sql.ast.tree.from.NamedTableReference;
@ -25,9 +23,7 @@ public abstract class AbstractUpdateOrDeleteStatement extends AbstractMutationSt
NamedTableReference targetTable, NamedTableReference targetTable,
FromClause fromClause, FromClause fromClause,
Predicate restriction) { Predicate restriction) {
super( targetTable ); this( null, targetTable, fromClause, restriction, Collections.emptyList() );
this.fromClause = fromClause;
this.restriction = restriction;
} }
public AbstractUpdateOrDeleteStatement( public AbstractUpdateOrDeleteStatement(
@ -35,9 +31,7 @@ public abstract class AbstractUpdateOrDeleteStatement extends AbstractMutationSt
FromClause fromClause, FromClause fromClause,
Predicate restriction, Predicate restriction,
List<ColumnReference> returningColumns) { List<ColumnReference> returningColumns) {
super( new LinkedHashMap<>(), targetTable, returningColumns ); this( null, targetTable, fromClause, restriction, returningColumns );
this.fromClause = fromClause;
this.restriction = restriction;
} }
public AbstractUpdateOrDeleteStatement( public AbstractUpdateOrDeleteStatement(
@ -46,22 +40,7 @@ public abstract class AbstractUpdateOrDeleteStatement extends AbstractMutationSt
FromClause fromClause, FromClause fromClause,
Predicate restriction, Predicate restriction,
List<ColumnReference> returningColumns) { List<ColumnReference> returningColumns) {
this( super( cteContainer, targetTable, returningColumns );
cteContainer.getCteStatements(),
targetTable,
fromClause,
restriction,
returningColumns
);
}
public AbstractUpdateOrDeleteStatement(
Map<String, CteStatement> cteStatements,
NamedTableReference targetTable,
FromClause fromClause,
Predicate restriction,
List<ColumnReference> returningColumns) {
super( cteStatements, targetTable, returningColumns );
this.fromClause = fromClause; this.fromClause = fromClause;
this.restriction = restriction; this.restriction = restriction;
} }

View File

@ -22,4 +22,13 @@ public interface CteContainer {
CteStatement getCteStatement(String cteLabel); CteStatement getCteStatement(String cteLabel);
void addCteStatement(CteStatement cteStatement); void addCteStatement(CteStatement cteStatement);
Map<String, CteObject> getCteObjects();
CteObject getCteObject(String cteObjectName);
void addCteObject(CteObject cteObject);
} }

View File

@ -0,0 +1,18 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.ast.tree.cte;
/**
* An object that is rendered as part of the SQL WITH clause, alongside the regular CTE statements.
*
* @author Christian Beikov
*/
public interface CteObject {
String getName();
}
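Since CteObject declares a single abstract method, a throwaway instance can be written as a lambda, which can be handy in tests. A tiny illustrative sketch; the class and name string below are made up:

import org.hibernate.sql.ast.tree.cte.CteObject;

final class CteObjectExamples {
	// Illustrative only: a CteObject is identified solely by its name,
	// so a lambda is enough to create a throwaway instance.
	static final CteObject STRING_ARRAY_TYPE = () -> "string_array_type";

	private CteObjectExamples() {
	}
}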

View File

@ -0,0 +1,22 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.ast.tree.cte;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
/**
* A self-rendering object that is part of a WITH clause, such as a function definition.
*
* @author Christian Beikov
*/
public interface SelfRenderingCteObject extends CteObject {
void render(SqlAppender sqlAppender, SqlAstTranslator<?> walker, SessionFactoryImplementor sessionFactory);
}
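A minimal sketch of an implementation, using only the two interfaces introduced in this commit plus the pre-existing SqlAppender#appendSql(String); visitCteObject(CteObject) in AbstractSqlAstTranslator dispatches to render(). The class name and the emitted fragment are invented, not the emulation Hibernate actually generates:

import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.cte.SelfRenderingCteObject;

// Invented example: emits a fixed SQL fragment at the start of the WITH clause.
final class CommentCteObject implements SelfRenderingCteObject {
	private final String name;

	CommentCteObject(String name) {
		this.name = name;
	}

	@Override
	public String getName() {
		return name;
	}

	@Override
	public void render(SqlAppender sqlAppender, SqlAstTranslator<?> walker, SessionFactoryImplementor sessionFactory) {
		// A real implementation would render e.g. a WITH FUNCTION definition here
		sqlAppender.appendSql( "/* " + name + " */ " );
	}
}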

View File

@ -6,14 +6,13 @@
*/ */
package org.hibernate.sql.ast.tree.delete; package org.hibernate.sql.ast.tree.delete;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map;
import org.hibernate.sql.ast.SqlAstWalker; import org.hibernate.sql.ast.SqlAstWalker;
import org.hibernate.sql.ast.spi.SqlAstHelper; import org.hibernate.sql.ast.spi.SqlAstHelper;
import org.hibernate.sql.ast.tree.AbstractUpdateOrDeleteStatement; import org.hibernate.sql.ast.tree.AbstractUpdateOrDeleteStatement;
import org.hibernate.sql.ast.tree.cte.CteContainer; import org.hibernate.sql.ast.tree.cte.CteContainer;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.ColumnReference; import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.from.FromClause; import org.hibernate.sql.ast.tree.from.FromClause;
import org.hibernate.sql.ast.tree.from.NamedTableReference; import org.hibernate.sql.ast.tree.from.NamedTableReference;
@ -28,18 +27,18 @@ public class DeleteStatement extends AbstractUpdateOrDeleteStatement {
public static final String DEFAULT_ALIAS = "to_delete_"; public static final String DEFAULT_ALIAS = "to_delete_";
public DeleteStatement(NamedTableReference targetTable, Predicate restriction) { public DeleteStatement(NamedTableReference targetTable, Predicate restriction) {
this( targetTable, new FromClause(), restriction ); this( null, targetTable, new FromClause(), restriction, Collections.emptyList() );
} }
public DeleteStatement( public DeleteStatement(
NamedTableReference targetTable, NamedTableReference targetTable,
Predicate restriction, Predicate restriction,
List<ColumnReference> returningColumns) { List<ColumnReference> returningColumns) {
this( targetTable, new FromClause(), restriction, returningColumns ); this( null, targetTable, new FromClause(), restriction, returningColumns );
} }
public DeleteStatement(NamedTableReference targetTable, FromClause fromClause, Predicate restriction) { public DeleteStatement(NamedTableReference targetTable, FromClause fromClause, Predicate restriction) {
super( targetTable, fromClause, restriction ); this( null, targetTable, fromClause, restriction, Collections.emptyList() );
} }
public DeleteStatement( public DeleteStatement(
@ -47,7 +46,7 @@ public class DeleteStatement extends AbstractUpdateOrDeleteStatement {
FromClause fromClause, FromClause fromClause,
Predicate restriction, Predicate restriction,
List<ColumnReference> returningColumns) { List<ColumnReference> returningColumns) {
super( targetTable, fromClause, restriction, returningColumns ); this( null, targetTable, fromClause, restriction, returningColumns );
} }
public DeleteStatement( public DeleteStatement(
@ -56,22 +55,7 @@ public class DeleteStatement extends AbstractUpdateOrDeleteStatement {
FromClause fromClause, FromClause fromClause,
Predicate restriction, Predicate restriction,
List<ColumnReference> returningColumns) { List<ColumnReference> returningColumns) {
this( super( cteContainer, targetTable, fromClause, restriction, returningColumns );
cteContainer.getCteStatements(),
targetTable,
fromClause,
restriction,
returningColumns
);
}
public DeleteStatement(
Map<String, CteStatement> cteStatements,
NamedTableReference targetTable,
FromClause fromClause,
Predicate restriction,
List<ColumnReference> returningColumns) {
super( cteStatements, targetTable, fromClause, restriction, returningColumns );
} }
public static class DeleteStatementBuilder { public static class DeleteStatementBuilder {

View File

@ -6,6 +6,7 @@
*/ */
package org.hibernate.sql.ast.tree.expression; package org.hibernate.sql.ast.tree.expression;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.metamodel.mapping.JdbcMapping; import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.JdbcMappingContainer; import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.sql.ast.SqlAstWalker; import org.hibernate.sql.ast.SqlAstWalker;
@ -62,4 +63,12 @@ public class CastTarget implements Expression, SqlAstNode {
public void accept(SqlAstWalker sqlTreeWalker) { public void accept(SqlAstWalker sqlTreeWalker) {
sqlTreeWalker.visitCastTarget( this ); sqlTreeWalker.visitCastTarget( this );
} }
public Size toSize() {
final Size size = new Size();
size.setLength( length );
size.setPrecision( precision );
size.setScale( scale );
return size;
}
} }

View File

@ -38,25 +38,18 @@ public class InsertSelectStatement extends AbstractMutationStatement implements
private List<Values> valuesList = new ArrayList<>(); private List<Values> valuesList = new ArrayList<>();
public InsertSelectStatement(NamedTableReference targetTable) { public InsertSelectStatement(NamedTableReference targetTable) {
super( targetTable ); this( null, targetTable, Collections.emptyList() );
} }
public InsertSelectStatement(NamedTableReference targetTable, List<ColumnReference> returningColumns) { public InsertSelectStatement(NamedTableReference targetTable, List<ColumnReference> returningColumns) {
super( new LinkedHashMap<>(), targetTable, returningColumns ); this( null, targetTable, returningColumns );
} }
public InsertSelectStatement( public InsertSelectStatement(
CteContainer cteContainer, CteContainer cteContainer,
NamedTableReference targetTable, NamedTableReference targetTable,
List<ColumnReference> returningColumns) { List<ColumnReference> returningColumns) {
this( cteContainer.getCteStatements(), targetTable, returningColumns ); super( cteContainer, targetTable, returningColumns );
}
public InsertSelectStatement(
Map<String, CteStatement> cteStatements,
NamedTableReference targetTable,
List<ColumnReference> returningColumns) {
super( cteStatements, targetTable, returningColumns );
} }
@Override @Override

View File

@ -39,21 +39,14 @@ public class SelectStatement extends AbstractStatement implements SqlAstNode, Ex
} }
public SelectStatement(QueryPart queryPart, List<DomainResult<?>> domainResults) { public SelectStatement(QueryPart queryPart, List<DomainResult<?>> domainResults) {
this( new LinkedHashMap<>(), queryPart, domainResults ); this( null, queryPart, domainResults );
} }
public SelectStatement( public SelectStatement(
CteContainer cteContainer, CteContainer cteContainer,
QueryPart queryPart, QueryPart queryPart,
List<DomainResult<?>> domainResults) { List<DomainResult<?>> domainResults) {
this( cteContainer.getCteStatements(), queryPart, domainResults ); super( cteContainer );
}
public SelectStatement(
Map<String, CteStatement> cteStatements,
QueryPart queryPart,
List<DomainResult<?>> domainResults) {
super( cteStatements );
this.queryPart = queryPart; this.queryPart = queryPart;
this.domainResults = domainResults; this.domainResults = domainResults;
} }

View File

@ -7,6 +7,7 @@
package org.hibernate.sql.ast.tree.update; package org.hibernate.sql.ast.tree.update;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -32,7 +33,7 @@ public class UpdateStatement extends AbstractUpdateOrDeleteStatement {
NamedTableReference targetTable, NamedTableReference targetTable,
List<Assignment> assignments, List<Assignment> assignments,
Predicate restriction) { Predicate restriction) {
this( targetTable, new FromClause(), assignments, restriction ); this( null, targetTable, new FromClause(), assignments, restriction, Collections.emptyList() );
} }
public UpdateStatement( public UpdateStatement(
@ -40,7 +41,7 @@ public class UpdateStatement extends AbstractUpdateOrDeleteStatement {
List<Assignment> assignments, List<Assignment> assignments,
Predicate restriction, Predicate restriction,
List<ColumnReference> returningColumns) { List<ColumnReference> returningColumns) {
this( targetTable, new FromClause(), assignments, restriction, returningColumns ); this( null, targetTable, new FromClause(), assignments, restriction, returningColumns );
} }
public UpdateStatement( public UpdateStatement(
@ -48,8 +49,7 @@ public class UpdateStatement extends AbstractUpdateOrDeleteStatement {
FromClause fromClause, FromClause fromClause,
List<Assignment> assignments, List<Assignment> assignments,
Predicate restriction) { Predicate restriction) {
super( targetTable, fromClause, restriction ); this( null, targetTable, fromClause, assignments, restriction, Collections.emptyList() );
this.assignments = assignments;
} }
public UpdateStatement( public UpdateStatement(
@ -58,8 +58,7 @@ public class UpdateStatement extends AbstractUpdateOrDeleteStatement {
List<Assignment> assignments, List<Assignment> assignments,
Predicate restriction, Predicate restriction,
List<ColumnReference> returningColumns) { List<ColumnReference> returningColumns) {
super( targetTable, fromClause, restriction, returningColumns ); this( null, targetTable, fromClause, assignments, restriction, returningColumns );
this.assignments = assignments;
} }
public UpdateStatement( public UpdateStatement(
@ -69,24 +68,7 @@ public class UpdateStatement extends AbstractUpdateOrDeleteStatement {
List<Assignment> assignments, List<Assignment> assignments,
Predicate restriction, Predicate restriction,
List<ColumnReference> returningColumns) { List<ColumnReference> returningColumns) {
this( super( cteContainer, targetTable, fromClause, restriction, returningColumns );
cteContainer.getCteStatements(),
targetTable,
fromClause,
assignments,
restriction,
returningColumns
);
}
public UpdateStatement(
Map<String, CteStatement> cteStatements,
NamedTableReference targetTable,
FromClause fromClause,
List<Assignment> assignments,
Predicate restriction,
List<ColumnReference> returningColumns) {
super( cteStatements, targetTable, fromClause, restriction, returningColumns );
this.assignments = assignments; this.assignments = assignments;
} }

View File

@ -64,7 +64,7 @@ public abstract class AbstractArrayJavaType<T, E> extends AbstractClassJavaType<
} }
final BasicValueConverter<E, ?> valueConverter = elementType.getValueConverter(); final BasicValueConverter<E, ?> valueConverter = elementType.getValueConverter();
return valueConverter == null return valueConverter == null
? createType( typeConfiguration, dialect, this, elementType, columnTypeInformation, stdIndicators ) ? resolveType( typeConfiguration, dialect, this, elementType, columnTypeInformation, stdIndicators )
: createTypeUsingConverter( typeConfiguration, dialect, elementType, columnTypeInformation, stdIndicators, valueConverter ); : createTypeUsingConverter( typeConfiguration, dialect, elementType, columnTypeInformation, stdIndicators, valueConverter );
} }
@ -92,41 +92,53 @@ public abstract class AbstractArrayJavaType<T, E> extends AbstractClassJavaType<
); );
} }
BasicType<T> createType( BasicType<T> resolveType(
TypeConfiguration typeConfiguration, TypeConfiguration typeConfiguration,
Dialect dialect, Dialect dialect,
AbstractArrayJavaType<T,E> arrayJavaType, AbstractArrayJavaType<T,E> arrayJavaType,
BasicType<E> elementType, BasicType<E> elementType,
ColumnTypeInformation columnTypeInformation, ColumnTypeInformation columnTypeInformation,
JdbcTypeIndicators stdIndicators) { JdbcTypeIndicators stdIndicators) {
return typeConfiguration.getBasicTypeRegistry().getRegisteredType( elementType.getName() ) == elementType final JdbcType arrayJdbcType = getArrayJdbcType(
? typeConfiguration.standardBasicTypeForJavaType(
arrayJavaType.getJavaType(),
javaType -> basicArrayType( typeConfiguration, dialect, elementType, columnTypeInformation, stdIndicators, arrayJavaType )
)
: basicArrayType( typeConfiguration, dialect, elementType, columnTypeInformation, stdIndicators, arrayJavaType );
}
BasicType<T> basicArrayType(
TypeConfiguration typeConfiguration,
Dialect dialect,
BasicType<E> elementType,
ColumnTypeInformation columnTypeInformation,
JdbcTypeIndicators stdIndicators,
JavaType<T> javaType) {
return new BasicArrayType<>(
elementType,
getArrayJdbcType(
typeConfiguration, typeConfiguration,
dialect, dialect,
stdIndicators.getExplicitJdbcTypeCode(), stdIndicators.getExplicitJdbcTypeCode(),
elementType, elementType,
columnTypeInformation columnTypeInformation
),
javaType
); );
return typeConfiguration.getBasicTypeRegistry().resolve(
arrayJavaType,
arrayJdbcType,
() -> new BasicArrayType<>( elementType, arrayJdbcType, arrayJavaType )
);
// return typeConfiguration.getBasicTypeRegistry().getRegisteredType( elementType.getName() ) == elementType
// ? typeConfiguration.standardBasicTypeForJavaType(
// arrayJavaType.getJavaType(),
// javaType -> basicArrayType( typeConfiguration, dialect, elementType, columnTypeInformation, stdIndicators, arrayJavaType )
// )
// : basicArrayType( typeConfiguration, dialect, elementType, columnTypeInformation, stdIndicators, arrayJavaType );
} }
// BasicType<T> basicArrayType(
// TypeConfiguration typeConfiguration,
// Dialect dialect,
// BasicType<E> elementType,
// ColumnTypeInformation columnTypeInformation,
// JdbcTypeIndicators stdIndicators,
// JavaType<T> javaType) {
// return new BasicArrayType<>(
// elementType,
// getArrayJdbcType(
// typeConfiguration,
// dialect,
// stdIndicators.getExplicitJdbcTypeCode(),
// elementType,
// columnTypeInformation
// ),
// javaType
// );
// }
static JdbcType getArrayJdbcType( static JdbcType getArrayJdbcType(
TypeConfiguration typeConfiguration, TypeConfiguration typeConfiguration,
Dialect dialect, Dialect dialect,

View File

@ -82,7 +82,7 @@ public class ArrayJavaType<T> extends AbstractArrayJavaType<T[], T> {
} }
final BasicValueConverter<T, ?> valueConverter = elementType.getValueConverter(); final BasicValueConverter<T, ?> valueConverter = elementType.getValueConverter();
return valueConverter == null return valueConverter == null
? createType( typeConfiguration, dialect, arrayJavaType, elementType, columnTypeInformation, stdIndicators ) ? resolveType( typeConfiguration, dialect, arrayJavaType, elementType, columnTypeInformation, stdIndicators )
: createTypeUsingConverter( typeConfiguration, dialect, elementType, columnTypeInformation, stdIndicators, valueConverter ); : createTypeUsingConverter( typeConfiguration, dialect, elementType, columnTypeInformation, stdIndicators, valueConverter );
} }

View File

@ -27,6 +27,7 @@ import org.hibernate.internal.util.SerializationHelper;
import org.hibernate.internal.util.collections.CollectionHelper; import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.metamodel.CollectionClassification; import org.hibernate.metamodel.CollectionClassification;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation; import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.BasicArrayType;
import org.hibernate.type.BasicCollectionType; import org.hibernate.type.BasicCollectionType;
import org.hibernate.type.BasicPluralType; import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType; import org.hibernate.type.BasicType;
@ -112,17 +113,28 @@ public class BasicCollectionJavaType<C extends Collection<E>, E> extends Abstrac
} }
final BasicValueConverter<E, ?> valueConverter = elementType.getValueConverter(); final BasicValueConverter<E, ?> valueConverter = elementType.getValueConverter();
if ( valueConverter == null ) { if ( valueConverter == null ) {
final JdbcType arrayJdbcType = getArrayJdbcType(
typeConfiguration,
dialect,
stdIndicators.getPreferredSqlTypeCodeForArray(),
elementType,
columnTypeInformation
);
final Function<JavaType<Object>, BasicType<Object>> creator = javaType -> { final Function<JavaType<Object>, BasicType<Object>> creator = javaType -> {
final JdbcType arrayJdbcType =
getArrayJdbcType( typeConfiguration, dialect, Types.ARRAY, elementType, columnTypeInformation );
//noinspection unchecked,rawtypes //noinspection unchecked,rawtypes
return new BasicCollectionType( elementType, arrayJdbcType, collectionJavaType ); return new BasicCollectionType( elementType, arrayJdbcType, collectionJavaType );
}; };
if ( typeConfiguration.getBasicTypeRegistry().getRegisteredType( elementType.getName() ) == elementType ) { // if ( typeConfiguration.getBasicTypeRegistry().getRegisteredType( elementType.getName() ) == elementType ) {
return typeConfiguration.standardBasicTypeForJavaType( collectionJavaType.getJavaType(), creator ); // return typeConfiguration.standardBasicTypeForJavaType( collectionJavaType.getJavaType(), creator );
} // }
//noinspection unchecked // //noinspection unchecked
return creator.apply( (JavaType<Object>) (JavaType<?>) collectionJavaType ); // return creator.apply( (JavaType<Object>) (JavaType<?>) collectionJavaType );
return typeConfiguration.getBasicTypeRegistry().resolve(
collectionJavaType,
arrayJdbcType,
() -> new BasicCollectionType<>( elementType, arrayJdbcType, collectionJavaType )
);
} }
else { else {
final JavaType<Object> relationalJavaType = typeConfiguration.getJavaTypeRegistry().resolveDescriptor( final JavaType<Object> relationalJavaType = typeConfiguration.getJavaTypeRegistry().resolveDescriptor(
@ -131,7 +143,13 @@ public class BasicCollectionJavaType<C extends Collection<E>, E> extends Abstrac
//noinspection unchecked,rawtypes //noinspection unchecked,rawtypes
return new ConvertedBasicCollectionType( return new ConvertedBasicCollectionType(
elementType, elementType,
getArrayJdbcType( typeConfiguration, dialect, Types.ARRAY, elementType, columnTypeInformation ), getArrayJdbcType(
typeConfiguration,
dialect,
stdIndicators.getPreferredSqlTypeCodeForArray(),
elementType,
columnTypeInformation
),
collectionJavaType, collectionJavaType,
new CollectionConverter( valueConverter, collectionJavaType, relationalJavaType ) new CollectionConverter( valueConverter, collectionJavaType, relationalJavaType )
); );

View File

@ -0,0 +1,133 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.type.descriptor.jdbc;
import org.hibernate.Incubating;
import org.hibernate.TimeZoneStorageStrategy;
import org.hibernate.dialect.Dialect;
import org.hibernate.type.spi.TypeConfiguration;
import jakarta.persistence.EnumType;
import jakarta.persistence.TemporalType;
public class DelegatingJdbcTypeIndicators implements JdbcTypeIndicators {
private final JdbcTypeIndicators delegate;
public DelegatingJdbcTypeIndicators(JdbcTypeIndicators delegate) {
this.delegate = delegate;
}
@Override
public boolean isNationalized() {
return delegate.isNationalized();
}
@Override
public boolean isLob() {
return delegate.isLob();
}
@Override
public EnumType getEnumeratedType() {
return delegate.getEnumeratedType();
}
@Override
public TemporalType getTemporalPrecision() {
return delegate.getTemporalPrecision();
}
@Override
public int getPreferredSqlTypeCodeForBoolean() {
return delegate.getPreferredSqlTypeCodeForBoolean();
}
@Override
public int getPreferredSqlTypeCodeForDuration() {
return delegate.getPreferredSqlTypeCodeForDuration();
}
@Override
public int getPreferredSqlTypeCodeForUuid() {
return delegate.getPreferredSqlTypeCodeForUuid();
}
@Override
public int getPreferredSqlTypeCodeForInstant() {
return delegate.getPreferredSqlTypeCodeForInstant();
}
@Override
public int getPreferredSqlTypeCodeForArray() {
return delegate.getPreferredSqlTypeCodeForArray();
}
@Override
public long getColumnLength() {
return delegate.getColumnLength();
}
@Override
public int getColumnPrecision() {
return delegate.getColumnPrecision();
}
@Override
public int getColumnScale() {
return delegate.getColumnScale();
}
@Override
@Incubating
public Integer getExplicitJdbcTypeCode() {
return delegate.getExplicitJdbcTypeCode();
}
@Override
public TimeZoneStorageStrategy getDefaultTimeZoneStorageStrategy() {
return delegate.getDefaultTimeZoneStorageStrategy();
}
@Override
public JdbcType getJdbcType(int jdbcTypeCode) {
return delegate.getJdbcType( jdbcTypeCode );
}
@Override
public int resolveJdbcTypeCode(int jdbcTypeCode) {
return delegate.resolveJdbcTypeCode( jdbcTypeCode );
}
@Override
public TypeConfiguration getTypeConfiguration() {
return delegate.getTypeConfiguration();
}
public static int getZonedTimeSqlType(TimeZoneStorageStrategy storageStrategy) {
return JdbcTypeIndicators.getZonedTimeSqlType( storageStrategy );
}
public static int getZonedTimestampSqlType(TimeZoneStorageStrategy storageStrategy) {
return JdbcTypeIndicators.getZonedTimestampSqlType( storageStrategy );
}
@Override
public int getDefaultZonedTimeSqlType() {
return delegate.getDefaultZonedTimeSqlType();
}
@Override
public int getDefaultZonedTimestampSqlType() {
return delegate.getDefaultZonedTimestampSqlType();
}
@Override
public Dialect getDialect() {
return delegate.getDialect();
}
}
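For context on why a pure delegating base class is useful: it lets a caller override a single indicator while forwarding everything else to the wrapped instance. A hedged sketch; the subclass name is invented, and SqlTypes.ARRAY is the standard Hibernate type-code constant:

import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.DelegatingJdbcTypeIndicators;
import org.hibernate.type.descriptor.jdbc.JdbcTypeIndicators;

// Invented subclass: force the structural ARRAY type code while delegating
// every other indicator to the wrapped JdbcTypeIndicators.
class ArrayPreferringIndicators extends DelegatingJdbcTypeIndicators {
	ArrayPreferringIndicators(JdbcTypeIndicators delegate) {
		super( delegate );
	}

	@Override
	public int getPreferredSqlTypeCodeForArray() {
		return SqlTypes.ARRAY;
	}
}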

View File

@ -94,6 +94,24 @@ public interface DdlType extends Serializable {
return JdbcType.isLobOrLong( getSqlTypeCode() ); return JdbcType.isLobOrLong( getSqlTypeCode() );
} }
/**
* Return the database type name corresponding to the given {@link SqlExpressible}
* that may be used as a target type in casting operations using the SQL
* {@code CAST()} function. Where the given size does not specify a length,
* precision or scale, sensible defaults for the dialect are used, typically
* the maximum possible length.
*
* @see JavaType#getDefaultSqlScale(Dialect, JdbcType)
* @see JavaType#getDefaultSqlPrecision(Dialect, JdbcType)
* @see Dialect#getMaxVarcharLength()
*
* @param columnSize the length, precision and scale to render, or {@link Size#nil()} if unspecified
* @param type the type of the expression being cast
* @param ddlTypeRegistry the registry used to resolve dependent DDL types, such as the element type of an array
*
* @return The SQL type name
*
* @since 6.3
*/
default String getCastTypeName(Size columnSize, SqlExpressible type, DdlTypeRegistry ddlTypeRegistry) {
return getCastTypeName( type, columnSize.getLength(), columnSize.getPrecision(), columnSize.getScale() );
}
/** /**
* Return the database type corresponding to the given {@link JdbcType} * Return the database type corresponding to the given {@link JdbcType}
* that may be used as a target type in casting operations using the SQL * that may be used as a target type in casting operations using the SQL
@ -106,7 +124,9 @@ public interface DdlType extends Serializable {
* @see Dialect#getMaxVarcharLength() * @see Dialect#getMaxVarcharLength()
* *
* @return The SQL type name * @return The SQL type name
* @deprecated Use {@link #getCastTypeName(Size, SqlExpressible, DdlTypeRegistry)} instead
*/ */
@Deprecated(forRemoval = true)
String getCastTypeName(JdbcType jdbcType, JavaType<?> javaType); String getCastTypeName(JdbcType jdbcType, JavaType<?> javaType);
/** /**
@ -121,7 +141,9 @@ public interface DdlType extends Serializable {
* @param scale the scale, or null, if unspecified * @param scale the scale, or null, if unspecified
* *
* @return The SQL type name * @return The SQL type name
* @deprecated Use {@link #getCastTypeName(Size, SqlExpressible, DdlTypeRegistry)} instead
*/ */
@Deprecated(forRemoval = true)
default String getCastTypeName(SqlExpressible type, Long length, Integer precision, Integer scale) { default String getCastTypeName(SqlExpressible type, Long length, Integer precision, Integer scale) {
return getCastTypeName( return getCastTypeName(
type.getJdbcMapping().getJdbcType(), type.getJdbcMapping().getJdbcType(),
@ -146,6 +168,8 @@ public interface DdlType extends Serializable {
* @see Dialect#getMaxVarcharLength() * @see Dialect#getMaxVarcharLength()
* *
* @return The SQL type name * @return The SQL type name
* @deprecated Use {@link #getCastTypeName(Size, SqlExpressible, DdlTypeRegistry)} instead
*/ */
@Deprecated(forRemoval = true)
String getCastTypeName(JdbcType jdbcType, JavaType<?> javaType, Long length, Integer precision, Integer scale); String getCastTypeName(JdbcType jdbcType, JavaType<?> javaType, Long length, Integer precision, Integer scale);
} }
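A hedged usage sketch of the new three-argument getCastTypeName overload introduced above, mirroring the updated call sites in this commit (for example the DB2 dialect and the criteria literal test). The helper class and method names are invented, a TypeConfiguration is assumed to be in scope of the caller, and the DdlTypeRegistry import path is assumed:

import org.hibernate.engine.jdbc.Size;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;

final class CastTypeNames {
	// Resolves the dialect-specific cast type name for a plain String expression,
	// letting the DdlType pick default sizes because Size.nil() carries none.
	static String varcharCastTypeName(TypeConfiguration typeConfiguration) {
		final DdlTypeRegistry ddlTypeRegistry = typeConfiguration.getDdlTypeRegistry();
		return ddlTypeRegistry.getDescriptor( SqlTypes.VARCHAR )
				.getCastTypeName(
						Size.nil(),
						typeConfiguration.getBasicTypeForJavaType( String.class ),
						ddlTypeRegistry
				);
	}

	private CastTypeNames() {
	}
}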

View File

@ -8,6 +8,7 @@ package org.hibernate.type.descriptor.sql.internal;
import org.hibernate.dialect.Dialect; import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.Size; import org.hibernate.engine.jdbc.Size;
import org.hibernate.metamodel.mapping.SqlExpressible;
import org.hibernate.type.BasicPluralType; import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType; import org.hibernate.type.BasicType;
import org.hibernate.type.Type; import org.hibernate.type.Type;
@ -21,8 +22,44 @@ import static java.sql.Types.ARRAY;
*/ */
public class ArrayDdlTypeImpl extends DdlTypeImpl { public class ArrayDdlTypeImpl extends DdlTypeImpl {
public ArrayDdlTypeImpl(Dialect dialect) { private final boolean castRawElementType;
public ArrayDdlTypeImpl(Dialect dialect, boolean castRawElementType) {
super( ARRAY, "array", dialect ); super( ARRAY, "array", dialect );
this.castRawElementType = castRawElementType;
}
@Override
public String getCastTypeName(Size columnSize, SqlExpressible type, DdlTypeRegistry ddlTypeRegistry) {
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) type;
final BasicPluralJavaType<?> javaTypeDescriptor = (BasicPluralJavaType<?>) pluralType.getJavaTypeDescriptor();
final BasicType<?> elementType = pluralType.getElementType();
String arrayElementTypeName = ddlTypeRegistry.getDescriptor( elementType.getJdbcType().getDdlTypeCode() )
.getCastTypeName(
dialect.getSizeStrategy().resolveSize(
elementType.getJdbcMapping().getJdbcType(),
elementType.getJavaTypeDescriptor(),
columnSize.getPrecision(),
columnSize.getScale(),
columnSize.getLength()
),
elementType,
ddlTypeRegistry
);
if ( castRawElementType ) {
final int paren = arrayElementTypeName.indexOf( '(' );
if ( paren > 0 ) {
final int parenEnd = arrayElementTypeName.lastIndexOf( ')' );
arrayElementTypeName = parenEnd + 1 == arrayElementTypeName.length()
? arrayElementTypeName.substring( 0, paren )
: ( arrayElementTypeName.substring( 0, paren ) + arrayElementTypeName.substring( parenEnd + 1 ) );
}
}
return dialect.getArrayTypeName(
javaTypeDescriptor.getElementJavaType().getJavaTypeClass().getSimpleName(),
arrayElementTypeName,
columnSize.getArrayLength()
);
} }
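The castRawElementType branch above strips any size arguments from the element type name before it is handed to Dialect#getArrayTypeName. A standalone restatement of just that string manipulation, runnable on its own; the class name, helper name, and sample inputs in the comments are illustrative only:

final class ArrayCastTypeNames {
	// Mirrors the parenthesis-stripping applied when castRawElementType is enabled.
	// Illustrative results:
	//   stripSizeArguments( "number(10,2)" )              -> "number"
	//   stripSizeArguments( "varchar(255) for bit data" ) -> "varchar for bit data"
	static String stripSizeArguments(String arrayElementTypeName) {
		final int paren = arrayElementTypeName.indexOf( '(' );
		if ( paren > 0 ) {
			final int parenEnd = arrayElementTypeName.lastIndexOf( ')' );
			return parenEnd + 1 == arrayElementTypeName.length()
					? arrayElementTypeName.substring( 0, paren )
					: arrayElementTypeName.substring( 0, paren ) + arrayElementTypeName.substring( parenEnd + 1 );
		}
		return arrayElementTypeName;
	}

	private ArrayCastTypeNames() {
	}
}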
@Override @Override
@ -30,8 +67,7 @@ public class ArrayDdlTypeImpl extends DdlTypeImpl {
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) type; final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) type;
final BasicPluralJavaType<?> javaTypeDescriptor = (BasicPluralJavaType<?>) pluralType.getJavaTypeDescriptor(); final BasicPluralJavaType<?> javaTypeDescriptor = (BasicPluralJavaType<?>) pluralType.getJavaTypeDescriptor();
final BasicType<?> elementType = pluralType.getElementType(); final BasicType<?> elementType = pluralType.getElementType();
final String arrayElementTypeName = final String arrayElementTypeName = ddlTypeRegistry.getTypeName(
ddlTypeRegistry.getTypeName(
elementType.getJdbcType().getDdlTypeCode(), elementType.getJdbcType().getDdlTypeCode(),
dialect.getSizeStrategy().resolveSize( dialect.getSizeStrategy().resolveSize(
elementType.getJdbcMapping().getJdbcType(), elementType.getJdbcMapping().getJdbcType(),

View File

@ -0,0 +1,148 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.orm.test.function.array;
import java.util.List;
import org.hibernate.boot.ResourceStreamLocator;
import org.hibernate.boot.spi.AdditionalMappingContributions;
import org.hibernate.boot.spi.AdditionalMappingContributor;
import org.hibernate.boot.spi.InFlightMetadataCollector;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.OracleArrayJdbcType;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.SpannerDialect;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.java.ArrayJavaType;
import org.hibernate.type.descriptor.java.spi.JavaTypeRegistry;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
import org.hibernate.testing.orm.domain.StandardDomainModel;
import org.hibernate.testing.orm.domain.gambit.EntityOfBasics;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
/**
* @author Christian Beikov
*/
@BootstrapServiceRegistry(
javaServices = @BootstrapServiceRegistry.JavaService(
role = AdditionalMappingContributor.class,
impl = ArrayAggregateTest.UdtContributor.class
)
)
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
@ServiceRegistry(settings = @Setting(name = AvailableSettings.CONNECTION_PROVIDER, value = ""))
@DomainModel(standardModels = StandardDomainModel.GAMBIT)
@SessionFactory
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsStructuralArrays.class)
@SkipForDialect(dialectClass = SpannerDialect.class, reason = "Doesn't support array_agg ordering yet")
@SkipForDialect(dialectClass = OracleDialect.class, majorVersion = 11, reason = "Oracle array_agg emulation requires json_arrayagg which was only added in Oracle 12")
public class ArrayAggregateTest {
public static class UdtContributor implements AdditionalMappingContributor {
@Override
public void contribute(
AdditionalMappingContributions contributions,
InFlightMetadataCollector metadata,
ResourceStreamLocator resourceStreamLocator,
MetadataBuildingContext buildingContext) {
final TypeConfiguration typeConfiguration = metadata.getTypeConfiguration();
final JavaTypeRegistry javaTypeRegistry = typeConfiguration.getJavaTypeRegistry();
final JdbcTypeRegistry jdbcTypeRegistry = typeConfiguration.getJdbcTypeRegistry();
new OracleArrayJdbcType(
jdbcTypeRegistry.getDescriptor( SqlTypes.VARCHAR ),
"StringArray"
).addAuxiliaryDatabaseObjects(
new ArrayJavaType<>( javaTypeRegistry.getDescriptor( String.class ) ),
Size.nil(),
metadata.getDatabase(),
typeConfiguration
);
}
}
@BeforeEach
public void prepareData(SessionFactoryScope scope) {
scope.inTransaction( em -> {
final EntityOfBasics e1 = new EntityOfBasics( 1 );
e1.setTheString( "abc" );
final EntityOfBasics e2 = new EntityOfBasics( 2 );
e2.setTheString( "def" );
final EntityOfBasics e3 = new EntityOfBasics( 3 );
em.persist( e1 );
em.persist( e2 );
em.persist( e3 );
} );
}
@AfterEach
public void cleanup(SessionFactoryScope scope) {
scope.inTransaction( em -> {
em.createMutationQuery( "delete from EntityOfBasics" ).executeUpdate();
} );
}
@Test
public void testEmpty(SessionFactoryScope scope) {
scope.inSession( em -> {
List<String[]> results = em.createQuery( "select array_agg(e.data) within group (order by e.id) from BasicEntity e", String[].class )
.getResultList();
assertEquals( 1, results.size() );
assertNull( results.get( 0 ) );
} );
}
@Test
public void testWithoutNull(SessionFactoryScope scope) {
scope.inSession( em -> {
List<String[]> results = em.createQuery( "select array_agg(e.theString) within group (order by e.theString) from EntityOfBasics e where e.theString is not null", String[].class )
.getResultList();
assertEquals( 1, results.size() );
assertArrayEquals( new String[]{ "abc", "def" }, results.get( 0 ) );
} );
}
@Test
public void testWithNull(SessionFactoryScope scope) {
scope.inSession( em -> {
List<String[]> results = em.createQuery( "select array_agg(e.theString) within group (order by e.theString asc nulls last) from EntityOfBasics e", String[].class )
.getResultList();
assertEquals( 1, results.size() );
assertArrayEquals( new String[]{ "abc", "def", null }, results.get( 0 ) );
} );
}
@Test
public void testCompareAgainstArray(SessionFactoryScope scope) {
scope.inSession( em -> {
List<String[]> results = em.createQuery( "select 1 where array('abc','def',null) is not distinct from (select array_agg(e.theString) within group (order by e.theString asc nulls last) from EntityOfBasics e)", String[].class )
.getResultList();
assertEquals( 1, results.size() );
} );
}
}

View File

@ -0,0 +1,83 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.orm.test.function.array;
import java.util.List;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
* @author Christian Beikov
*/
@DomainModel(annotatedClasses = EntityWithArrays.class)
@SessionFactory
@RequiresDialectFeature( feature = DialectFeatureChecks.SupportsStructuralArrays.class)
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
@ServiceRegistry(settings = @Setting(name = AvailableSettings.CONNECTION_PROVIDER, value = ""))
public class ArrayConstructorTest {
@BeforeEach
public void prepareData(SessionFactoryScope scope) {
scope.inTransaction( em -> {
em.persist( new EntityWithArrays( 1L, new String[]{} ) );
em.persist( new EntityWithArrays( 2L, new String[]{ "abc", null, "def" } ) );
em.persist( new EntityWithArrays( 3L, null ) );
} );
}
@AfterEach
public void cleanup(SessionFactoryScope scope) {
scope.inTransaction( em -> {
em.createMutationQuery( "delete from EntityWithArrays" ).executeUpdate();
} );
}
@Test
public void testEmpty(SessionFactoryScope scope) {
scope.inSession( em -> {
List<EntityWithArrays> results = em.createQuery( "from EntityWithArrays e where e.theArray = array()", EntityWithArrays.class )
.getResultList();
assertEquals( 1, results.size() );
assertEquals( 1L, results.get( 0 ).getId() );
} );
}
@Test
public void testNonExisting(SessionFactoryScope scope) {
scope.inSession( em -> {
List<EntityWithArrays> results = em.createQuery( "from EntityWithArrays e where e.theArray = array('abc')", EntityWithArrays.class )
.getResultList();
assertEquals( 0, results.size() );
} );
}
@Test
public void testMultipleArguments(SessionFactoryScope scope) {
scope.inSession( em -> {
List<EntityWithArrays> results = em.createQuery( "from EntityWithArrays e where e.theArray is not distinct from array('abc', null, 'def')", EntityWithArrays.class )
.getResultList();
assertEquals( 1, results.size() );
assertEquals( 2L, results.get( 0 ).getId() );
} );
}
}

View File

@ -0,0 +1,45 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.orm.test.function.array;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
@Entity
public class EntityWithArrays {
@Id
private Long id;
@Column(name = "the_array")
private String[] theArray;
public EntityWithArrays() {
}
public EntityWithArrays(Long id, String[] theArray) {
this.id = id;
this.theArray = theArray;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String[] getTheArray() {
return theArray;
}
public void setTheArray(String[] theArray) {
this.theArray = theArray;
}
}

View File

@ -16,7 +16,9 @@ import jakarta.persistence.criteria.Path;
import jakarta.persistence.criteria.Root; import jakarta.persistence.criteria.Root;
import java.util.Arrays; import java.util.Arrays;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.OracleDialect; import org.hibernate.dialect.OracleDialect;
import org.hibernate.testing.orm.junit.SkipForDialect; import org.hibernate.testing.orm.junit.SkipForDialect;
import org.junit.Test; import org.junit.Test;
@ -34,6 +36,13 @@ import static org.junit.Assert.assertThat;
* @author Steve Ebersole * @author Steve Ebersole
*/ */
public class ParameterTest extends BaseEntityManagerFunctionalTestCase { public class ParameterTest extends BaseEntityManagerFunctionalTestCase {
@Override
protected void addConfigOptions(Map options) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
options.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
@Test @Test
public void testPrimitiveArrayParameterBinding() { public void testPrimitiveArrayParameterBinding() {

View File

@ -15,6 +15,7 @@ import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.Root; import jakarta.persistence.criteria.Root;
import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.orm.test.jpa.BaseEntityManagerFunctionalTestCase; import org.hibernate.orm.test.jpa.BaseEntityManagerFunctionalTestCase;
import org.hibernate.query.sqm.CastType; import org.hibernate.query.sqm.CastType;
import org.hibernate.query.criteria.HibernateCriteriaBuilder; import org.hibernate.query.criteria.HibernateCriteriaBuilder;
@ -100,8 +101,9 @@ public abstract class AbstractCriteriaLiteralHandlingModeTest extends BaseEntity
"?2", "?2",
typeConfiguration.getDdlTypeRegistry().getDescriptor( SqlTypes.VARCHAR ) typeConfiguration.getDdlTypeRegistry().getDescriptor( SqlTypes.VARCHAR )
.getCastTypeName( .getCastTypeName(
typeConfiguration.getJdbcTypeRegistry().getDescriptor( SqlTypes.VARCHAR ), Size.nil(),
typeConfiguration.getJavaTypeRegistry().getDescriptor( String.class ) typeConfiguration.getBasicTypeForJavaType( String.class ),
typeConfiguration.getDdlTypeRegistry()
) )
) )
.replace( "?1", expression ); .replace( "?1", expression );

View File

@ -12,6 +12,7 @@ import java.util.Set;
import java.util.SortedSet; import java.util.SortedSet;
import java.util.TreeSet; import java.util.TreeSet;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.metamodel.mapping.JdbcMapping; import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.internal.BasicAttributeMapping; import org.hibernate.metamodel.mapping.internal.BasicAttributeMapping;
import org.hibernate.metamodel.spi.MappingMetamodelImplementor; import org.hibernate.metamodel.spi.MappingMetamodelImplementor;
@ -20,8 +21,10 @@ import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.testing.jdbc.SQLStatementInspector; import org.hibernate.testing.jdbc.SQLStatementInspector;
import org.hibernate.testing.orm.junit.DomainModel; import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.Jira; import org.hibernate.testing.orm.junit.Jira;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory; import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope; import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
@ -38,6 +41,10 @@ import static org.hamcrest.Matchers.equalTo;
*/ */
@DomainModel(annotatedClasses = BasicCollectionMappingTests.EntityOfCollections.class) @DomainModel(annotatedClasses = BasicCollectionMappingTests.EntityOfCollections.class)
@SessionFactory( useCollectingStatementInspector = true ) @SessionFactory( useCollectingStatementInspector = true )
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
@ServiceRegistry(settings = @Setting(name = AvailableSettings.CONNECTION_PROVIDER, value = ""))
public class BasicCollectionMappingTests { public class BasicCollectionMappingTests {
@Test @Test

View File

@ -8,7 +8,9 @@ package org.hibernate.orm.test.mapping.collections;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.orm.test.jpa.BaseEntityManagerFunctionalTestCase; import org.hibernate.orm.test.jpa.BaseEntityManagerFunctionalTestCase;
import org.junit.Test; import org.junit.Test;
@ -30,6 +32,14 @@ public class CollectionTest extends BaseEntityManagerFunctionalTestCase {
}; };
} }
@Override
protected void addConfigOptions(Map options) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
options.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
@Test @Test
public void testLifecycle() { public void testLifecycle() {
doInJPA(this::entityManagerFactory, entityManager -> { doInJPA(this::entityManagerFactory, entityManager -> {

View File

@ -9,7 +9,9 @@ package org.hibernate.orm.test.type;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect; import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect; import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect; import org.hibernate.dialect.OracleDialect;
@ -45,6 +47,14 @@ public class BasicListTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithIntegerList.class }; return new Class[]{ TableWithIntegerList.class };
} }
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
public void startUp() { public void startUp() {
super.startUp(); super.startUp();
inTransaction( em -> { inTransaction( em -> {

View File

@ -8,9 +8,11 @@ package org.hibernate.orm.test.type;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.Map;
import java.util.SortedSet; import java.util.SortedSet;
import java.util.TreeSet; import java.util.TreeSet;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect; import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect; import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect; import org.hibernate.dialect.OracleDialect;
@ -46,6 +48,14 @@ public class BasicSortedSetTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithIntegerSortedSet.class }; return new Class[]{ TableWithIntegerSortedSet.class };
} }
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
public void startUp() { public void startUp() {
super.startUp(); super.startUp();
inTransaction( em -> { inTransaction( em -> {

View File

@ -6,6 +6,9 @@
*/
package org.hibernate.orm.test.type;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect;
@ -46,6 +49,14 @@ public class BooleanArrayTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithBooleanArrays.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
public void startUp() {
super.startUp();
inTransaction( em -> {

View File

@ -8,7 +8,9 @@ package org.hibernate.orm.test.type;
import java.sql.Date;
import java.time.LocalDate;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect;
@ -47,6 +49,14 @@ public class DateArrayTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithDateArrays.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
private LocalDate date1;
private LocalDate date2;
private LocalDate date3;

View File

@ -6,6 +6,9 @@
*/
package org.hibernate.orm.test.type;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect;
@ -43,6 +46,14 @@ public class DoubleArrayTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithDoubleArrays.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
public void startUp() {
super.startUp();
inTransaction( em -> {

View File

@ -6,6 +6,9 @@
*/
package org.hibernate.orm.test.type;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.DB2Dialect;
import org.hibernate.dialect.DerbyDialect;
@ -45,6 +48,14 @@ public class EnumArrayTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithEnumArrays.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
public void startUp() {
super.startUp();
inTransaction( em -> {

View File

@ -8,8 +8,10 @@ package org.hibernate.orm.test.type;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect;
@ -50,6 +52,14 @@ public class EnumSetConverterTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithEnumSetConverter.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
public void startUp() {
super.startUp();
inTransaction( em -> {

View File

@ -8,8 +8,10 @@ package org.hibernate.orm.test.type;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect;
@ -49,6 +51,14 @@ public class EnumSetTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithEnumSet.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
public void startUp() {
super.startUp();
inTransaction( em -> {

View File

@ -6,6 +6,9 @@
*/
package org.hibernate.orm.test.type;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect;
@ -43,6 +46,14 @@ public class FloatArrayTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithFloatArrays.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
public void startUp() {
super.startUp();
inTransaction( em -> {

View File

@ -6,6 +6,9 @@
*/
package org.hibernate.orm.test.type;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect;
@ -43,6 +46,14 @@ public class IntegerArrayTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithIntegerArrays.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
public void startUp() {
super.startUp();
inTransaction( em -> {

View File

@ -6,6 +6,9 @@
*/
package org.hibernate.orm.test.type;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect;
@ -43,6 +46,14 @@ public class LongArrayTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithLongArrays.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
public void startUp() {
super.startUp();
inTransaction( em -> {

View File

@ -6,21 +6,32 @@
*/
package org.hibernate.orm.test.type;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.junit.Assert;
import org.junit.Test;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import static org.hibernate.testing.transaction.TransactionUtil.doInHibernate;
@RequiresDialect( OracleDialect.class )
@TestForIssue( jiraKey = "HHH-10999")
public class OracleArrayTest extends BaseCoreFunctionalTestCase {
@Override
protected void configure(Configuration configuration) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
configuration.setProperty( AvailableSettings.CONNECTION_PROVIDER, "" );
}
@Test
public void test() {

View File

@ -6,11 +6,15 @@ import jakarta.persistence.GeneratedValue;
import jakarta.persistence.Id;
import org.hibernate.annotations.Array;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialect;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.hibernate.type.SqlTypes;
import org.junit.jupiter.api.Test;
@ -23,6 +27,10 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
@SessionFactory
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
@ServiceRegistry(settings = @Setting(name = AvailableSettings.CONNECTION_PROVIDER, value = ""))
@DomainModel(annotatedClasses = {OracleNestedTableTest.Container.class})
@RequiresDialect(OracleDialect.class)
public class OracleNestedTableTest {

View File

@ -6,11 +6,15 @@ import jakarta.persistence.GeneratedValue;
import jakarta.persistence.Id;
import org.hibernate.annotations.Array;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialect;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.hibernate.type.SqlTypes;
import org.junit.jupiter.api.Test;
@ -24,6 +28,10 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
@SessionFactory
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
@ServiceRegistry(settings = @Setting(name = AvailableSettings.CONNECTION_PROVIDER, value = ""))
@DomainModel(annotatedClasses = {OracleSqlArrayTest.Container.class})
@RequiresDialect(OracleDialect.class)
public class OracleSqlArrayTest {

View File

@ -6,6 +6,9 @@
*/
package org.hibernate.orm.test.type;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect;
@ -43,6 +46,14 @@ public class ShortArrayTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithShortArrays.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
public void startUp() {
super.startUp();
inTransaction( em -> {

View File

@ -6,6 +6,9 @@
*/
package org.hibernate.orm.test.type;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect;
@ -43,6 +46,14 @@ public class StringArrayTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithStringArrays.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
public void startUp() {
super.startUp();
inTransaction( em -> {

View File

@ -8,7 +8,9 @@ package org.hibernate.orm.test.type;
import java.sql.Time;
import java.time.LocalTime;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect;
@ -46,6 +48,14 @@ public class TimeArrayTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithTimeArrays.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
private LocalTime time1;
private LocalTime time2;
private LocalTime time3;

View File

@ -9,7 +9,9 @@ package org.hibernate.orm.test.type;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.time.Month;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.HSQLDialect;
import org.hibernate.dialect.OracleDialect;
@ -47,6 +49,14 @@ public class TimestampArrayTest extends BaseNonConfigCoreFunctionalTestCase {
return new Class[]{ TableWithTimestampArrays.class };
}
@Override
protected void addSettings(Map<String, Object> settings) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
settings.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
private LocalDateTime time1;
private LocalDateTime time2;
private LocalDateTime time3;

View File

@ -672,4 +672,10 @@ abstract public class DialectFeatureChecks {
return dialect.supportsCommentOn();
}
}
public static class SupportsStructuralArrays implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return dialect.getPreferredSqlTypeCodeForArray() != SqlTypes.VARBINARY;
}
}
}
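The new SupportsStructuralArrays check gives tests a way to skip dialects whose preferred SQL array representation is plain VARBINARY rather than a structural array type. A hypothetical consumer, not part of this diff, would pair it with the JUnit 5 @RequiresDialectFeature annotation roughly as sketched below, assuming the usual org.hibernate.testing.orm.junit wiring.

import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.junit.jupiter.api.Test;

// Hypothetical example test, not part of this commit: only runs on dialects that
// expose arrays as a structural SQL type rather than as serialized VARBINARY.
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsStructuralArrays.class)
public class StructuralArrayFeatureCheckExample {
	@Test
	public void runsOnlyWithStructuralArraySupport() {
		// would exercise array_agg and array constructor queries here
	}
}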