HHH-18796 Add JSON aggregate support for DB2

Christian Beikov 2024-11-06 17:29:29 +01:00
parent 26a8a693cc
commit 4d6f9baa93
13 changed files with 645 additions and 70 deletions

View File: DB2LegacyDialect.java

@@ -439,7 +439,7 @@ public class DB2LegacyDialect extends Dialect {
functionFactory.jsonArray_db2();
functionFactory.jsonArrayAgg_db2();
functionFactory.jsonObjectAgg_db2();
functionFactory.jsonTable_db2();
functionFactory.jsonTable_db2( getMaximumSeriesSize() );
}
}
@@ -459,7 +459,7 @@ public class DB2LegacyDialect extends Dialect {
functionFactory.xmlagg();
functionFactory.xmltable_db2();
functionFactory.unnest_emulated();
functionFactory.unnest_db2( getMaximumSeriesSize() );
if ( supportsRecursiveCTE() ) {
functionFactory.generateSeries_recursive( getMaximumSeriesSize(), false, true );
}
@@ -1007,7 +1007,9 @@ public class DB2LegacyDialect extends Dialect {
@Override
public AggregateSupport getAggregateSupport() {
return DB2AggregateSupport.INSTANCE;
return getDB2Version().isSameOrAfter( 11 )
? DB2AggregateSupport.JSON_INSTANCE
: DB2AggregateSupport.INSTANCE;
}
@Override

View File: DB2Dialect.java

@@ -410,7 +410,7 @@ public class DB2Dialect extends Dialect {
functionFactory.jsonArray_db2();
functionFactory.jsonArrayAgg_db2();
functionFactory.jsonObjectAgg_db2();
functionFactory.jsonTable_db2();
functionFactory.jsonTable_db2( getMaximumSeriesSize() );
}
functionFactory.xmlelement();
@@ -429,7 +429,7 @@ public class DB2Dialect extends Dialect {
functionFactory.xmlagg();
functionFactory.xmltable_db2();
functionFactory.unnest_emulated();
functionFactory.unnest_db2( getMaximumSeriesSize() );
functionFactory.generateSeries_recursive( getMaximumSeriesSize(), false, true );
}
@@ -1066,7 +1066,9 @@ public class DB2Dialect extends Dialect {
@Override
public AggregateSupport getAggregateSupport() {
return DB2AggregateSupport.INSTANCE;
return getDB2Version().isSameOrAfter( 11 )
? DB2AggregateSupport.JSON_INSTANCE
: DB2AggregateSupport.INSTANCE;
}
@Override

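Taken together, the two dialect changes above route DB2 11+ to DB2AggregateSupport.JSON_INSTANCE, which turns on aggregate (embeddable) support for JSON columns. A minimal sketch of a mapping that exercises this path; the entity and attribute names are hypothetical, only the annotations are standard:

import jakarta.persistence.Embeddable;
import jakarta.persistence.Embedded;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;

@Entity
class Person {
	@Id
	Long id;

	// Persisted as a single JSON column; component reads and writes go through
	// the json_value()/json_object() expressions built by DB2AggregateSupport below
	@Embedded
	@JdbcTypeCode(SqlTypes.JSON)
	Address address;
}

@Embeddable
class Address {
	String city;
	String zip;
}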
View File: DB2AggregateSupport.java

@@ -23,6 +23,7 @@ import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.mapping.SqlExpressible;
@@ -31,16 +32,36 @@ import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.sql.DdlType;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.type.SqlTypes.ARRAY;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.BLOB;
import static org.hibernate.type.SqlTypes.BOOLEAN;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.SMALLINT;
import static org.hibernate.type.SqlTypes.STRUCT;
import static org.hibernate.type.SqlTypes.TIME;
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_UTC;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.VARBINARY;
public class DB2AggregateSupport extends AggregateSupportImpl {
public static final AggregateSupport INSTANCE = new DB2AggregateSupport();
public static final AggregateSupport INSTANCE = new DB2AggregateSupport( false );
public static final AggregateSupport JSON_INSTANCE = new DB2AggregateSupport( true );
private final boolean jsonSupport;
public DB2AggregateSupport(boolean jsonSupport) {
this.jsonSupport = jsonSupport;
}
@Override
public String aggregateComponentCustomReadExpression(
@@ -51,12 +72,83 @@ public class DB2AggregateSupport extends AggregateSupportImpl {
int aggregateColumnTypeCode,
SqlTypedMapping column) {
switch ( aggregateColumnTypeCode ) {
case JSON:
case JSON_ARRAY:
if ( !jsonSupport ) {
break;
}
switch ( column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode() ) {
case BOOLEAN:
if ( SqlTypes.isNumericType( column.getJdbcMapping().getJdbcType().getDdlTypeCode() ) ) {
return template.replace(
placeholder,
"decode(json_value(" + aggregateParentReadExpression + ",'$." + columnExpression + "'),'true',1,'false',0)"
);
}
else {
return template.replace(
placeholder,
"decode(json_value(" + aggregateParentReadExpression + ",'$." + columnExpression + "'),'true',true,'false',false)"
);
}
case TIMESTAMP_WITH_TIMEZONE:
case TIMESTAMP_UTC:
return template.replace(
placeholder,
"cast(trim(trailing 'Z' from json_value(" + aggregateParentReadExpression + ",'$." + columnExpression + "' returning varchar(35))) as " + column.getColumnDefinition() + ")"
);
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
case BLOB:
// We encode binary data as hex, so we have to decode here
return template.replace(
placeholder,
"hextoraw(json_value(" + aggregateParentReadExpression + ",'$." + columnExpression + "'))"
);
case JSON:
case JSON_ARRAY:
return template.replace(
placeholder,
"json_query(" + aggregateParentReadExpression + ",'$." + columnExpression + "')"
);
default:
return template.replace(
placeholder,
"json_value(" + aggregateParentReadExpression + ",'$." + columnExpression + "' returning " + column.getColumnDefinition() + ")"
);
}
case STRUCT:
return template.replace( placeholder, aggregateParentReadExpression + ".." + columnExpression );
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnTypeCode );
}
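To make the read path concrete: a hypothetical varchar component `city` inside a JSON aggregate column `address` hits the default branch above. A runnable sketch of the substitution, with all names assumed:

public class JsonReadExpressionDemo {
	public static void main(String[] args) {
		// Hibernate passes a template containing a placeholder marking where the
		// component read expression is spliced in; both are trivial in this sketch.
		final String template = "?";
		final String placeholder = "?";
		final String aggregateParentReadExpression = "address";
		final String columnExpression = "city";
		final String columnDefinition = "varchar(255)";
		// Mirrors the default branch of aggregateComponentCustomReadExpression()
		final String readExpression = template.replace(
				placeholder,
				"json_value(" + aggregateParentReadExpression + ",'$." + columnExpression
						+ "' returning " + columnDefinition + ")"
		);
		// Prints: json_value(address,'$.city' returning varchar(255))
		System.out.println( readExpression );
	}
}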
private static String jsonCustomWriteExpression(String customWriteExpression, JdbcMapping jdbcMapping) {
final int sqlTypeCode = jdbcMapping.getJdbcType().getDefaultSqlTypeCode();
switch ( sqlTypeCode ) {
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
case BLOB:
// We encode binary data as hex
return "hex(" + customWriteExpression + ")";
case ARRAY:
case JSON_ARRAY:
return "(" + customWriteExpression + ") format json";
// case BOOLEAN:
// return "(" + customWriteExpression + ")=true";
case TIME:
return "varchar_format(timestamp('1970-01-01'," + customWriteExpression + "),'HH24:MI:SS')";
case TIMESTAMP:
return "replace(varchar_format(" + customWriteExpression + ",'YYYY-MM-DD HH24:MI:SS.FF9'),' ','T')";
case TIMESTAMP_UTC:
return "replace(varchar_format(" + customWriteExpression + ",'YYYY-MM-DD HH24:MI:SS.FF9'),' ','T')||'Z'";
default:
return customWriteExpression;
}
}
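For illustration, these are the fragments the helper above yields for a plain parameter write expression "?" under a few of its branches (a sketch collecting the literal outputs, not part of the commit):

public class JsonWriteFragmentExamples {
	// BINARY/VARBINARY/BLOB: binary data is stored hex-encoded in the JSON document
	static final String BINARY = "hex(?)";
	// ARRAY/JSON_ARRAY: already JSON, so it is embedded as-is
	static final String JSON_ARRAY = "(?) format json";
	// TIME: routed through a 1970-01-01 timestamp to obtain an HH:MM:SS string
	static final String TIME = "varchar_format(timestamp('1970-01-01',?),'HH24:MI:SS')";
	// TIMESTAMP_UTC: ISO-8601 with a 'T' separator and a trailing 'Z'
	static final String TIMESTAMP_UTC = "replace(varchar_format(?,'YYYY-MM-DD HH24:MI:SS.FF9'),' ','T')||'Z'";
}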
@Override
public String aggregateComponentAssignmentExpression(
String aggregateParentAssignmentExpression,
@@ -64,6 +156,13 @@ public class DB2AggregateSupport extends AggregateSupportImpl {
int aggregateColumnTypeCode,
Column column) {
switch ( aggregateColumnTypeCode ) {
case JSON:
case JSON_ARRAY:
if ( jsonSupport ) {
// For JSON we always have to replace the whole object
return aggregateParentAssignmentExpression;
}
break;
case STRUCT:
return aggregateParentAssignmentExpression + ".." + columnExpression;
}
@@ -74,7 +173,16 @@ public class DB2AggregateSupport extends AggregateSupportImpl {
public String aggregateCustomWriteExpression(
AggregateColumn aggregateColumn,
List<Column> aggregatedColumns) {
switch ( aggregateColumn.getTypeCode() ) {
// We need to know what kind of array this is (STRUCT_ARRAY/JSON_ARRAY/XML_ARRAY),
// which we can easily get from the type code of the aggregate column
final int sqlTypeCode = aggregateColumn.getType().getJdbcType().getDefaultSqlTypeCode();
switch ( sqlTypeCode == SqlTypes.ARRAY ? aggregateColumn.getTypeCode() : sqlTypeCode ) {
case JSON:
case JSON_ARRAY:
if ( jsonSupport ) {
return null;
}
break;
case STRUCT:
final StringBuilder sb = new StringBuilder();
appendStructCustomWriteExpression( aggregateColumn, aggregatedColumns, sb );
@@ -107,16 +215,21 @@ public class DB2AggregateSupport extends AggregateSupportImpl {
@Override
public int aggregateComponentSqlTypeCode(int aggregateColumnSqlTypeCode, int columnSqlTypeCode) {
if ( aggregateColumnSqlTypeCode == STRUCT && columnSqlTypeCode == BOOLEAN ) {
if ( aggregateColumnSqlTypeCode == STRUCT ) {
// DB2 doesn't support booleans in structs
return SMALLINT;
return columnSqlTypeCode == BOOLEAN ? SMALLINT : columnSqlTypeCode;
}
else if ( aggregateColumnSqlTypeCode == JSON ) {
return columnSqlTypeCode == ARRAY ? JSON_ARRAY : columnSqlTypeCode;
}
else {
return columnSqlTypeCode;
}
return columnSqlTypeCode;
}
@Override
public boolean requiresAggregateCustomWriteExpressionRenderer(int aggregateSqlTypeCode) {
return aggregateSqlTypeCode == STRUCT;
return aggregateSqlTypeCode == STRUCT || aggregateSqlTypeCode == JSON;
}
@Override
@@ -126,12 +239,23 @@ public class DB2AggregateSupport extends AggregateSupportImpl {
TypeConfiguration typeConfiguration) {
final int aggregateSqlTypeCode = aggregateColumn.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode();
switch ( aggregateSqlTypeCode ) {
case JSON:
if ( jsonSupport ) {
return jsonAggregateColumnWriter( aggregateColumn, columnsToUpdate );
}
break;
case STRUCT:
return structAggregateColumnWriter( aggregateColumn, columnsToUpdate, typeConfiguration );
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateSqlTypeCode );
}
private WriteExpressionRenderer jsonAggregateColumnWriter(
SelectableMapping aggregateColumn,
SelectableMapping[] columns) {
return new RootJsonWriteExpression( aggregateColumn, columns );
}
private WriteExpressionRenderer structAggregateColumnWriter(
SelectableMapping aggregateColumn,
SelectableMapping[] columns,
@@ -473,4 +597,170 @@ public class DB2AggregateSupport extends AggregateSupportImpl {
|| columTypeLC.startsWith( "char" ) && columTypeLC.endsWith( " bit data" );
}
interface JsonWriteExpression {
void append(
SqlAppender sb,
String path,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression expression);
}
private static class AggregateJsonWriteExpression implements JsonWriteExpression {
private final LinkedHashMap<String, JsonWriteExpression> subExpressions = new LinkedHashMap<>();
protected void initializeSubExpressions(SelectableMapping aggregateColumn, SelectableMapping[] columns) {
for ( SelectableMapping column : columns ) {
final SelectablePath selectablePath = column.getSelectablePath();
final SelectablePath[] parts = selectablePath.getParts();
AggregateJsonWriteExpression currentAggregate = this;
for ( int i = 1; i < parts.length - 1; i++ ) {
currentAggregate = (AggregateJsonWriteExpression) currentAggregate.subExpressions.computeIfAbsent(
parts[i].getSelectableName(),
k -> new AggregateJsonWriteExpression()
);
}
final String customWriteExpression = column.getWriteExpression();
currentAggregate.subExpressions.put(
parts[parts.length - 1].getSelectableName(),
new BasicJsonWriteExpression(
column,
jsonCustomWriteExpression( customWriteExpression, column.getJdbcMapping() )
)
);
}
passThroughUnsetSubExpressions( aggregateColumn );
}
protected void passThroughUnsetSubExpressions(SelectableMapping aggregateColumn) {
final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) aggregateColumn.getJdbcMapping().getJdbcType();
final EmbeddableMappingType embeddableMappingType = aggregateJdbcType.getEmbeddableMappingType();
final int jdbcValueCount = embeddableMappingType.getJdbcValueCount();
for ( int i = 0; i < jdbcValueCount; i++ ) {
final SelectableMapping selectableMapping = embeddableMappingType.getJdbcValueSelectable( i );
final JsonWriteExpression jsonWriteExpression = subExpressions.get( selectableMapping.getSelectableName() );
if ( jsonWriteExpression == null ) {
subExpressions.put(
selectableMapping.getSelectableName(),
new PassThroughExpression( selectableMapping )
);
}
else if ( jsonWriteExpression instanceof AggregateJsonWriteExpression writeExpression ) {
writeExpression.passThroughUnsetSubExpressions( selectableMapping );
}
}
}
@Override
public void append(
SqlAppender sb,
String path,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression expression) {
sb.append( "json_object" );
char separator = '(';
for ( Map.Entry<String, JsonWriteExpression> entry : subExpressions.entrySet() ) {
final String column = entry.getKey();
final JsonWriteExpression value = entry.getValue();
final String subPath = "json_query(" + path + ",'$." + column + "') format json";
sb.append( separator );
if ( value instanceof AggregateJsonWriteExpression ) {
sb.append( '\'' );
sb.append( column );
sb.append( "' value coalesce(" );
value.append( sb, subPath, translator, expression );
sb.append( ",json_object())" );
}
else {
value.append( sb, subPath, translator, expression );
}
separator = ',';
}
sb.append( ')' );
}
}
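Combining append() with the pass-through handling: updating only `city` of a hypothetical `address` aggregate that also holds `zip` would render a set clause of roughly this shape (NO_UNTYPED forces the explicit cast; nested embeddables would additionally be wrapped in coalesce(...,json_object()) as shown above):

public class JsonObjectRenderingSketch {
	// Approximate output of the write expression renderer for "address.city = ?";
	// the untouched zip value is copied over from the existing document
	static final String SET_CLAUSE = "address=json_object("
			+ "'city' value cast(? as varchar(255)),"
			+ "'zip' value json_query(address,'$.zip') format json)";
}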
private static class RootJsonWriteExpression extends AggregateJsonWriteExpression
implements WriteExpressionRenderer {
private final String path;
RootJsonWriteExpression(SelectableMapping aggregateColumn, SelectableMapping[] columns) {
this.path = aggregateColumn.getSelectionExpression();
initializeSubExpressions( aggregateColumn, columns );
}
@Override
public void render(
SqlAppender sqlAppender,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression aggregateColumnWriteExpression,
String qualifier) {
final String basePath;
if ( qualifier == null || qualifier.isBlank() ) {
basePath = path;
}
else {
basePath = qualifier + "." + path;
}
append( sqlAppender, basePath, translator, aggregateColumnWriteExpression );
}
}
private static class BasicJsonWriteExpression implements JsonWriteExpression {
private final SelectableMapping selectableMapping;
private final String customWriteExpressionStart;
private final String customWriteExpressionEnd;
BasicJsonWriteExpression(SelectableMapping selectableMapping, String customWriteExpression) {
this.selectableMapping = selectableMapping;
if ( customWriteExpression.equals( "?" ) ) {
this.customWriteExpressionStart = "";
this.customWriteExpressionEnd = "";
}
else {
final String[] parts = StringHelper.split( "?", customWriteExpression );
assert parts.length == 2;
this.customWriteExpressionStart = parts[0];
this.customWriteExpressionEnd = parts[1];
}
}
@Override
public void append(
SqlAppender sb,
String path,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression expression) {
sb.append( '\'' );
sb.append( selectableMapping.getSelectableName() );
sb.append( "' value " );
sb.append( customWriteExpressionStart );
// We use NO_UNTYPED here so that expressions which require type inference are cast explicitly,
// since we don't know what the custom write expression this is embedded in looks like,
// so we have to be pessimistic and avoid ambiguities
translator.render( expression.getValueExpression( selectableMapping ), SqlAstNodeRenderingMode.NO_UNTYPED );
sb.append( customWriteExpressionEnd );
}
}
private static class PassThroughExpression implements JsonWriteExpression {
private final SelectableMapping selectableMapping;
PassThroughExpression(SelectableMapping selectableMapping) {
this.selectableMapping = selectableMapping;
}
@Override
public void append(
SqlAppender sb,
String path,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression expression) {
sb.append( '\'' );
sb.append( selectableMapping.getSelectableName() );
sb.append( "' value " );
sb.append( path );
}
}
}

View File: CommonFunctionFactory.java

@@ -4225,6 +4225,13 @@ public class CommonFunctionFactory {
functionRegistry.register( "unnest", new HANAUnnestFunction() );
}
/**
* DB2 unnest() function
*/
public void unnest_db2(int maximumArraySize) {
functionRegistry.register( "unnest", new DB2UnnestFunction( maximumArraySize ) );
}
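Once registered, unnest() is available from HQL. A hedged usage sketch: the Book entity and its tags array attribute are assumptions, and the join uses the generic set-returning-function syntax rather than anything DB2-specific:

import java.util.List;
import org.hibernate.Session;

public class UnnestUsageSketch {
	static List<String> allTags(Session session) {
		// On DB2 this now renders through DB2UnnestFunction (see below),
		// joining the max_series CTE against json_table() per array element
		return session.createQuery( "select t from Book b join unnest(b.tags) t", String.class )
				.getResultList();
	}
}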
/**
* Standard generate_series() function
*/
@@ -4305,8 +4312,8 @@ public class CommonFunctionFactory {
/**
* DB2 json_table() function
*/
public void jsonTable_db2() {
functionRegistry.register( "json_table", new DB2JsonTableFunction( typeConfiguration ) );
public void jsonTable_db2(int maximumSeriesSize) {
functionRegistry.register( "json_table", new DB2JsonTableFunction( maximumSeriesSize, typeConfiguration ) );
}
/**

View File: CteGenerateSeriesFunction.java

@@ -126,7 +126,7 @@ public class CteGenerateSeriesFunction extends NumberSeriesGenerateSeriesFunctio
};
}
protected static class CteGenerateSeriesQueryTransformer extends NumberSeriesQueryTransformer {
public static class CteGenerateSeriesQueryTransformer extends NumberSeriesQueryTransformer {
public static final String NAME = "max_series";
protected final int maxSeriesSize;
@@ -146,6 +146,10 @@ public class CteGenerateSeriesFunction extends NumberSeriesGenerateSeriesFunctio
}
protected CteStatement createSeriesCte(SqmToSqlAstConverter converter) {
return createSeriesCte( maxSeriesSize, converter );
}
public static CteStatement createSeriesCte(int maxSeriesSize, SqmToSqlAstConverter converter) {
final BasicType<Long> longType = converter.getCreationContext().getTypeConfiguration()
.getBasicTypeForJavaType( Long.class );
final Expression one = new UnparsedNumericLiteral<>( "1", NumericTypeCategory.LONG, longType );
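The now-public static createSeriesCte() builds the shared max_series CTE that both the unnest and json_table emulations join against. The generated SQL has roughly this recursive shape (a sketch; the exact column name and literals follow the translator):

public class MaxSeriesCteSketch {
	// 1-based series of at most maxSeriesSize rows; consumers index JSON arrays
	// with (i.i-1), as DB2UnnestFunction and DB2JsonTableFunction do below
	static final String CTE_SHAPE = "with max_series(i) as ("
			+ "select 1 from (values (0)) union all "
			+ "select i+1 from max_series where i<10000)";
}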

View File: DB2UnnestFunction.java

@@ -0,0 +1,156 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect.function.array;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.function.CteGenerateSeriesFunction;
import org.hibernate.dialect.function.json.DB2JsonTableFunction;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.BasicValuedModelPart;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.function.SelfRenderingSqmSetReturningFunction;
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import java.util.List;
/**
* DB2 unnest function.
* Unnesting JSON arrays requires more effort since DB2 doesn't support arrays in {@code json_table()}.
* See {@link org.hibernate.dialect.function.json.DB2JsonTableFunction} for more details.
*
* @see org.hibernate.dialect.function.json.DB2JsonTableFunction
*/
public class DB2UnnestFunction extends UnnestFunction {
private final int maximumArraySize;
public DB2UnnestFunction(int maximumArraySize) {
super( "v", "i" );
this.maximumArraySize = maximumArraySize;
}
@Override
protected <T> SelfRenderingSqmSetReturningFunction<T> generateSqmSetReturningFunctionExpression(List<? extends SqmTypedNode<?>> arguments, QueryEngine queryEngine) {
return new SelfRenderingSqmSetReturningFunction<>(
this,
this,
arguments,
getArgumentsValidator(),
getSetReturningTypeResolver(),
queryEngine.getCriteriaBuilder(),
getName()
) {
@Override
public TableGroup convertToSqlAst(NavigablePath navigablePath, String identifierVariable, boolean lateral, boolean canUseInnerJoins, boolean withOrdinality, SqmToSqlAstConverter walker) {
walker.registerQueryTransformer( new DB2JsonTableFunction.SeriesQueryTransformer( maximumArraySize ) );
return super.convertToSqlAst( navigablePath, identifierVariable, lateral, canUseInnerJoins, withOrdinality, walker );
}
};
}
@Override
protected void renderJsonTable(
SqlAppender sqlAppender,
Expression array,
BasicPluralType<?, ?> pluralType,
@Nullable SqlTypedMapping sqlTypedMapping,
AnonymousTupleTableGroupProducer tupleType,
String tableIdentifierVariable,
SqlAstTranslator<?> walker) {
sqlAppender.appendSql( "lateral(select " );
final ModelPart elementPart = tupleType.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null );
if ( elementPart == null ) {
sqlAppender.append( "t.*" );
}
else {
final BasicValuedModelPart elementMapping = elementPart.asBasicValuedModelPart();
final boolean isBoolean = elementMapping.getSingleJdbcMapping().getJdbcType().isBoolean();
if ( isBoolean ) {
sqlAppender.appendSql( "decode(" );
}
sqlAppender.appendSql( "json_value('{\"a\":'||" );
array.accept( walker );
sqlAppender.appendSql( "||'}','$.a['||(i.i-1)||']'" );
if ( isBoolean ) {
sqlAppender.appendSql( ')' );
final JdbcMapping type = elementMapping.getSingleJdbcMapping();
//noinspection unchecked
final JdbcLiteralFormatter<Object> jdbcLiteralFormatter = type.getJdbcLiteralFormatter();
final SessionFactoryImplementor sessionFactory = walker.getSessionFactory();
final Dialect dialect = sessionFactory.getJdbcServices().getDialect();
final WrapperOptions wrapperOptions = sessionFactory.getWrapperOptions();
final Object trueValue = type.convertToRelationalValue( true );
final Object falseValue = type.convertToRelationalValue( false );
sqlAppender.append( ",'true'," );
jdbcLiteralFormatter.appendJdbcLiteral( sqlAppender, trueValue, dialect, wrapperOptions );
sqlAppender.append( ",'false'," );
jdbcLiteralFormatter.appendJdbcLiteral( sqlAppender, falseValue, dialect, wrapperOptions );
sqlAppender.append( ") " );
}
else {
sqlAppender.appendSql( " returning " );
sqlAppender.append( getDdlType( elementMapping, walker ) );
sqlAppender.append( ") " );
}
sqlAppender.append( elementMapping.getSelectionExpression() );
}
final ModelPart indexPart = tupleType.findSubPart( CollectionPart.Nature.INDEX.getName(), null );
if ( indexPart != null ) {
sqlAppender.appendSql( ",i.i " );
sqlAppender.append( indexPart.asBasicValuedModelPart().getSelectionExpression() );
}
sqlAppender.appendSql( " from " );
sqlAppender.appendSql( CteGenerateSeriesFunction.CteGenerateSeriesQueryTransformer.NAME );
sqlAppender.appendSql( " i" );
if ( elementPart == null ) {
sqlAppender.appendSql( " join json_table(json_query('{\"a\":'||" );
array.accept( walker );
sqlAppender.appendSql( "||'}','$.a['||(i.i-1)||']'),'strict $' columns(" );
tupleType.forEachSelectable( 0, (selectionIndex, selectableMapping) -> {
if ( !CollectionPart.Nature.INDEX.getName().equals( selectableMapping.getSelectableName() ) ) {
if ( selectionIndex == 0 ) {
sqlAppender.append( ' ' );
}
else {
sqlAppender.append( ',' );
}
sqlAppender.append( selectableMapping.getSelectionExpression() );
sqlAppender.append( ' ' );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.appendSql( " path '$." );
sqlAppender.append( selectableMapping.getSelectableName() );
sqlAppender.appendSql( '\'' );
}
} );
sqlAppender.appendSql( ") error on error) t on json_exists('{\"a\":'||" );
array.accept( walker );
sqlAppender.appendSql( "||'}','$.a['||(i.i-1)||']'))" );
}
else {
sqlAppender.appendSql( " where json_exists('{\"a\":'||" );
array.accept( walker );
sqlAppender.appendSql( "||'}','$.a['||(i.i-1)||']'))" );
}
}
}
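For a basic element type, renderJsonTable() above emits a lateral subquery of roughly the following shape for an array column b.tags (names and the varchar DDL type are assumed; boolean elements go through the decode() branch instead):

public class UnnestEmulationSketch {
	static final String SQL_SHAPE = "lateral(select "
			+ "json_value('{\"a\":'||b.tags||'}','$.a['||(i.i-1)||']' returning varchar(255)) v"
			+ ",i.i i"
			+ " from max_series i"
			+ " where json_exists('{\"a\":'||b.tags||'}','$.a['||(i.i-1)||']'))";
}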

View File: DB2JsonTableFunction.java

@@ -6,9 +6,18 @@ package org.hibernate.dialect.function.json;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.QueryException;
import org.hibernate.dialect.function.CteGenerateSeriesFunction;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.function.SelfRenderingSqmSetReturningFunction;
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.query.sqm.tree.expression.SqmExpression;
import org.hibernate.query.sqm.tree.expression.SqmJsonTableFunction;
import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.cte.CteContainer;
import org.hibernate.sql.ast.tree.expression.CastTarget;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.JsonExistsErrorBehavior;
@@ -23,23 +32,60 @@ import org.hibernate.sql.ast.tree.expression.JsonTableQueryColumnDefinition;
import org.hibernate.sql.ast.tree.expression.JsonTableValueColumnDefinition;
import org.hibernate.sql.ast.tree.expression.JsonValueEmptyBehavior;
import org.hibernate.sql.ast.tree.expression.JsonValueErrorBehavior;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.QueryTransformer;
import org.hibernate.sql.ast.tree.from.FunctionTableGroup;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.spi.TypeConfiguration;
import java.util.List;
/**
* DB2 json_table function.
This implementation/emulation goes to great lengths to ensure Hibernate ORM can provide the same {@code json_table()}
experience on DB2 that other dialects provide.
* The most notable limitation of the DB2 function is that it doesn't support JSON arrays,
* so this emulation uses a series CTE called {@code gen_} with 10_000 rows to join
* so this emulation uses a series CTE called {@code max_series} with 10_000 rows to join
each array element separately, querying it with {@code json_query()} at the respective index before passing it to {@code json_table()}.
* Another notable limitation of the DB2 function is that it doesn't support nested column paths,
* which requires emulation by joining each nesting with a separate {@code json_table()}.
*/
public class DB2JsonTableFunction extends JsonTableFunction {
public DB2JsonTableFunction(TypeConfiguration typeConfiguration) {
private final int maximumSeriesSize;
public DB2JsonTableFunction(int maximumSeriesSize, TypeConfiguration typeConfiguration) {
super( typeConfiguration );
this.maximumSeriesSize = maximumSeriesSize;
}
@Override
protected <T> SelfRenderingSqmSetReturningFunction<T> generateSqmSetReturningFunctionExpression(List<? extends SqmTypedNode<?>> sqmArguments, QueryEngine queryEngine) {
//noinspection unchecked
return new SqmJsonTableFunction<>(
this,
this,
getArgumentsValidator(),
getSetReturningTypeResolver(),
queryEngine.getCriteriaBuilder(),
(SqmExpression<?>) sqmArguments.get( 0 ),
sqmArguments.size() > 1 ? (SqmExpression<String>) sqmArguments.get( 1 ) : null
) {
@Override
public TableGroup convertToSqlAst(NavigablePath navigablePath, String identifierVariable, boolean lateral, boolean canUseInnerJoins, boolean withOrdinality, SqmToSqlAstConverter walker) {
final FunctionTableGroup tableGroup = (FunctionTableGroup) super.convertToSqlAst( navigablePath, identifierVariable, lateral, canUseInnerJoins, withOrdinality, walker );
final JsonTableArguments arguments = JsonTableArguments.extract( tableGroup.getPrimaryTableReference().getFunctionExpression().getArguments() );
final Expression jsonPath = arguments.jsonPath();
final boolean isArray = !(jsonPath instanceof Literal literal)
|| isArrayAccess( (String) literal.getLiteralValue() );
if ( isArray || hasNestedArray( arguments.columnsClause() ) ) {
walker.registerQueryTransformer( new SeriesQueryTransformer( maximumSeriesSize ) );
}
return tableGroup;
}
};
}
@Override
@@ -55,22 +101,13 @@ public class DB2JsonTableFunction extends JsonTableFunction {
final Expression jsonDocument = arguments.jsonDocument();
final Expression jsonPath = arguments.jsonPath();
final boolean isArray = isArrayAccess( jsonPath, walker );
sqlAppender.appendSql( "lateral(" );
if ( isArray || hasNestedArray( arguments.columnsClause() ) ) {
// DB2 doesn't support arrays in json_table(), so a series table to join individual elements is needed
sqlAppender.appendSql( "with gen_(v) as(select 0 from (values (0)) union all " );
sqlAppender.appendSql( "select i.v+1 from gen_ i where i.v<10000)" );
}
sqlAppender.appendSql( "select" );
sqlAppender.appendSql( "lateral(select" );
renderColumnSelects( sqlAppender, arguments.columnsClause(), 0, isArray );
sqlAppender.appendSql( " from " );
if ( isArray ) {
sqlAppender.appendSql( " from gen_ i join " );
}
else {
sqlAppender.appendSql( " from " );
sqlAppender.appendSql( CteGenerateSeriesFunction.CteGenerateSeriesQueryTransformer.NAME );
sqlAppender.appendSql( " i join " );
}
sqlAppender.appendSql( "json_table(" );
// DB2 json functions only work when passing object documents,
@@ -87,8 +124,33 @@ public class DB2JsonTableFunction extends JsonTableFunction {
sqlAppender.appendSql( " error on error) t0" );
if ( isArray ) {
sqlAppender.appendSql( " on json_exists('{\"a\":'||" );
appendJsonDocument( sqlAppender, jsonPath, jsonDocument, arguments.passingClause(), isArray, walker );
sqlAppender.appendSql( "||'}','$.a['||i.v||']')" );
if ( jsonPath != null ) {
final String jsonPathString;
if ( arguments.passingClause() != null ) {
jsonPathString = JsonPathHelper.inlinedJsonPathIncludingPassingClause( jsonPath, arguments.passingClause(), walker );
}
else {
jsonPathString = walker.getLiteralValue( jsonPath );
}
if ( jsonPathString.endsWith( "[*]" ) ) {
jsonDocument.accept( walker );
sqlAppender.appendSql( "||'}'," );
final String adaptedJsonPath = jsonPathString.substring( 0, jsonPathString.length() - 3 );
sqlAppender.appendSingleQuoteEscapedString( adaptedJsonPath.replace( "$", "$.a" ) );
sqlAppender.appendSql( "||'['||(i.i-1)||']')" );
}
else {
sqlAppender.appendSql( "json_query('{\"a\":'||" );
jsonDocument.accept( walker );
sqlAppender.appendSql( "||'}'," );
sqlAppender.appendSingleQuoteEscapedString( jsonPathString.replace( "$", "$.a" ) );
sqlAppender.appendSql( " with wrapper)||'}','$.a['||(i.i-1)||']')" );
}
}
else {
jsonDocument.accept( walker );
sqlAppender.appendSql( "||'}','$.a['||(i.i-1)||']')" );
}
}
renderNestedColumnJoins( sqlAppender, arguments.columnsClause(), 0, walker );
sqlAppender.appendSql( ')' );
@@ -97,27 +159,57 @@ public class DB2JsonTableFunction extends JsonTableFunction {
private static void appendJsonDocument(SqlAppender sqlAppender, Expression jsonPath, Expression jsonDocument, JsonPathPassingClause passingClause, boolean isArray, SqlAstTranslator<?> walker) {
if ( jsonPath != null ) {
sqlAppender.appendSql( "json_query(" );
jsonDocument.accept( walker );
sqlAppender.appendSql( ',' );
if ( passingClause != null ) {
JsonPathHelper.appendInlinedJsonPathIncludingPassingClause(
sqlAppender,
"",
jsonPath,
passingClause,
walker
);
if ( isArray ) {
final String jsonPathString;
if ( passingClause != null ) {
jsonPathString = JsonPathHelper.inlinedJsonPathIncludingPassingClause( jsonPath, passingClause, walker );
}
else {
jsonPathString = walker.getLiteralValue( jsonPath );
}
if ( jsonPathString.endsWith( "[*]" ) ) {
sqlAppender.appendSql( "'{\"a\":'||" );
jsonDocument.accept( walker );
sqlAppender.appendSql( "||'}'," );
final String adaptedJsonPath = jsonPathString.substring( 0, jsonPathString.length() - 3 );
sqlAppender.appendSingleQuoteEscapedString( adaptedJsonPath.replace( "$", "$.a" ) );
sqlAppender.appendSql( "||'['||(i.i-1)||']'" );
}
else {
sqlAppender.appendSql( "'{\"a\":'||" );
sqlAppender.appendSql( "json_query('{\"a\":'||" );
jsonDocument.accept( walker );
sqlAppender.appendSql( "||'}'," );
sqlAppender.appendSingleQuoteEscapedString( jsonPathString.replace( "$", "$.a" ) );
sqlAppender.appendSql( " with wrapper)||'}','$.a['||(i.i-1)||']'" );
}
}
else {
jsonPath.accept( walker );
}
if ( isArray ) {
sqlAppender.appendSql( " with wrapper" );
jsonDocument.accept( walker );
sqlAppender.appendSql( ',' );
if ( passingClause != null ) {
JsonPathHelper.appendInlinedJsonPathIncludingPassingClause(
sqlAppender,
"",
jsonPath,
passingClause,
walker
);
}
else {
jsonPath.accept( walker );
}
}
sqlAppender.appendSql( ')' );
}
else {
if ( isArray ) {
sqlAppender.appendSql( "json_query('{\"a\":'||" );
}
jsonDocument.accept( walker );
if ( isArray ) {
sqlAppender.appendSql( "||'}','$.a['||(i.i-1)||']')" );
}
}
}
@@ -161,32 +253,33 @@ public class DB2JsonTableFunction extends JsonTableFunction {
sqlAppender.appendSql( " left join lateral (select" );
renderColumnSelects( sqlAppender, nestedColumnDefinition.columns(), nextClauseLevel, isArray );
sqlAppender.appendSql( " from" );
sqlAppender.appendSql( " from " );
if ( isArray ) {
// When the JSON path indicates that the document is an array,
// join the `gen_` CTE to be able to use the respective array element in json_table().
// join the `max_series` CTE to be able to use the respective array element in json_table().
// DB2 json functions only work when passing object documents,
// which is why results are packed in shell object `{"a":...}`
sqlAppender.appendSql( " gen_ i join json_table('{\"a\":'||json_query('{\"a\":'||t" );
sqlAppender.appendSql( CteGenerateSeriesFunction.CteGenerateSeriesQueryTransformer.NAME );
sqlAppender.appendSql( " i join json_table('{\"a\":'||json_query('{\"a\":'||t" );
sqlAppender.appendSql( clauseLevel );
sqlAppender.appendSql( ".nested_" );
sqlAppender.appendSql( nextClauseLevel );
sqlAppender.appendSql( "_||'}','$.a['||i.v||']')||'}','strict $'" );
sqlAppender.appendSql( "_||'}','$.a['||(i.i-1)||']')||'}','strict $'" );
// Since the query results are packed in a shell object `{"a":...}`,
// the JSON path for each column needs to be prefixed with `$.a`
renderColumns( sqlAppender, nestedColumnDefinition.columns(), nextClauseLevel, "$.a", walker );
sqlAppender.appendSql( " error on error) t" );
sqlAppender.appendSql( nextClauseLevel );
// Emulation of arrays via `gen_` sequence requires a join condition to check if an array element exists
// Emulation of arrays via `max_series` sequence requires a join condition to check if an array element exists
sqlAppender.appendSql( " on json_exists('{\"a\":'||t" );
sqlAppender.appendSql( clauseLevel );
sqlAppender.appendSql( ".nested_" );
sqlAppender.appendSql( nextClauseLevel );
sqlAppender.appendSql( "_||'}','$.a['||i.v||']')" );
sqlAppender.appendSql( "_||'}','$.a['||(i.i-1)||']')" );
}
else {
sqlAppender.appendSql( " json_table(t" );
sqlAppender.appendSql( "json_table(t" );
sqlAppender.appendSql( clauseLevel );
sqlAppender.appendSql( ".nested_" );
sqlAppender.appendSql( nextClauseLevel );
@@ -237,8 +330,7 @@ public class DB2JsonTableFunction extends JsonTableFunction {
// DB2 doesn't support the for ordinality syntax in json_table() since it has no array support either
if ( isArray ) {
// If the document is an array, a series table with alias `i` is joined to emulate array support.
// Since the value of the series is 0 based, we add 1 to obtain the ordinality value
sqlAppender.appendSql( "i.v+1 " );
sqlAppender.appendSql( "i.i " );
}
else {
// The ordinality for non-array documents always is trivially 1
@@ -435,4 +527,21 @@ public class DB2JsonTableFunction extends JsonTableFunction {
sqlAppender.appendSql( definition.name() );
sqlAppender.appendSql( " clob format json path '$'" );
}
public static class SeriesQueryTransformer implements QueryTransformer {
private final int maxSeriesSize;
public SeriesQueryTransformer(int maxSeriesSize) {
this.maxSeriesSize = maxSeriesSize;
}
@Override
public QuerySpec transform(CteContainer cteContainer, QuerySpec querySpec, SqmToSqlAstConverter converter) {
if ( cteContainer.getCteStatement( CteGenerateSeriesFunction.CteGenerateSeriesQueryTransformer.NAME ) == null ) {
cteContainer.addCteStatement( CteGenerateSeriesFunction.CteGenerateSeriesQueryTransformer.createSeriesCte( maxSeriesSize, converter ) );
}
return querySpec;
}
}
}
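End to end, a json_table() over an array document on DB2 thus becomes a lateral join between max_series and DB2's native single-object json_table(). A hedged sketch for json_table(doc,'$[*]' columns(name varchar(20) path '$.name')), modelled on the nested-join rendering above; the top-level rendering differs in detail:

public class JsonTableEmulationSketch {
	static final String SQL_SHAPE = "lateral(select t0.name,i.i"
			+ " from max_series i join json_table("
			+ "'{\"a\":'||json_query('{\"a\":'||doc||'}','$.a['||(i.i-1)||']')||'}',"
			+ "'strict $' columns(name varchar(20) path '$.a.name')"
			+ " error on error) t0"
			+ " on json_exists('{\"a\":'||doc||'}','$.a['||(i.i-1)||']'))";
}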

View File: JsonTableFunction.java

@@ -62,7 +62,7 @@ public class JsonTableFunction extends AbstractSqmSelfRenderingSetReturningFunct
"json_table",
new ArgumentTypesValidator(
StandardArgumentsValidators.between( 1, 2 ),
FunctionParameterType.JSON,
FunctionParameterType.IMPLICIT_JSON,
FunctionParameterType.STRING
),
setReturningFunctionTypeResolver,

View File: BasicValue.java

@@ -971,7 +971,7 @@ public class BasicValue extends SimpleValue implements JdbcTypeIndicators, Resol
return aggregateColumn == null
? jdbcTypeCode
: getDialect().getAggregateSupport()
.aggregateComponentSqlTypeCode( aggregateColumn.getSqlTypeCode( getMetadata() ), jdbcTypeCode );
.aggregateComponentSqlTypeCode( aggregateColumn.getType().getJdbcType().getDefaultSqlTypeCode(), jdbcTypeCode );
}
@Override

View File: JdbcDateJavaType.java

@@ -238,7 +238,7 @@ public class JdbcDateJavaType extends AbstractTemporalJavaType<Date> {
return java.sql.Date.valueOf( accessor.query( LocalDate::from ) );
}
catch ( DateTimeParseException pe) {
throw new HibernateException( "could not parse time string " + charSequence, pe );
throw new HibernateException( "could not parse time string " + subSequence( charSequence, start, end ), pe );
}
}
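This fix, and the matching ones in JdbcTimeJavaType and JdbcTimestampJavaType below, address the same defect: fromEncodedString() parses only the [start,end) slice, but the old message printed the entire backing CharSequence. A tiny illustration with a hypothetical buffer and offsets:

public class ParseErrorMessageDemo {
	public static void main(String[] args) {
		CharSequence buffer = "id=42,date=1970-13-01,flag=true";
		int start = 11;
		int end = 21;
		// Before the fix the whole buffer leaked into the exception message;
		// now only the slice that actually failed to parse is reported:
		System.out.println( "could not parse time string " + buffer.subSequence( start, end ) );
		// prints: could not parse time string 1970-13-01
	}
}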

View File: JdbcTimeJavaType.java

@@ -234,7 +234,7 @@ public class JdbcTimeJavaType extends AbstractTemporalJavaType<Date> {
return java.sql.Time.valueOf( accessor.query( LocalTime::from ) );
}
catch ( DateTimeParseException pe) {
throw new HibernateException( "could not parse time string " + charSequence, pe );
throw new HibernateException( "could not parse time string " + subSequence( charSequence, start, end ), pe );
}
}

View File: JdbcTimestampJavaType.java

@@ -236,7 +236,7 @@ public class JdbcTimestampJavaType extends AbstractTemporalJavaType<Date> implem
return timestamp;
}
catch ( DateTimeParseException pe) {
throw new HibernateException( "could not parse timestamp string " + charSequence, pe );
throw new HibernateException( "could not parse timestamp string " + subSequence( charSequence, start, end ), pe );
}
}

View File: OffsetDateTimeJavaType.java

@@ -13,15 +13,16 @@ import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import org.hibernate.HibernateException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.util.CharSequenceHelper;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.jdbc.JdbcType;
@@ -32,6 +33,7 @@ import jakarta.persistence.TemporalType;
import static java.time.format.DateTimeFormatter.ISO_LOCAL_DATE_TIME;
import static java.time.format.DateTimeFormatter.ISO_OFFSET_DATE_TIME;
import static org.hibernate.internal.util.CharSequenceHelper.subSequence;
/**
* Java type descriptor for the {@link OffsetDateTime} type.
@@ -95,16 +97,19 @@ }
}
@Override
public OffsetDateTime fromEncodedString(CharSequence string, int start, int end) {
final TemporalAccessor temporalAccessor = PARSE_FORMATTER.parse(
CharSequenceHelper.subSequence( string, start, end )
);
if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
return OffsetDateTime.from( temporalAccessor );
public OffsetDateTime fromEncodedString(CharSequence charSequence, int start, int end) {
try {
final TemporalAccessor temporalAccessor = PARSE_FORMATTER.parse( subSequence( charSequence, start, end ) );
if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
return OffsetDateTime.from( temporalAccessor );
}
else {
// For databases that don't have timezone support, we encode timestamps at UTC, so allow parsing that as well
return LocalDateTime.from( temporalAccessor ).atOffset( ZoneOffset.UTC );
}
}
else {
// For databases that don't have timezone support, we encode timestamps at UTC, so allow parsing that as well
return LocalDateTime.from( temporalAccessor ).atOffset( ZoneOffset.UTC );
catch ( DateTimeParseException pe) {
throw new HibernateException( "could not parse timestamp string " + subSequence( charSequence, start, end ), pe );
}
}
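The UTC fallback above can be reproduced in isolation. A self-contained sketch with an assumed lenient formatter (the real PARSE_FORMATTER is defined elsewhere in the class): a timestamp without an offset is interpreted as UTC rather than failing.

import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;

public class UtcFallbackDemo {
	public static void main(String[] args) {
		// Offset is optional, mirroring what PARSE_FORMATTER has to accept
		DateTimeFormatter formatter = new DateTimeFormatterBuilder()
				.append( DateTimeFormatter.ISO_LOCAL_DATE_TIME )
				.optionalStart().appendOffsetId().optionalEnd()
				.toFormatter();
		TemporalAccessor parsed = formatter.parse( "2024-11-06T17:29:29" ); // no offset present
		OffsetDateTime result = parsed.isSupported( ChronoField.OFFSET_SECONDS )
				? OffsetDateTime.from( parsed )
				: LocalDateTime.from( parsed ).atOffset( ZoneOffset.UTC ); // UTC fallback, as above
		System.out.println( result ); // 2024-11-06T17:29:29Z
	}
}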