HHH-18797 Add JSON aggregate support for HANA

Author: Christian Beikov — 2024-11-11 17:35:25 +01:00
parent 4d6f9baa93
commit d973dcc060
11 changed files with 620 additions and 47 deletions

View File

@ -52,6 +52,8 @@ import org.hibernate.dialect.HANASqlAstTranslator;
import org.hibernate.dialect.NullOrdering;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.RowLockStrategy;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.dialect.aggregate.HANAAggregateSupport;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.IntegralTimestampaddFunction;
import org.hibernate.dialect.identity.HANAIdentityColumnSupport;
@ -534,6 +536,11 @@ public class HANALegacyDialect extends Dialect {
};
}
/**
 * Returns the HANA-specific JSON aggregate support, or the no-op default when
 * the dialect version is too old (the decision is made by
 * {@code HANAAggregateSupport.valueOf}).
 */
@Override
public AggregateSupport getAggregateSupport() {
	final AggregateSupport aggregateSupport = HANAAggregateSupport.valueOf( this );
	return aggregateSupport;
}
/**
* HANA has no extract() function, but we can emulate
* it using the appropriate named functions instead of

View File

@ -13,6 +13,8 @@ import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.dialect.aggregate.HANAAggregateSupport;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.IntegralTimestampaddFunction;
import org.hibernate.dialect.identity.HANAIdentityColumnSupport;
@ -536,6 +538,11 @@ public class HANADialect extends Dialect {
};
}
/**
 * Returns the HANA-specific JSON aggregate support, or the no-op default when
 * the dialect version is too old (the decision is made by
 * {@code HANAAggregateSupport.valueOf}).
 */
@Override
public AggregateSupport getAggregateSupport() {
	final AggregateSupport aggregateSupport = HANAAggregateSupport.valueOf( this );
	return aggregateSupport;
}
/**
* HANA has no extract() function, but we can emulate
* it using the appropriate named functions instead of

View File

@ -0,0 +1,507 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect.aggregate;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.function.json.HANAJsonValueFunction;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.spi.TypeConfiguration;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import static org.hibernate.type.SqlTypes.ARRAY;
import static org.hibernate.type.SqlTypes.BIGINT;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.BLOB;
import static org.hibernate.type.SqlTypes.BOOLEAN;
import static org.hibernate.type.SqlTypes.DATE;
import static org.hibernate.type.SqlTypes.DECIMAL;
import static org.hibernate.type.SqlTypes.DOUBLE;
import static org.hibernate.type.SqlTypes.FLOAT;
import static org.hibernate.type.SqlTypes.INTEGER;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.NUMERIC;
import static org.hibernate.type.SqlTypes.REAL;
import static org.hibernate.type.SqlTypes.SMALLINT;
import static org.hibernate.type.SqlTypes.TIME;
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_UTC;
import static org.hibernate.type.SqlTypes.TINYINT;
import static org.hibernate.type.SqlTypes.UUID;
import static org.hibernate.type.SqlTypes.VARBINARY;
/**
 * {@code AggregateSupport} for SAP HANA, rendering the SQL needed to read individual
 * attributes out of — and write complete values into — an embeddable that is aggregated
 * into a single JSON or JSON-array column.
 * <p>
 * Reads are expressed through HANA's {@code json_value}/{@code json_query} functions.
 * Writes always replace the whole JSON object (see
 * {@link #aggregateComponentAssignmentExpression}); the replacement document is rebuilt
 * with {@code select ... from sys.dummy for json(...)} subqueries by the nested
 * {@code JsonWriteExpression} implementations below.
 */
public class HANAAggregateSupport extends AggregateSupportImpl {
	private static final AggregateSupport INSTANCE = new HANAAggregateSupport();
	// Shape of a nested-object read expression produced by this class:
	// "json_query(<base>,'$.<path>' error on error)".
	// determineParentPartExpression() recognizes this shape so that deeper path
	// segments can be spliced into the existing JSON path literal rather than
	// nesting json_query calls.
	private static final String JSON_QUERY_START = "json_query(";
	private static final String JSON_QUERY_JSON_END = "' error on error)";
	private HANAAggregateSupport() {
	}
	/**
	 * Returns the singleton HANA aggregate support when the dialect version is at least
	 * 2.0.40, otherwise the no-op {@code AggregateSupportImpl} default.
	 */
	public static AggregateSupport valueOf(Dialect dialect) {
		return dialect.getVersion().isSameOrAfter( 2, 0, 40 ) ? INSTANCE : AggregateSupportImpl.INSTANCE;
	}
	/**
	 * Renders the expression that reads one component column out of the aggregate
	 * JSON column, substituting it for {@code placeholder} in {@code template}.
	 * <p>
	 * Note: every branch appends a closing {@code '} right after {@code columnExpression};
	 * this terminates the JSON path literal opened by
	 * {@link #determineParentPartExpression} (which ends with {@code ,'$.} or an
	 * unterminated path segment).
	 */
	@Override
	public String aggregateComponentCustomReadExpression(
			String template,
			String placeholder,
			String aggregateParentReadExpression,
			String columnExpression,
			int aggregateColumnTypeCode,
			SqlTypedMapping column) {
		switch ( aggregateColumnTypeCode ) {
			case JSON:
			case JSON_ARRAY:
				final String parentPartExpression = determineParentPartExpression( aggregateParentReadExpression );
				switch ( column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode() ) {
					case BOOLEAN:
						// Booleans are stored as the JSON strings 'true'/'false';
						// map them back to the DDL type (numeric 1/0 or true/false).
						if ( SqlTypes.isNumericType( column.getJdbcMapping().getJdbcType().getDdlTypeCode() ) ) {
							return template.replace(
									placeholder,
									"case json_value(" + parentPartExpression + columnExpression + "') when 'true' then 1 when 'false' then 0 end"
							);
						}
						else {
							return template.replace(
									placeholder,
									"case json_value(" + parentPartExpression + columnExpression + "') when 'true' then true when 'false' then false end"
							);
						}
					case DATE:
					case TIME:
					case TIMESTAMP:
					case TIMESTAMP_UTC:
						// Temporal values come back as strings; cast to the column's DDL type.
						return template.replace(
								placeholder,
								"cast(json_value(" + parentPartExpression + columnExpression + "') as " + column.getColumnDefinition() + ")"
						);
					case BINARY:
					case VARBINARY:
					case LONG32VARBINARY:
					case BLOB:
						// We encode binary data as hex, so we have to decode here
						return template.replace(
								placeholder,
								"hextobin(json_value(" + parentPartExpression + columnExpression + "' error on error))"
						);
					case JSON:
					case JSON_ARRAY:
						// Nested JSON objects/arrays must be read with json_query,
						// which returns the JSON fragment itself.
						return template.replace(
								placeholder,
								"json_query(" + parentPartExpression + columnExpression + "' error on error)"
						);
					case UUID:
						// UUIDs stored as binary were hex-encoded on write; decode here.
						if ( SqlTypes.isBinaryType( column.getJdbcMapping().getJdbcType().getDdlTypeCode() ) ) {
							return template.replace(
									placeholder,
									"hextobin(json_value(" + parentPartExpression + columnExpression + "'))"
							);
						}
						// Fall-through intended
					default:
						// Plain scalar: json_value with an explicit "returning" type.
						return template.replace(
								placeholder,
								"json_value(" + parentPartExpression + columnExpression + "' returning " + HANAJsonValueFunction.jsonValueReturningType(
										column ) + " error on error)"
						);
				}
		}
		throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnTypeCode );
	}
	/**
	 * Produces the prefix for a JSON path addressing a child of the parent expression.
	 * If the parent is itself a {@code json_query(... error on error)} read produced by
	 * this class, its path literal is reopened and extended with {@code .}; otherwise a
	 * fresh path literal {@code ,'$.} is started. Either way the caller appends the
	 * column name and then the closing {@code '}.
	 */
	private static String determineParentPartExpression(String aggregateParentReadExpression) {
		final String parentPartExpression;
		if ( aggregateParentReadExpression.startsWith( JSON_QUERY_START ) && aggregateParentReadExpression.endsWith( JSON_QUERY_JSON_END ) ) {
			parentPartExpression = aggregateParentReadExpression.substring( JSON_QUERY_START.length(), aggregateParentReadExpression.length() - JSON_QUERY_JSON_END.length() ) + ".";
		}
		else {
			parentPartExpression = aggregateParentReadExpression + ",'$.";
		}
		return parentPartExpression;
	}
	/**
	 * Wraps a column's custom write expression so the value serializes into JSON:
	 * binary data (and binary-backed UUIDs) are hex-encoded, timestamps are formatted
	 * as ISO-8601 strings; everything else passes through unchanged.
	 */
	private static String jsonCustomWriteExpression(String customWriteExpression, JdbcMapping jdbcMapping) {
		final int sqlTypeCode = jdbcMapping.getJdbcType().getDefaultSqlTypeCode();
		switch ( sqlTypeCode ) {
			case UUID:
				// Character-typed UUIDs need no encoding.
				if ( !SqlTypes.isBinaryType( jdbcMapping.getJdbcType().getDdlTypeCode() ) ) {
					return customWriteExpression;
				}
				// Fall-through intended
			case BINARY:
			case VARBINARY:
			case LONG32VARBINARY:
			case BLOB:
				// We encode binary data as hex
				return "bintohex(" + customWriteExpression + ")";
			case TIMESTAMP:
				return "to_varchar(" + customWriteExpression + ",'YYYY-MM-DD\"T\"HH24:MI:SS.FF9')";
			case TIMESTAMP_UTC:
				return "to_varchar(" + customWriteExpression + ",'YYYY-MM-DD\"T\"HH24:MI:SS.FF9\"Z\"')";
			default:
				return customWriteExpression;
		}
	}
	/**
	 * Assignment target for updating a component of the aggregate: HANA offers no
	 * partial JSON update here, so the whole aggregate column is assigned.
	 */
	@Override
	public String aggregateComponentAssignmentExpression(
			String aggregateParentAssignmentExpression,
			String columnExpression,
			int aggregateColumnTypeCode,
			Column column) {
		switch ( aggregateColumnTypeCode ) {
			case JSON:
			case JSON_ARRAY:
				// For JSON we always have to replace the whole object
				return aggregateParentAssignmentExpression;
		}
		throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnTypeCode );
	}
	/**
	 * No custom write expression is needed for JSON aggregates ({@code null} means
	 * plain assignment); any other aggregate type is unsupported on HANA.
	 */
	@Override
	public String aggregateCustomWriteExpression(
			AggregateColumn aggregateColumn,
			List<Column> aggregatedColumns) {
		// We need to know what array this is STRUCT_ARRAY/JSON_ARRAY/XML_ARRAY,
		// which we can easily get from the type code of the aggregate column
		final int sqlTypeCode = aggregateColumn.getType().getJdbcType().getDefaultSqlTypeCode();
		switch ( sqlTypeCode == SqlTypes.ARRAY ? aggregateColumn.getTypeCode() : sqlTypeCode ) {
			case JSON:
			case JSON_ARRAY:
				return null;
		}
		throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumn.getTypeCode() );
	}
	/**
	 * Components of a JSON aggregate keep their own SQL type code, except that
	 * ARRAY components become JSON_ARRAY.
	 */
	@Override
	public int aggregateComponentSqlTypeCode(int aggregateColumnSqlTypeCode, int columnSqlTypeCode) {
		if ( aggregateColumnSqlTypeCode == JSON ) {
			return columnSqlTypeCode == ARRAY ? JSON_ARRAY : columnSqlTypeCode;
		}
		else {
			return columnSqlTypeCode;
		}
	}
	// JSON aggregates need the custom renderer below to rebuild the document on update.
	@Override
	public boolean requiresAggregateCustomWriteExpressionRenderer(int aggregateSqlTypeCode) {
		return aggregateSqlTypeCode == JSON;
	}
	@Override
	public WriteExpressionRenderer aggregateCustomWriteExpressionRenderer(
			SelectableMapping aggregateColumn,
			SelectableMapping[] columnsToUpdate,
			TypeConfiguration typeConfiguration) {
		final int aggregateSqlTypeCode = aggregateColumn.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode();
		switch ( aggregateSqlTypeCode ) {
			case JSON:
				return jsonAggregateColumnWriter( aggregateColumn, columnsToUpdate );
		}
		throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateSqlTypeCode );
	}
	private WriteExpressionRenderer jsonAggregateColumnWriter(
			SelectableMapping aggregateColumn,
			SelectableMapping[] columns) {
		return new RootJsonWriteExpression( aggregateColumn, columns );
	}
	/**
	 * One node of the write-expression tree: either a nested JSON object
	 * ({@link AggregateJsonWriteExpression}), an updated leaf value
	 * ({@link BasicJsonWriteExpression}), or an untouched value copied from the
	 * existing document ({@link PassThroughExpression}).
	 */
	interface JsonWriteExpression {
		// Whether this node renders a JSON object/array (affects how the parent
		// concatenates it — see AggregateJsonWriteExpression#append).
		boolean isAggregate();
		void append(
				SqlAppender sb,
				String path,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression expression);
	}
	/**
	 * Write expression for a (nested) embeddable rendered as a JSON object.
	 * Holds child expressions keyed by selectable name, in insertion order.
	 */
	private static class AggregateJsonWriteExpression implements JsonWriteExpression {
		private final SelectableMapping selectableMapping;
		private final String columnDefinition;
		private final LinkedHashMap<String, JsonWriteExpression> subExpressions = new LinkedHashMap<>();
		private AggregateJsonWriteExpression(SelectableMapping selectableMapping, String columnDefinition) {
			this.selectableMapping = selectableMapping;
			this.columnDefinition = columnDefinition;
		}
		@Override
		public boolean isAggregate() {
			return true;
		}
		/**
		 * Builds the tree: walks each updated column's selectable path, creating
		 * intermediate aggregate nodes as needed, then registers the leaf write
		 * expression. Finally fills in pass-through nodes for columns that are
		 * not being updated, so the rebuilt document keeps their current values.
		 */
		protected void initializeSubExpressions(SelectableMapping aggregateColumn, SelectableMapping[] columns) {
			for ( SelectableMapping column : columns ) {
				final SelectablePath selectablePath = column.getSelectablePath();
				final SelectablePath[] parts = selectablePath.getParts();
				AggregateJsonWriteExpression currentAggregate = this;
				// parts[0] is the root aggregate itself; the last part is the leaf column.
				for ( int i = 1; i < parts.length - 1; i++ ) {
					final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) currentAggregate.selectableMapping.getJdbcMapping().getJdbcType();
					final EmbeddableMappingType embeddableMappingType = aggregateJdbcType.getEmbeddableMappingType();
					final int selectableIndex = embeddableMappingType.getSelectableIndex( parts[i].getSelectableName() );
					currentAggregate = (AggregateJsonWriteExpression) currentAggregate.subExpressions.computeIfAbsent(
							parts[i].getSelectableName(),
							k -> new AggregateJsonWriteExpression( embeddableMappingType.getSelectable( selectableIndex ), columnDefinition )
					);
				}
				final String customWriteExpression = column.getWriteExpression();
				currentAggregate.subExpressions.put(
						parts[parts.length - 1].getSelectableName(),
						new BasicJsonWriteExpression(
								column,
								jsonCustomWriteExpression( customWriteExpression, column.getJdbcMapping() )
						)
				);
			}
			passThroughUnsetSubExpressions( aggregateColumn );
		}
		/**
		 * Recursively adds {@link PassThroughExpression} nodes for every selectable of
		 * the embeddable type that has no explicit write expression yet.
		 */
		protected void passThroughUnsetSubExpressions(SelectableMapping aggregateColumn) {
			final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) aggregateColumn.getJdbcMapping().getJdbcType();
			final EmbeddableMappingType embeddableMappingType = aggregateJdbcType.getEmbeddableMappingType();
			final int jdbcValueCount = embeddableMappingType.getJdbcValueCount();
			for ( int i = 0; i < jdbcValueCount; i++ ) {
				final SelectableMapping selectableMapping = embeddableMappingType.getJdbcValueSelectable( i );
				final JsonWriteExpression jsonWriteExpression = subExpressions.get( selectableMapping.getSelectableName() );
				if ( jsonWriteExpression == null ) {
					subExpressions.put(
							selectableMapping.getSelectableName(),
							new PassThroughExpression( selectableMapping )
					);
				}
				else if ( jsonWriteExpression instanceof AggregateJsonWriteExpression writeExpression ) {
					writeExpression.passThroughUnsetSubExpressions( selectableMapping );
				}
			}
		}
		/**
		 * Renders the JSON object for this node.
		 * <p>
		 * Scalar children are rendered inside a
		 * {@code (select ... from sys.dummy for json('arraywrap'='no','omitnull'='no'))}
		 * subquery. Aggregate (JSON-valued) children cannot go through {@code for json}
		 * — presumably it would re-escape them as strings (TODO confirm against HANA
		 * docs) — so the trailing {@code '}'} of the subquery result is trimmed off and
		 * each aggregate child is concatenated as a {@code "key":<json>} pair, with a
		 * final {@code ||'}'} closing the object again.
		 */
		@Override
		public void append(
				SqlAppender sb,
				String path,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression expression) {
			final int aggregateCount = determineAggregateCount();
			if ( aggregateCount != 0 ) {
				// Reopen the object so aggregate children can be appended via concatenation.
				sb.append( "(trim(trailing '}' from " );
			}
			sb.append( "(select" );
			if ( aggregateCount != subExpressions.size() ) {
				// At least one scalar child: render them as columns of the for-json subquery.
				char separator = ' ';
				for ( Map.Entry<String, JsonWriteExpression> entry : subExpressions.entrySet() ) {
					final String column = entry.getKey();
					final JsonWriteExpression value = entry.getValue();
					if ( !value.isAggregate() ) {
						sb.append( separator );
						value.append( sb, path, translator, expression );
						sb.append( ' ' );
						sb.appendDoubleQuoteEscapedString( column );
						separator = ',';
					}
				}
				sb.append( " from sys.dummy for json('arraywrap'='no','omitnull'='no')" );
				sb.append( " returns " );
				sb.append( columnDefinition );
			}
			else {
				// Only aggregate children: start from an empty object literal.
				sb.append( " cast('{}' as " );
				sb.append( columnDefinition );
				sb.append( ") jsonresult from sys.dummy" );
			}
			sb.append( ')' );
			if ( aggregateCount != 0 ) {
				sb.append( ')' );
				final String parentPartExpression = determineParentPartExpression( path );
				// If scalar members were rendered, the first aggregate pair needs a comma.
				String separator = aggregateCount == subExpressions.size() ? " " : ",";
				for ( Map.Entry<String, JsonWriteExpression> entry : subExpressions.entrySet() ) {
					final String column = entry.getKey();
					final JsonWriteExpression value = entry.getValue();
					if ( value.isAggregate() ) {
						sb.append( "||'" );
						sb.append( separator );
						sb.append( '"' );
						sb.append( column );
						sb.append( "\":'||" );
						if ( value instanceof AggregateJsonWriteExpression ) {
							// Nested object: recurse with the sub-object's json_query path.
							final String subPath = "json_query(" + parentPartExpression + column + "' error on error)";
							value.append( sb, subPath, translator, expression );
						}
						else {
							// JSON-typed leaf: substitute the literal 'null' when absent.
							sb.append( "coalesce(" );
							value.append( sb, path, translator, expression );
							sb.append( ",'null')" );
						}
						separator = ",";
					}
				}
				sb.append( "||'}')" );
			}
		}
		// Number of children that render as JSON objects/arrays.
		private int determineAggregateCount() {
			int count = 0;
			for ( Map.Entry<String, JsonWriteExpression> entry : subExpressions.entrySet() ) {
				if ( entry.getValue().isAggregate() ) {
					count++;
				}
			}
			return count;
		}
	}
	/**
	 * Root of the write-expression tree; also the {@code WriteExpressionRenderer}
	 * entry point that prefixes the aggregate column with the table qualifier.
	 */
	private static class RootJsonWriteExpression extends AggregateJsonWriteExpression
			implements WriteExpressionRenderer {
		private final String path;
		RootJsonWriteExpression(SelectableMapping aggregateColumn, SelectableMapping[] columns) {
			super( aggregateColumn, aggregateColumn.getColumnDefinition() );
			path = aggregateColumn.getSelectionExpression();
			initializeSubExpressions( aggregateColumn, columns );
		}
		@Override
		public void render(
				SqlAppender sqlAppender,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression aggregateColumnWriteExpression,
				String qualifier) {
			final String basePath;
			if ( qualifier == null || qualifier.isBlank() ) {
				basePath = path;
			}
			else {
				basePath = qualifier + "." + path;
			}
			append( sqlAppender, basePath, translator, aggregateColumnWriteExpression );
		}
	}
	/**
	 * Leaf node for an updated column: renders the bind-parameter value wrapped in
	 * the column's custom write expression (split around the {@code ?} placeholder).
	 */
	private static class BasicJsonWriteExpression implements JsonWriteExpression {
		private final SelectableMapping selectableMapping;
		private final String customWriteExpressionStart;
		private final String customWriteExpressionEnd;
		BasicJsonWriteExpression(SelectableMapping selectableMapping, String customWriteExpression) {
			this.selectableMapping = selectableMapping;
			if ( customWriteExpression.equals( "?" ) ) {
				this.customWriteExpressionStart = "";
				this.customWriteExpressionEnd = "";
			}
			else {
				final String[] parts = StringHelper.split( "?", customWriteExpression );
				assert parts.length == 2;
				this.customWriteExpressionStart = parts[0];
				this.customWriteExpressionEnd = parts[1];
			}
		}
		@Override
		public boolean isAggregate() {
			return selectableMapping.getJdbcMapping().getJdbcType().isJson();
		}
		@Override
		public void append(
				SqlAppender sb,
				String path,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression expression) {
			sb.append( customWriteExpressionStart );
			// We use NO_UNTYPED here so that expressions which require type inference are casted explicitly,
			// since we don't know how the custom write expression looks like where this is embedded,
			// so we have to be pessimistic and avoid ambiguities
			translator.render( expression.getValueExpression( selectableMapping ), SqlAstNodeRenderingMode.NO_UNTYPED );
			sb.append( customWriteExpressionEnd );
		}
	}
	/**
	 * Leaf node for a column that is NOT being updated: re-reads its current value out
	 * of the existing JSON document (at {@code path}) so the rebuilt document keeps it.
	 * The per-type branches mirror {@code aggregateComponentCustomReadExpression}.
	 */
	private static class PassThroughExpression implements JsonWriteExpression {
		private final SelectableMapping selectableMapping;
		PassThroughExpression(SelectableMapping selectableMapping) {
			this.selectableMapping = selectableMapping;
		}
		@Override
		public boolean isAggregate() {
			return selectableMapping.getJdbcMapping().getJdbcType().isJson();
		}
		@Override
		public void append(
				SqlAppender sb,
				String path,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression expression) {
			final String parentPartExpression = determineParentPartExpression( path );
			switch ( selectableMapping.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode() ) {
				case BOOLEAN:
					// Booleans round-trip through the JSON strings 'true'/'false'.
					sb.append( "case json_value(" );
					sb.append( parentPartExpression );
					sb.append( selectableMapping.getSelectableName() );
					if ( SqlTypes.isNumericType( selectableMapping.getJdbcMapping().getJdbcType().getDdlTypeCode() ) ) {
						sb.append( "') when 'true' then 1 when 'false' then 0 end" );
					}
					else {
						sb.append( "') when 'true' then true when 'false' then false end" );
					}
					break;
				case TINYINT:
				case SMALLINT:
				case INTEGER:
				case BIGINT:
				case FLOAT:
				case REAL:
				case DOUBLE:
				case DECIMAL:
				case NUMERIC:
					// Numerics are re-read with an explicit returning type.
					sb.append( "json_value(" );
					sb.append( parentPartExpression );
					sb.append( selectableMapping.getSelectableName() );
					sb.append( "' returning " );
					sb.append( HANAJsonValueFunction.jsonValueReturningType( selectableMapping ) );
					sb.append( " error on error)" );
					break;
				case JSON:
				case JSON_ARRAY:
					// JSON fragments must be copied via json_query.
					sb.append( "json_query(" );
					sb.append( parentPartExpression );
					sb.append( selectableMapping.getSelectableName() );
					sb.append( "' error on error)" );
					break;
				default:
					// Everything else (including hex-encoded binary) is copied verbatim
					// as its JSON string representation.
					sb.append( "json_value(" );
					sb.append( parentPartExpression );
					sb.append( selectableMapping.getSelectableName() );
					sb.append( "' error on error)" );
					break;
			}
		}
	}
}

View File

@ -25,6 +25,7 @@ import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
@ -106,7 +107,7 @@ public class DB2UnnestFunction extends UnnestFunction {
}
else {
sqlAppender.appendSql( " returning " );
sqlAppender.append( getDdlType( elementMapping, walker ) );
sqlAppender.append( getDdlType( elementMapping, SqlTypes.JSON_ARRAY, walker ) );
sqlAppender.append( ") " );
}
@ -136,10 +137,10 @@ public class DB2UnnestFunction extends UnnestFunction {
}
sqlAppender.append( selectableMapping.getSelectionExpression() );
sqlAppender.append( ' ' );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.JSON_ARRAY, walker ) );
sqlAppender.appendSql( " path '$." );
sqlAppender.append( selectableMapping.getSelectableName() );
sqlAppender.appendSql( '\'' );
sqlAppender.appendSql( "' error on error" );
}
} );
sqlAppender.appendSql( ") error on error) t on json_exists('{\"a\":'||" );

View File

@ -11,7 +11,7 @@ import java.util.Set;
import org.hibernate.QueryException;
import org.hibernate.dialect.XmlHelper;
import org.hibernate.dialect.function.json.ExpressionTypeHelper;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.dialect.function.json.HANAJsonValueFunction;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.CollectionPart;
import org.hibernate.metamodel.mapping.EmbeddableValuedModelPart;
@ -54,7 +54,7 @@ import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.results.internal.SqlSelectionImpl;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
@ -74,7 +74,6 @@ public class HANAUnnestFunction extends UnnestFunction {
protected <T> SelfRenderingSqmSetReturningFunction<T> generateSqmSetReturningFunctionExpression(
List<? extends SqmTypedNode<?>> arguments,
QueryEngine queryEngine) {
//noinspection unchecked
return new SelfRenderingSqmSetReturningFunction<>(
this,
this,
@ -357,7 +356,7 @@ public class HANAUnnestFunction extends UnnestFunction {
}
else {
sqlAppender.append( ' ' );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.XML_ARRAY, walker ) );
sqlAppender.appendSql( " path '" );
sqlAppender.appendSql( selectableMapping.getSelectableName() );
sqlAppender.appendSql( "'" );
@ -378,7 +377,7 @@ public class HANAUnnestFunction extends UnnestFunction {
}
else {
sqlAppender.append( ' ' );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.XML_ARRAY, walker ) );
sqlAppender.appendSql( " path '" );
sqlAppender.appendSql( "." );
sqlAppender.appendSql( "'" );
@ -445,6 +444,15 @@ public class HANAUnnestFunction extends UnnestFunction {
}
}
/**
 * Resolves the DDL type for a json_table/xmltable column. For JSON-array
 * containers the resolved type is additionally mapped through
 * {@code HANAJsonValueFunction.jsonValueReturningType}, matching the types
 * HANA accepts in a json_value/json_table {@code returning} clause.
 */
@Override
protected String getDdlType(SqlTypedMapping sqlTypedMapping, int containerSqlTypeCode, SqlAstTranslator<?> translator) {
	final String resolvedType = super.getDdlType( sqlTypedMapping, containerSqlTypeCode, translator );
	return containerSqlTypeCode == SqlTypes.JSON_ARRAY
			? HANAJsonValueFunction.jsonValueReturningType( resolvedType )
			: resolvedType;
}
@Override
protected void renderJsonTable(
SqlAppender sqlAppender,
@ -454,12 +462,6 @@ public class HANAUnnestFunction extends UnnestFunction {
AnonymousTupleTableGroupProducer tupleType,
String tableIdentifierVariable,
SqlAstTranslator<?> walker) {
final BasicType<?> elementType = pluralType.getElementType();
final String columnType = walker.getSessionFactory().getTypeConfiguration().getDdlTypeRegistry().getTypeName(
elementType.getJdbcType().getDdlTypeCode(),
sqlTypedMapping == null ? Size.nil() : sqlTypedMapping.toSize(),
elementType
);
sqlAppender.appendSql( "json_table(" );
array.accept( walker );
@ -474,18 +476,14 @@ public class HANAUnnestFunction extends UnnestFunction {
sqlAppender.appendSql( "'," );
}
sqlAppender.appendSql( "nested path '$.v' columns (" );
sqlAppender.append( tupleType.getColumnNames().get( 0 ) );
sqlAppender.appendSql( ' ' );
sqlAppender.append( columnType );
sqlAppender.appendSql( " path '$')))" );
sqlAppender.appendSql( "nested path '$.v' columns" );
renderJsonTableColumns( sqlAppender, tupleType, walker, true );
sqlAppender.appendSql( "))" );
}
else {
sqlAppender.appendSql( ",'$[*]' columns(" );
sqlAppender.append( tupleType.getColumnNames().get( 0 ) );
sqlAppender.appendSql( ' ' );
sqlAppender.append( columnType );
sqlAppender.appendSql( " path '$'))" );
sqlAppender.appendSql( ",'$[*]' columns" );
renderJsonTableColumns( sqlAppender, tupleType, walker, true );
sqlAppender.appendSql( ")" );
}
}
@ -519,9 +517,11 @@ public class HANAUnnestFunction extends UnnestFunction {
separator = ',';
}
sqlAppender.appendSql( " from sys.dummy for json('arraywrap'='no')))||" );
sqlAppender.appendSql( "',\"v\":'||" );
sqlAppender.appendSql( "',\"v\":'||case when " );
argument.accept( walker );
sqlAppender.appendSql( "||'}'" );
sqlAppender.appendSql( " not like '[]' then " );
argument.accept( walker );
sqlAppender.appendSql( " end||'}'" );
}
@Override

View File

@ -13,6 +13,7 @@ import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.checkerframework.checker.nullness.qual.Nullable;
@ -60,7 +61,7 @@ public class SQLServerUnnestFunction extends UnnestFunction {
}
sqlAppender.append( selectableMapping.getSelectionExpression() );
sqlAppender.append( ' ' );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.JSON_ARRAY, walker ) );
sqlAppender.appendSql( " path '$." );
sqlAppender.append( selectableMapping.getSelectableName() );
sqlAppender.appendSql( '\'' );
@ -79,7 +80,7 @@ public class SQLServerUnnestFunction extends UnnestFunction {
}
sqlAppender.append( selectableMapping.getSelectionExpression() );
sqlAppender.append( ' ' );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.JSON_ARRAY, walker ) );
sqlAppender.appendSql( " path '$'" );
}
} );
@ -120,7 +121,7 @@ public class SQLServerUnnestFunction extends UnnestFunction {
sqlAppender.appendSql( "t.v.value('count(for $a in . return $a/../" );
sqlAppender.appendSql( collectionTags.elementName() );
sqlAppender.appendSql( "[.<<$a])+1','" );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.XML_ARRAY, walker ) );
sqlAppender.appendSql( "') " );
sqlAppender.appendSql( selectableMapping.getSelectionExpression() );
}
@ -128,7 +129,7 @@ public class SQLServerUnnestFunction extends UnnestFunction {
sqlAppender.appendSql( "t.v.value('");
sqlAppender.appendSql( selectableMapping.getSelectableName() );
sqlAppender.appendSql( "/text()[1]','" );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.XML_ARRAY, walker ) );
sqlAppender.appendSql( "') " );
sqlAppender.appendSql( selectableMapping.getSelectionExpression() );
}
@ -146,13 +147,13 @@ public class SQLServerUnnestFunction extends UnnestFunction {
sqlAppender.appendSql( "t.v.value('count(for $a in . return $a/../" );
sqlAppender.appendSql( collectionTags.elementName() );
sqlAppender.appendSql( "[.<<$a])+1','" );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.XML_ARRAY, walker ) );
sqlAppender.appendSql( "') " );
sqlAppender.appendSql( selectableMapping.getSelectionExpression() );
}
else {
sqlAppender.appendSql( "t.v.value('text()[1]','" );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.XML_ARRAY, walker ) );
sqlAppender.appendSql( "') " );
sqlAppender.appendSql( selectableMapping.getSelectionExpression() );
}

View File

@ -12,6 +12,7 @@ import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.checkerframework.checker.nullness.qual.Nullable;
@ -59,7 +60,7 @@ public class SybaseASEUnnestFunction extends UnnestFunction {
}
else {
sqlAppender.append( ' ' );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.XML_ARRAY, walker ) );
sqlAppender.appendSql( " path '" );
sqlAppender.appendSql( selectableMapping.getSelectableName() );
sqlAppender.appendSql( "'" );
@ -80,7 +81,7 @@ public class SybaseASEUnnestFunction extends UnnestFunction {
}
else {
sqlAppender.append( ' ' );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.XML_ARRAY, walker ) );
sqlAppender.appendSql( " path '" );
sqlAppender.appendSql( "." );
sqlAppender.appendSql( "'" );

View File

@ -66,7 +66,7 @@ public class UnnestFunction extends AbstractSqmSelfRenderingSetReturningFunction
}
}
protected String getDdlType(SqlTypedMapping sqlTypedMapping, SqlAstTranslator<?> translator) {
protected String getDdlType(SqlTypedMapping sqlTypedMapping, int containerSqlTypeCode, SqlAstTranslator<?> translator) {
final String columnDefinition = sqlTypedMapping.getColumnDefinition();
if ( columnDefinition != null ) {
return columnDefinition;
@ -88,11 +88,16 @@ public class UnnestFunction extends AbstractSqmSelfRenderingSetReturningFunction
SqlAstTranslator<?> walker) {
sqlAppender.appendSql( "json_table(" );
array.accept( walker );
sqlAppender.appendSql( ",'$[*]' columns(" );
sqlAppender.appendSql( ",'$[*]' columns" );
renderJsonTableColumns( sqlAppender, tupleType, walker, false );
sqlAppender.appendSql( ')' );
}
protected void renderJsonTableColumns(SqlAppender sqlAppender, AnonymousTupleTableGroupProducer tupleType, SqlAstTranslator<?> walker, boolean errorOnError) {
if ( tupleType.findSubPart( CollectionPart.Nature.ELEMENT.getName(), null ) == null ) {
tupleType.forEachSelectable( 0, (selectionIndex, selectableMapping) -> {
if ( selectionIndex == 0 ) {
sqlAppender.append( ' ' );
sqlAppender.append( '(' );
}
else {
sqlAppender.append( ',' );
@ -103,17 +108,20 @@ public class UnnestFunction extends AbstractSqmSelfRenderingSetReturningFunction
sqlAppender.append( " for ordinality" );
}
else {
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.JSON_ARRAY, walker ) );
sqlAppender.appendSql( " path '$." );
sqlAppender.append( selectableMapping.getSelectableName() );
sqlAppender.appendSql( '\'' );
if ( errorOnError ) {
sqlAppender.appendSql( " error on error" );
}
}
} );
}
else {
tupleType.forEachSelectable( 0, (selectionIndex, selectableMapping) -> {
if ( selectionIndex == 0 ) {
sqlAppender.append( ' ' );
sqlAppender.append( '(' );
}
else {
sqlAppender.append( ',' );
@ -124,12 +132,15 @@ public class UnnestFunction extends AbstractSqmSelfRenderingSetReturningFunction
}
else {
sqlAppender.append( ' ' );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.JSON_ARRAY, walker ) );
sqlAppender.appendSql( " path '$'" );
if ( errorOnError ) {
sqlAppender.appendSql( " error on error" );
}
}
} );
}
sqlAppender.appendSql( "))" );
sqlAppender.appendSql( ')' );
}
protected void renderXmlTable(
@ -165,7 +176,7 @@ public class UnnestFunction extends AbstractSqmSelfRenderingSetReturningFunction
}
else {
sqlAppender.append( ' ' );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.XML_ARRAY, walker ) );
sqlAppender.appendSql( " path '" );
sqlAppender.appendSql( selectableMapping.getSelectableName() );
sqlAppender.appendSql( "/text()" );
@ -187,7 +198,7 @@ public class UnnestFunction extends AbstractSqmSelfRenderingSetReturningFunction
}
else {
sqlAppender.append( ' ' );
sqlAppender.append( getDdlType( selectableMapping, walker ) );
sqlAppender.append( getDdlType( selectableMapping, SqlTypes.XML_ARRAY, walker ) );
sqlAppender.appendSql( " path '" );
sqlAppender.appendSql( "text()" );
sqlAppender.appendSql( "'" );

View File

@ -37,7 +37,7 @@ public class HANAJsonObjectAggFunction extends JsonObjectAggFunction {
throw new QueryException( "Can't emulate json_objectagg 'with unique keys' clause." );
}
sqlAppender.appendSql( "'{'||string_agg(" );
renderArgument( sqlAppender, arguments.key(), arguments.nullBehavior(), translator );
renderArgument( sqlAppender, arguments.key(), JsonNullBehavior.NULL, translator );
sqlAppender.appendSql( "||':'||" );
if ( caseWrapper ) {
if ( arguments.nullBehavior() != JsonNullBehavior.ABSENT ) {
@ -76,8 +76,11 @@ public class HANAJsonObjectAggFunction extends JsonObjectAggFunction {
}
sqlAppender.appendSql( "json_query((select " );
arg.accept( translator );
sqlAppender.appendSql(
" V from sys.dummy for json('arraywrap'='no','omitnull'='no') returns nvarchar(" + Integer.MAX_VALUE + ")),'$.V')" );
sqlAppender.appendSql( " V from sys.dummy for json('arraywrap'='no'" );
if ( nullBehavior != JsonNullBehavior.NULL ) {
sqlAppender.appendSql( ",'omitnull'='no'" );
}
sqlAppender.appendSql( ") returns nvarchar(" + Integer.MAX_VALUE + ")),'$.V')" );
if ( nullBehavior != JsonNullBehavior.NULL ) {
sqlAppender.appendSql( ",'null')" );
}

View File

@ -7,6 +7,7 @@ package org.hibernate.dialect.function.json;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.query.ReturnableType;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
@ -14,6 +15,8 @@ import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.sql.ast.spi.AbstractSqlAstTranslator.getCastTypeName;
/**
* HANA json_value function.
*/
@ -52,11 +55,34 @@ public class HANAJsonValueFunction extends JsonValueFunction {
}
}
/**
 * Determines the type to render in the {@code returning} clause of a HANA
 * {@code json_value} expression for the given column mapping.
 *
 * @param column the typed column mapping; must expose a non-null column definition
 * @return the adjusted returning type name derived from the column's DDL type
 */
public static String jsonValueReturningType(SqlTypedMapping column) {
	final String definition = column.getColumnDefinition();
	// The caller is expected to only pass columns with an explicit DDL type
	assert definition != null;
	return jsonValueReturningType( definition );
}
/**
 * Maps a DDL column definition to the type name usable in the {@code returning}
 * clause of HANA's {@code json_value} function.
 * <p>
 * Any length/precision suffix (e.g. {@code varchar(10)}) is ignored when matching
 * the base type name. Approximate numeric types collapse to {@code decimal},
 * small integer types widen to {@code integer}, and LOB types are replaced with
 * bounded character types; anything else is passed through unchanged.
 *
 * @param columnDefinition the raw DDL type string, possibly with a parenthesized size
 * @return the type name to use in the {@code returning} clause
 */
public static String jsonValueReturningType(String columnDefinition) {
	final int paren = columnDefinition.indexOf( '(' );
	final String baseName;
	if ( paren == -1 ) {
		baseName = columnDefinition;
	}
	else {
		baseName = columnDefinition.substring( 0, paren );
	}
	switch ( baseName ) {
		case "real":
		case "float":
		case "double":
		case "decimal":
			return "decimal";
		case "tinyint":
		case "smallint":
			return "integer";
		case "clob":
			return "varchar(5000)";
		case "nclob":
			return "nvarchar(5000)";
		default:
			return columnDefinition;
	}
}
@Override
protected void renderReturningClause(SqlAppender sqlAppender, JsonValueArguments arguments, SqlAstTranslator<?> walker) {
	// No return type for booleans, this is handled via decode
	if ( arguments.returningType() != null && !isEncodedBoolean( arguments.returningType().getJdbcMapping() ) ) {
		// NOTE(review): the super call below already renders a returning clause;
		// appending " returning " again afterwards looks like it would emit the
		// clause twice — confirm whether the super call should be removed here.
		super.renderReturningClause( sqlAppender, arguments, walker );
		// Render HANA's "returning <type>" manually so the type name can be
		// adjusted (LOBs, small ints, approximate numerics) for json_value.
		sqlAppender.appendSql( " returning " );
		sqlAppender.appendSql( jsonValueReturningType(
				getCastTypeName( arguments.returningType(), walker.getSessionFactory().getTypeConfiguration() )
		) );
	}
}

View File

@ -4,6 +4,7 @@
*/
package org.hibernate.dialect.function.xml;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.QueryException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
@ -29,6 +30,7 @@ import org.hibernate.query.sqm.tree.expression.SqmExpression;
import org.hibernate.query.sqm.tree.expression.SqmXmlTableFunction;
import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.Template;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.internal.ColumnQualifierCollectorSqlAstWalker;
import org.hibernate.sql.ast.spi.FromClauseAccess;
@ -408,6 +410,13 @@ public class HANAXmlTableFunction extends XmlTableFunction {
renderDefaultExpression( definition.defaultExpression(), sqlAppender, walker );
}
/**
 * Renders a {@code default} clause for an XMLTABLE column definition.
 * <p>
 * The default expression must be evaluable to a literal at translation time;
 * its value is emitted as a single-quote-escaped SQL string literal.
 *
 * @param expression the default expression, or {@code null} when no default applies
 * @param sqlAppender the appender receiving the generated SQL
 * @param walker the translator used to resolve the literal value
 */
protected void renderDefaultExpression(@Nullable Expression expression, SqlAppender sqlAppender, SqlAstTranslator<?> walker) {
	if ( expression == null ) {
		// No default specified for this column — render nothing
		return;
	}
	sqlAppender.appendSql( " default " );
	sqlAppender.appendSingleQuoteEscapedString( walker.getLiteralValue( expression ) );
}
/**
 * Whether the given JDBC mapping is backed by a boolean JDBC type.
 *
 * @param type the JDBC mapping to inspect
 * @return {@code true} if the underlying JDBC type reports itself as boolean
 */
static boolean isBoolean(JdbcMapping type) {
	final var jdbcType = type.getJdbcType();
	return jdbcType.isBoolean();
}