HHH-18792 Add JSON aggregate support for H2

Christian Beikov 2024-10-31 18:05:45 +01:00
parent 7fec477bc2
commit f7c5ff28f0
6 changed files with 466 additions and 32 deletions

H2Dialect.java

@@ -19,6 +19,8 @@ import org.hibernate.PessimisticLockException;
import org.hibernate.QueryTimeoutException;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.dialect.aggregate.H2AggregateSupport;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.identity.H2FinalTableIdentityColumnSupport;
import org.hibernate.dialect.identity.IdentityColumnSupport;
@@ -250,6 +252,11 @@ public class H2Dialect extends Dialect {
jdbcTypeRegistry.addDescriptor( OrdinalEnumJdbcType.INSTANCE );
}
@Override
public AggregateSupport getAggregateSupport() {
return H2AggregateSupport.valueOf( this );
}
@Override
public int getDefaultStatementBatchSize() {
return 15;
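
With the dialect change above in place, H2 can map an embeddable to a single JSON column while still reading and writing its individual attributes. A minimal mapping sketch of the kind of model this enables; the entity and attribute names are illustrative and not part of this commit:

import jakarta.persistence.Embeddable;
import jakarta.persistence.Embedded;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;

@Entity
class PurchaseOrder {
    @Id
    Long id;

    // Stored as a single JSON column; H2AggregateSupport supplies the read and write
    // expressions for the individual attributes inside the JSON document.
    @Embedded
    @JdbcTypeCode(SqlTypes.JSON)
    PaymentInfo payment;
}

@Embeddable
class PaymentInfo {
    String currency;
    byte[] token; // binary values are written into the JSON document as hex (see rawtohex below)
}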

H2AggregateSupport.java (new file)

@@ -0,0 +1,356 @@
/*
* SPDX-License-Identifier: LGPL-2.1-or-later
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.dialect.aggregate;
import java.util.LinkedHashMap;
import java.util.Map;
import org.hibernate.dialect.Dialect;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.mapping.SqlTypedMapping;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.spi.TypeConfiguration;
import org.checkerframework.checker.nullness.qual.Nullable;
import static org.hibernate.type.SqlTypes.ARRAY;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.VARBINARY;
public class H2AggregateSupport extends AggregateSupportImpl {
private static final AggregateSupport INSTANCE = new H2AggregateSupport();
public static @Nullable AggregateSupport valueOf(Dialect dialect) {
return dialect.getVersion().isSameOrAfter( 2, 2, 220 )
? H2AggregateSupport.INSTANCE
: null;
}
@Override
public String aggregateComponentCustomReadExpression(
String template,
String placeholder,
String aggregateParentReadExpression,
String columnExpression,
int aggregateColumnTypeCode,
SqlTypedMapping column) {
switch ( aggregateColumnTypeCode ) {
case JSON_ARRAY:
case JSON:
switch ( column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode() ) {
case JSON:
case JSON_ARRAY:
return template.replace(
placeholder,
"(" + aggregateParentReadExpression + ").\"" + columnExpression + "\""
);
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
// We encode binary data as hex, so we have to decode here
return template.replace(
placeholder,
hexDecodeExpression( queryExpression( "(" + aggregateParentReadExpression + ").\"" + columnExpression + "\"" ), column.getColumnDefinition() )
);
case ARRAY:
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) column.getJdbcMapping();
final String elementTypeName = getElementTypeName( column.getColumnDefinition() );
switch ( pluralType.getElementType().getJdbcType().getDefaultSqlTypeCode() ) {
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
// We encode binary data as hex, so we have to decode here
return template.replace(
placeholder,
"(select array_agg(" + hexDecodeExpression( queryExpression( "(" + aggregateParentReadExpression + ").\"" + columnExpression + "\"[i.x]" ), elementTypeName ) + ") from system_range(1,10000) i where i.x<=coalesce(array_length((" + aggregateParentReadExpression + ").\"" + columnExpression + "\"),0))"
);
default:
return template.replace(
placeholder,
"(select array_agg(" + valueExpression( "(" + aggregateParentReadExpression + ").\"" + columnExpression + "\"[i.x]", elementTypeName ) + ") from system_range(1,10000) i where i.x<=coalesce(array_length((" + aggregateParentReadExpression + ").\"" + columnExpression + "\"),0))"
);
}
default:
return template.replace(
placeholder,
columnExpression( aggregateParentReadExpression, columnExpression, column.getColumnDefinition() )
);
}
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnTypeCode );
}
private static String getElementTypeName(String arrayTypeName) {
final String elementTypeName = arrayTypeName.substring( 0, arrayTypeName.lastIndexOf( " array" ) );
// Doing array_agg on clob produces funky results
return elementTypeName.equals( "clob" ) ? "varchar" : elementTypeName;
}
private static String columnExpression(String aggregateParentReadExpression, String columnExpression, String columnType) {
return valueExpression( "(" + aggregateParentReadExpression + ").\"" + columnExpression + "\"", columnType );
}
private static String hexDecodeExpression(String valueExpression, String columnType) {
return "cast(hextoraw(regexp_replace(" + valueExpression + ",'([0-9a-f][0-9a-f])','00$1')) as " + columnType + ")";
}
private static String valueExpression(String valueExpression, String columnType) {
return "cast(" + queryExpression( valueExpression ) + " as " + columnType + ')';
}
private static String queryExpression(String valueExpression) {
// First we produce a SQL null if we see a JSON null
// Next, we replace quotes that surround the value
// Finally, we undo escaping that was done to a string
return "stringdecode(regexp_replace(nullif(" + valueExpression + ",JSON'null'),'^\"(.*)\"$','$1'))";
}
private static String jsonCustomWriteExpression(String customWriteExpression, JdbcMapping jdbcMapping) {
final int sqlTypeCode = jdbcMapping.getJdbcType().getDefaultSqlTypeCode();
switch ( sqlTypeCode ) {
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
// We encode binary data as hex
return "rawtohex(" + customWriteExpression + ")";
case ARRAY:
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) jdbcMapping;
switch ( pluralType.getElementType().getJdbcType().getDefaultSqlTypeCode() ) {
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
// We encode binary data as hex
return "(select array_agg(rawtohex(t.c1)) from unnest(" + customWriteExpression + ") t)";
default:
return customWriteExpression;
}
default:
return customWriteExpression;
}
}
@Override
public String aggregateComponentAssignmentExpression(
String aggregateParentAssignmentExpression,
String columnExpression,
int aggregateColumnTypeCode,
Column column) {
switch ( aggregateColumnTypeCode ) {
case JSON:
case JSON_ARRAY:
// For JSON we always have to replace the whole object
return aggregateParentAssignmentExpression;
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnTypeCode );
}
@Override
public boolean requiresAggregateCustomWriteExpressionRenderer(int aggregateSqlTypeCode) {
switch ( aggregateSqlTypeCode ) {
case JSON:
return true;
}
return false;
}
@Override
public WriteExpressionRenderer aggregateCustomWriteExpressionRenderer(
SelectableMapping aggregateColumn,
SelectableMapping[] columnsToUpdate,
TypeConfiguration typeConfiguration) {
final int aggregateSqlTypeCode = aggregateColumn.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode();
switch ( aggregateSqlTypeCode ) {
case JSON:
return jsonAggregateColumnWriter( aggregateColumn, columnsToUpdate );
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateSqlTypeCode );
}
private WriteExpressionRenderer jsonAggregateColumnWriter(
SelectableMapping aggregateColumn,
SelectableMapping[] columns) {
return new RootJsonWriteExpression( aggregateColumn, columns );
}
interface JsonWriteExpression {
void append(
SqlAppender sb,
String path,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression expression);
}
private static class AggregateJsonWriteExpression implements JsonWriteExpression {
private final LinkedHashMap<String, JsonWriteExpression> subExpressions = new LinkedHashMap<>();
protected void initializeSubExpressions(SelectableMapping aggregateColumn, SelectableMapping[] columns) {
for ( SelectableMapping column : columns ) {
final SelectablePath selectablePath = column.getSelectablePath();
final SelectablePath[] parts = selectablePath.getParts();
AggregateJsonWriteExpression currentAggregate = this;
for ( int i = 1; i < parts.length - 1; i++ ) {
currentAggregate = (AggregateJsonWriteExpression) currentAggregate.subExpressions.computeIfAbsent(
parts[i].getSelectableName(),
k -> new AggregateJsonWriteExpression()
);
}
final String customWriteExpression = column.getWriteExpression();
currentAggregate.subExpressions.put(
parts[parts.length - 1].getSelectableName(),
new BasicJsonWriteExpression(
column,
jsonCustomWriteExpression( customWriteExpression, column.getJdbcMapping() )
)
);
}
passThroughUnsetSubExpressions( aggregateColumn );
}
protected void passThroughUnsetSubExpressions(SelectableMapping aggregateColumn) {
final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) aggregateColumn.getJdbcMapping().getJdbcType();
final EmbeddableMappingType embeddableMappingType = aggregateJdbcType.getEmbeddableMappingType();
final int jdbcValueCount = embeddableMappingType.getJdbcValueCount();
for ( int i = 0; i < jdbcValueCount; i++ ) {
final SelectableMapping selectableMapping = embeddableMappingType.getJdbcValueSelectable( i );
final JsonWriteExpression jsonWriteExpression = subExpressions.get( selectableMapping.getSelectableName() );
if ( jsonWriteExpression == null ) {
subExpressions.put(
selectableMapping.getSelectableName(),
new PassThroughExpression( selectableMapping )
);
}
else if ( jsonWriteExpression instanceof AggregateJsonWriteExpression writeExpression ) {
writeExpression.passThroughUnsetSubExpressions( selectableMapping );
}
}
}
@Override
public void append(
SqlAppender sb,
String path,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression expression) {
sb.append( "json_object" );
char separator = '(';
for ( Map.Entry<String, JsonWriteExpression> entry : subExpressions.entrySet() ) {
final String column = entry.getKey();
final JsonWriteExpression value = entry.getValue();
final String subPath = path + ".\"" + column + "\"";
sb.append( separator );
if ( value instanceof AggregateJsonWriteExpression ) {
sb.append( '\'' );
sb.append( column );
sb.append( "':coalesce(" );
value.append( sb, subPath, translator, expression );
sb.append( ",json_object())" );
}
else {
value.append( sb, subPath, translator, expression );
}
separator = ',';
}
sb.append( ')' );
}
}
private static class RootJsonWriteExpression extends AggregateJsonWriteExpression
implements WriteExpressionRenderer {
private final String path;
RootJsonWriteExpression(SelectableMapping aggregateColumn, SelectableMapping[] columns) {
this.path = aggregateColumn.getSelectionExpression();
initializeSubExpressions( aggregateColumn, columns );
}
@Override
public void render(
SqlAppender sqlAppender,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression aggregateColumnWriteExpression,
String qualifier) {
final String basePath;
if ( qualifier == null || qualifier.isBlank() ) {
basePath = path;
}
else {
basePath = qualifier + "." + path;
}
append( sqlAppender, "(" + basePath + ")", translator, aggregateColumnWriteExpression );
}
}
private static class BasicJsonWriteExpression implements JsonWriteExpression {
private final SelectableMapping selectableMapping;
private final String customWriteExpressionStart;
private final String customWriteExpressionEnd;
BasicJsonWriteExpression(SelectableMapping selectableMapping, String customWriteExpression) {
this.selectableMapping = selectableMapping;
if ( customWriteExpression.equals( "?" ) ) {
this.customWriteExpressionStart = "";
this.customWriteExpressionEnd = "";
}
else {
final String[] parts = StringHelper.split( "?", customWriteExpression );
assert parts.length == 2;
this.customWriteExpressionStart = parts[0];
this.customWriteExpressionEnd = parts[1];
}
}
@Override
public void append(
SqlAppender sb,
String path,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression expression) {
sb.append( '\'' );
sb.append( selectableMapping.getSelectableName() );
sb.append( "':" );
sb.append( customWriteExpressionStart );
// We use NO_UNTYPED here so that expressions which require type inference are cast explicitly,
// since we don't know what the custom write expression that this is embedded in looks like,
// so we have to be pessimistic and avoid ambiguities
translator.render( expression.getValueExpression( selectableMapping ), SqlAstNodeRenderingMode.NO_UNTYPED );
sb.append( customWriteExpressionEnd );
}
}
private static class PassThroughExpression implements JsonWriteExpression {
private final SelectableMapping selectableMapping;
PassThroughExpression(SelectableMapping selectableMapping) {
this.selectableMapping = selectableMapping;
}
@Override
public void append(
SqlAppender sb,
String path,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression expression) {
sb.append( '\'' );
sb.append( selectableMapping.getSelectableName() );
sb.append( "':" );
sb.append( path );
}
}
}
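
To make the string templates above easier to follow, here is a small standalone sketch (not part of the commit) that composes the same fragments as queryExpression, valueExpression and hexDecodeExpression and prints the resulting SQL; the aggregate column payload and the sub-columns amount and token are invented names:

// Standalone illustration: the templates are copied from H2AggregateSupport above,
// only the column names and types are made up for the example.
public class H2AggregateReadExpressionDemo {

    public static void main(String[] args) {
        String parent = "payload";

        // Plain sub-column: extract the JSON member, turn a JSON null into a SQL null,
        // strip the surrounding quotes, undo string escaping, then cast to the column type:
        // cast(stringdecode(regexp_replace(nullif((payload)."amount",JSON'null'),'^"(.*)"$','$1')) as integer)
        System.out.println( valueExpression( "(" + parent + ").\"amount\"", "integer" ) );

        // Binary sub-column: the value was written as hex (rawtohex), so it is decoded on read:
        // cast(hextoraw(regexp_replace(stringdecode(...),'([0-9a-f][0-9a-f])','00$1')) as varbinary)
        System.out.println( hexDecodeExpression( queryExpression( "(" + parent + ").\"token\"" ), "varbinary" ) );
    }

    static String queryExpression(String valueExpression) {
        return "stringdecode(regexp_replace(nullif(" + valueExpression + ",JSON'null'),'^\"(.*)\"$','$1'))";
    }

    static String valueExpression(String valueExpression, String columnType) {
        return "cast(" + queryExpression( valueExpression ) + " as " + columnType + ')';
    }

    static String hexDecodeExpression(String valueExpression, String columnType) {
        return "cast(hextoraw(regexp_replace(" + valueExpression + ",'([0-9a-f][0-9a-f])','00$1')) as " + columnType + ")";
    }
}

On the write side, RootJsonWriteExpression assembles a json_object('col':<value>, ...) call, nesting coalesce(json_object(...),json_object()) for sub-aggregates, so an UPDATE always replaces the whole JSON document; that is also why aggregateComponentAssignmentExpression simply returns the parent assignment expression.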

H2JsonQueryFunction.java

@@ -45,11 +45,20 @@ public class H2JsonQueryFunction extends JsonQueryFunction {
arguments.jsonPath(),
arguments.passingClause(),
arguments.wrapMode(),
arguments.emptyBehavior(),
walker
);
}
static void appendJsonQuery(SqlAppender sqlAppender, Expression jsonDocument, boolean isJsonType, Expression jsonPathExpression, @Nullable JsonPathPassingClause passingClause, @Nullable JsonQueryWrapMode wrapMode, SqlAstTranslator<?> walker) {
static void appendJsonQuery(
SqlAppender sqlAppender,
Expression jsonDocument,
boolean isJsonType,
Expression jsonPathExpression,
@Nullable JsonPathPassingClause passingClause,
@Nullable JsonQueryWrapMode wrapMode,
@Nullable JsonQueryEmptyBehavior emptyBehavior,
SqlAstTranslator<?> walker) {
final String jsonPath;
try {
jsonPath = walker.getLiteralValue( jsonPathExpression );
@@ -57,16 +66,27 @@ public class H2JsonQueryFunction extends JsonQueryFunction {
catch (Exception ex) {
throw new QueryException( "H2 json_query only supports literal json paths, but got " + jsonPathExpression );
}
appendJsonQuery( sqlAppender, jsonDocument, isJsonType, jsonPath, passingClause, wrapMode, walker );
appendJsonQuery( sqlAppender, jsonDocument, isJsonType, jsonPath, passingClause, wrapMode, emptyBehavior, walker );
}
static void appendJsonQuery(SqlAppender sqlAppender, Expression jsonDocument, boolean isJsonType, String jsonPath, @Nullable JsonPathPassingClause passingClause, @Nullable JsonQueryWrapMode wrapMode, SqlAstTranslator<?> walker) {
static void appendJsonQuery(
SqlAppender sqlAppender,
Expression jsonDocument,
boolean isJsonType,
String jsonPath,
@Nullable JsonPathPassingClause passingClause,
@Nullable JsonQueryWrapMode wrapMode,
@Nullable JsonQueryEmptyBehavior emptyBehavior,
SqlAstTranslator<?> walker) {
if ( emptyBehavior == JsonQueryEmptyBehavior.EMPTY_ARRAY || emptyBehavior == JsonQueryEmptyBehavior.EMPTY_OBJECT ) {
sqlAppender.appendSql( "coalesce(" );
}
if ( wrapMode == JsonQueryWrapMode.WITH_WRAPPER ) {
sqlAppender.appendSql( "'['||" );
}
sqlAppender.appendSql( "stringdecode(btrim(nullif(" );
sqlAppender.appendSql( "cast(" );
sqlAppender.appendSql( "stringdecode(regexp_replace(nullif(" );
H2JsonValueFunction.renderJsonPath(
sqlAppender,
jsonDocument,
@@ -75,10 +95,15 @@ public class H2JsonQueryFunction extends JsonQueryFunction {
jsonPath,
passingClause
);
sqlAppender.appendSql( " as varchar)" );
sqlAppender.appendSql( ",'null'),'\"'))");
sqlAppender.appendSql( ",JSON'null'),'^\"(.*)\"$','$1'))");
if ( wrapMode == JsonQueryWrapMode.WITH_WRAPPER ) {
sqlAppender.appendSql( "||']'" );
}
if ( emptyBehavior == JsonQueryEmptyBehavior.EMPTY_ARRAY ) {
sqlAppender.appendSql( ",'[]')" );
}
else if ( emptyBehavior == JsonQueryEmptyBehavior.EMPTY_OBJECT ) {
sqlAppender.appendSql( ",'{}')" );
}
}
}
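
Reading the appendSql calls above end to end, the emulation emitted for json_query(doc, path WITH WRAPPER EMPTY ARRAY ON EMPTY) has roughly the following shape. The sketch below only concatenates those literals for illustration; "<path expr>" stands in for whatever H2JsonValueFunction.renderJsonPath renders and is not real output:

// Illustration only: the SQL shape assembled by appendJsonQuery for
// WITH_WRAPPER plus EMPTY_ARRAY; "<path expr>" is a placeholder.
public class H2JsonQueryShapeDemo {

    public static void main(String[] args) {
        String emulated =
                "coalesce("                                  // EMPTY_ARRAY / EMPTY_OBJECT behavior
                + "'['||"                                    // WITH_WRAPPER prefix
                + "stringdecode(regexp_replace(nullif("
                + "<path expr>"
                + ",JSON'null'),'^\"(.*)\"$','$1'))"         // JSON null to SQL null, unquote, unescape
                + "||']'"                                    // WITH_WRAPPER suffix
                + ",'[]')";                                  // fall back to an empty array
        System.out.println( emulated );
    }
}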

H2JsonTableFunction.java

@@ -63,6 +63,7 @@ import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
* H2 json_table function.
* <p>
@@ -662,15 +663,26 @@ public class H2JsonTableFunction extends JsonTableFunction {
private String castValueExpression(String baseReadExpression, CastTarget castTarget, @Nullable Literal defaultExpression, SqmToSqlAstConverter converter) {
final StringBuilder sb = new StringBuilder( baseReadExpression.length() + 200 );
sb.append( "cast(stringdecode(btrim(nullif(" );
if ( defaultExpression != null ) {
sb.append( "coalesce(" );
}
final boolean hexDecoding = H2JsonValueFunction.needsHexDecoding( castTarget.getJdbcMapping() );
sb.append( "cast(" );
if ( hexDecoding ) {
// We encode binary data as hex, so we have to decode here
sb.append( "hextoraw(regexp_replace(" );
}
sb.append( "stringdecode(regexp_replace(nullif(" );
sb.append( baseReadExpression );
sb.append( " as varchar)" );
sb.append( ",JSON'null'),'^\"(.*)\"$','$1'))" );
if ( hexDecoding ) {
sb.append( ",'([0-9a-f][0-9a-f])','00$1'))" );
}
sb.append( " as " );
sb.append( determineColumnType( castTarget, converter.getCreationContext().getTypeConfiguration() ) );
sb.append( ')' );
if ( defaultExpression != null ) {
sb.append( ",cast(" );
sb.append( ',' );
//noinspection unchecked
final String sqlLiteral = defaultExpression.getJdbcMapping().getJdbcLiteralFormatter().toJdbcLiteral(
defaultExpression.getLiteralValue(),
@@ -678,13 +690,8 @@ public class H2JsonTableFunction extends JsonTableFunction {
converter.getCreationContext().getSessionFactory().getWrapperOptions()
);
sb.append( sqlLiteral );
sb.append( " as varchar))" );
sb.append( ')' );
}
sb.append( ",'null'),'\"'))");
sb.append( " as " );
sb.append( determineColumnType( castTarget, converter.getCreationContext().getTypeConfiguration() ) );
sb.append( ')' );
return sb.toString();
}
@@ -703,18 +710,25 @@ public class H2JsonTableFunction extends JsonTableFunction {
private String castQueryExpression(String baseReadExpression, JsonQueryEmptyBehavior emptyBehavior, JsonQueryWrapMode wrapMode, SqmToSqlAstConverter converter) {
final StringBuilder sb = new StringBuilder( baseReadExpression.length() + 200 );
if ( emptyBehavior == JsonQueryEmptyBehavior.EMPTY_ARRAY || emptyBehavior == JsonQueryEmptyBehavior.EMPTY_OBJECT ) {
sb.append( "coalesce(" );
}
if ( wrapMode == JsonQueryWrapMode.WITH_WRAPPER ) {
sb.append( "'['||" );
}
sb.append( "stringdecode(btrim(nullif(" );
sb.append( "cast(" );
sb.append( "stringdecode(regexp_replace(nullif(" );
sb.append( baseReadExpression );
sb.append( " as varchar)" );
sb.append( ",'null'),'\"'))");
sb.append( ",JSON'null'),'^\"(.*)\"$','$1'))");
if ( wrapMode == JsonQueryWrapMode.WITH_WRAPPER ) {
sb.append( "||']'" );
}
if ( emptyBehavior == JsonQueryEmptyBehavior.EMPTY_ARRAY ) {
sb.append( ",'[]')" );
}
else if ( emptyBehavior == JsonQueryEmptyBehavior.EMPTY_OBJECT ) {
sb.append( ",'{}')" );
}
return sb.toString();
}

H2JsonValueFunction.java

@@ -8,6 +8,7 @@ import java.util.List;
import org.hibernate.QueryException;
import org.hibernate.internal.util.QuotingHelper;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.query.ReturnableType;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
@@ -20,6 +21,10 @@ import org.hibernate.type.spi.TypeConfiguration;
import org.checkerframework.checker.nullness.qual.Nullable;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.VARBINARY;
/**
* H2 json_value function.
*/
@@ -45,8 +50,20 @@ public class H2JsonValueFunction extends JsonValueFunction {
final Expression defaultExpression = arguments.emptyBehavior() == null
? null
: arguments.emptyBehavior().getDefaultExpression();
if ( defaultExpression != null ) {
sqlAppender.appendSql( "coalesce(" );
}
final boolean hexDecoding;
if ( arguments.returningType() != null ) {
hexDecoding = H2JsonValueFunction.needsHexDecoding( arguments.returningType().getJdbcMapping() );
sqlAppender.appendSql( "cast(" );
if ( hexDecoding ) {
// We encode binary data as hex, so we have to decode here
sqlAppender.appendSql( "hextoraw(regexp_replace(" );
}
}
else {
hexDecoding = false;
}
final String jsonPath;
try {
@@ -56,11 +73,7 @@ public class H2JsonValueFunction extends JsonValueFunction {
throw new QueryException( "H2 json_value only supports literal json paths, but got " + arguments.jsonPath() );
}
sqlAppender.appendSql( "stringdecode(btrim(nullif(" );
if ( defaultExpression != null ) {
sqlAppender.appendSql( "coalesce(" );
}
sqlAppender.appendSql( "cast(" );
sqlAppender.appendSql( "stringdecode(regexp_replace(nullif(" );
renderJsonPath(
sqlAppender,
arguments.jsonDocument(),
@@ -69,19 +82,28 @@ public class H2JsonValueFunction extends JsonValueFunction {
jsonPath,
arguments.passingClause()
);
sqlAppender.appendSql( " as varchar)" );
if ( defaultExpression != null ) {
sqlAppender.appendSql( ",cast(" );
defaultExpression.accept( walker );
sqlAppender.appendSql( " as varchar))" );
}
sqlAppender.appendSql( ",'null'),'\"'))");
sqlAppender.appendSql( ",JSON'null'),'^\"(.*)\"$','$1'))");
if ( arguments.returningType() != null ) {
if ( hexDecoding ) {
sqlAppender.appendSql( ",'([0-9a-f][0-9a-f])','00$1'))" );
}
sqlAppender.appendSql( " as " );
arguments.returningType().accept( walker );
sqlAppender.appendSql( ')' );
}
if ( defaultExpression != null ) {
sqlAppender.appendSql( ',' );
defaultExpression.accept( walker );
sqlAppender.appendSql( ')' );
}
}
public static boolean needsHexDecoding(JdbcMapping jdbcMapping) {
return switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
case BINARY, VARBINARY, LONG32VARBINARY -> true;
default -> false;
};
}
public static void renderJsonPath(
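
The json_value emulation follows the same pattern: the unquoting pipeline, wrapped in a hex decode when needsHexDecoding reports a binary RETURNING type, then cast to the returning type, with any DEFAULT ... ON EMPTY value supplied as a coalesce fallback around the finished expression instead of being cast to varchar inside it. A rough illustration (not generated output) with placeholders for the parts rendered elsewhere:

// Illustration only: the SQL shape emitted for
// json_value(doc, path RETURNING <binary type> DEFAULT <expr> ON EMPTY);
// "<path expr>", "<returning type>" and "<default>" are placeholders.
public class H2JsonValueShapeDemo {

    public static void main(String[] args) {
        String shape =
                "coalesce("                                  // only with DEFAULT ... ON EMPTY
                + "cast("                                    // only with RETURNING
                + "hextoraw(regexp_replace("                 // only when needsHexDecoding(...) is true
                + "stringdecode(regexp_replace(nullif("
                + "<path expr>"
                + ",JSON'null'),'^\"(.*)\"$','$1'))"
                + ",'([0-9a-f][0-9a-f])','00$1'))"
                + " as <returning type>)"
                + ",<default>)";                             // the default is the fallback for the cast value
        System.out.println( shape );
    }
}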

EnumJdbcType.java

@@ -54,6 +54,11 @@ public class EnumJdbcType implements JdbcType {
return "ENUM";
}
@Override
public Class<?> getPreferredJavaTypeClass(WrapperOptions options) {
return String.class;
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
return new BasicBinder<>( javaType, this ) {
@@ -68,6 +73,11 @@ public class EnumJdbcType implements JdbcType {
throws SQLException {
st.setString( name, getJavaType().unwrap( value, String.class, options ) );
}
@Override
public Object getBindValue(X value, WrapperOptions options) throws SQLException {
return getJavaType().unwrap( value, String.class, options );
}
};
}