HHH-15862 Support basic array values in aggregate components

Christian Beikov 2024-05-03 18:18:57 +02:00
parent fec6f862d9
commit d1b30e1ab8
133 changed files with 8171 additions and 3260 deletions
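For orientation, a minimal mapping sketch (in Java) of what this commit enables; the entity, attribute, and UDT names are hypothetical and the generated DDL depends on the dialect. A @Struct aggregate component may now contain basic array values, which the validation removed from AggregateComponentSecondPass below used to reject.

import java.math.BigDecimal;

import org.hibernate.annotations.Struct;

import jakarta.persistence.Embeddable;
import jakarta.persistence.Embedded;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;

@Entity
class Invoice {
    @Id
    Long id;

    // The aggregate is stored as a single UDT/struct column
    @Embedded
    InvoiceDetails details;
}

@Embeddable
@Struct(name = "invoice_details_type")
class InvoiceDetails {
    BigDecimal amount;
    String[] tags; // basic array value inside the aggregate component
}

The PropertyBinder and AggregateComponentBinder changes below also bind arrays of @Struct embeddables themselves, resolving to the new STRUCT_ARRAY or STRUCT_TABLE type codes depending on the preferred array SQL type code.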

View File: OracleLegacyDialect.java

@ -32,6 +32,7 @@ import org.hibernate.dialect.OracleJdbcHelper;
import org.hibernate.dialect.OracleJsonJdbcType;
import org.hibernate.dialect.OracleReflectionStructJdbcType;
import org.hibernate.dialect.OracleTypes;
import org.hibernate.dialect.OracleUserDefinedTypeExporter;
import org.hibernate.dialect.OracleXmlJdbcType;
import org.hibernate.dialect.Replacer;
import org.hibernate.dialect.RowLockStrategy;
@ -67,6 +68,7 @@ import org.hibernate.exception.spi.TemplatedViolatedConstraintNameExtractor;
import org.hibernate.exception.spi.ViolatedConstraintNameExtractor;
import org.hibernate.internal.util.JdbcExceptionHelper;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.UserDefinedType;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.procedure.internal.StandardCallableStatementSupport;
@ -93,6 +95,7 @@ import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorOracleDatabaseImpl;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.type.JavaObjectType;
import org.hibernate.type.NullType;
import org.hibernate.type.SqlTypes;
@ -104,6 +107,7 @@ import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.OracleJsonBlobJdbcType;
import org.hibernate.type.descriptor.jdbc.NullJdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectNullAsNullTypeJdbcType;
import org.hibernate.type.descriptor.jdbc.SqlTypedJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.ArrayDdlTypeImpl;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
@ -177,6 +181,7 @@ public class OracleLegacyDialect extends Dialect {
private final LimitHandler limitHandler = supportsFetchClause( FetchClauseType.ROWS_ONLY )
? Oracle12LimitHandler.INSTANCE
: new LegacyOracleLimitHandler( getVersion() );
private final OracleUserDefinedTypeExporter userDefinedTypeExporter = new OracleUserDefinedTypeExporter( this );
private final UniqueDelegate uniqueDelegate = new CreateTableUniqueDelegate(this);
public OracleLegacyDialect() {
@ -760,12 +765,12 @@ public class OracleLegacyDialect extends Dialect {
jdbcTypeCode = SqlTypes.GEOMETRY;
}
else {
final AggregateJdbcType aggregateDescriptor = jdbcTypeRegistry.findAggregateDescriptor(
final SqlTypedJdbcType descriptor = jdbcTypeRegistry.findSqlTypedDescriptor(
// Skip the schema
columnTypeName.substring( columnTypeName.indexOf( '.' ) + 1 )
);
if ( aggregateDescriptor != null ) {
return aggregateDescriptor;
if ( descriptor != null ) {
return descriptor;
}
}
break;
@ -777,6 +782,15 @@ public class OracleLegacyDialect extends Dialect {
ColumnTypeInformation.EMPTY
);
}
else {
final SqlTypedJdbcType descriptor = jdbcTypeRegistry.findSqlTypedDescriptor(
// Skip the schema
columnTypeName.substring( columnTypeName.indexOf( '.' ) + 1 )
);
if ( descriptor != null ) {
return descriptor;
}
}
break;
case Types.NUMERIC:
if ( precision > 8 // precision of 0 means something funny
@ -828,7 +842,7 @@ public class OracleLegacyDialect extends Dialect {
@Override
public String getArrayTypeName(String javaElementTypeName, String elementTypeName, Integer maxLength) {
return javaElementTypeName + "Array";
return ( javaElementTypeName == null ? elementTypeName : javaElementTypeName ) + "Array";
}
@Override
@ -837,6 +851,11 @@ public class OracleLegacyDialect extends Dialect {
return ARRAY;
}
@Override
public Exporter<UserDefinedType> getUserDefinedTypeExporter() {
return userDefinedTypeExporter;
}
@Override
public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
super.contributeTypes( typeContributions, serviceRegistry );

View File: OracleLegacySqlAstTranslator.java

@ -560,7 +560,7 @@ public class OracleLegacySqlAstTranslator<T extends JdbcOperation> extends Abstr
appendSql( ')' );
break;
case SqlTypes.ARRAY:
final String arrayTypeName = ( (OracleArrayJdbcType) jdbcType ).getTypeName();
final String arrayTypeName = ( (OracleArrayJdbcType) jdbcType ).getSqlTypeName();
switch ( operator ) {
case DISTINCT_FROM:
case NOT_DISTINCT_FROM:

View File: PostgreSQLLegacyDialect.java

@ -82,6 +82,7 @@ import org.hibernate.type.descriptor.jdbc.BlobJdbcType;
import org.hibernate.type.descriptor.jdbc.ClobJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectNullAsBinaryTypeJdbcType;
import org.hibernate.type.descriptor.jdbc.SqlTypedJdbcType;
import org.hibernate.type.descriptor.jdbc.XmlJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.ArrayDdlTypeImpl;
@ -339,12 +340,23 @@ public class PostgreSQLLegacyDialect extends Dialect {
ColumnTypeInformation.EMPTY
);
}
final SqlTypedJdbcType elementDescriptor = jdbcTypeRegistry.findSqlTypedDescriptor( componentTypeName );
if ( elementDescriptor != null ) {
return jdbcTypeRegistry.resolveTypeConstructorDescriptor(
jdbcTypeCode,
elementDescriptor,
ColumnTypeInformation.EMPTY
);
}
}
break;
case STRUCT:
final AggregateJdbcType aggregateDescriptor = jdbcTypeRegistry.findAggregateDescriptor( columnTypeName );
if ( aggregateDescriptor != null ) {
return aggregateDescriptor;
final SqlTypedJdbcType descriptor = jdbcTypeRegistry.findSqlTypedDescriptor(
// Skip the schema
columnTypeName.substring( columnTypeName.indexOf( '.' ) + 1 )
);
if ( descriptor != null ) {
return descriptor;
}
break;
}

View File: MetadataImpl.java

@ -58,6 +58,7 @@ import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.PrimaryKey;
import org.hibernate.mapping.Property;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UserDefinedObjectType;
import org.hibernate.mapping.UserDefinedType;
import org.hibernate.procedure.spi.NamedCallableQueryMemento;
import org.hibernate.query.internal.NamedObjectRepositoryImpl;
@ -448,13 +449,16 @@ public class MetadataImpl implements MetadataImplementor, Serializable {
}
}
for ( UserDefinedType userDefinedType : namespace.getUserDefinedTypes() ) {
if ( userDefinedType.getColumns().size() > 1 ) {
final List<Column> userDefinedTypeColumns = columnOrderingStrategy.orderUserDefinedTypeColumns(
userDefinedType,
this
);
if ( userDefinedTypeColumns != null ) {
userDefinedType.reorderColumns( userDefinedTypeColumns );
if ( userDefinedType instanceof UserDefinedObjectType ) {
final UserDefinedObjectType objectType = (UserDefinedObjectType) userDefinedType;
if ( objectType.getColumns().size() > 1 ) {
final List<Column> objectTypeColumns = columnOrderingStrategy.orderUserDefinedTypeColumns(
objectType,
this
);
if ( objectTypeColumns != null ) {
objectType.reorderColumns( objectTypeColumns );
}
}
}
}

View File: AggregateComponentBinder.java

@ -38,16 +38,16 @@ public final class AggregateComponentBinder {
Component component,
PropertyHolder propertyHolder,
PropertyData inferredData,
XClass returnedClassOrElement,
XClass componentXClass,
AnnotatedColumns columns,
MetadataBuildingContext context) {
if ( isAggregate( inferredData.getProperty(), inferredData.getClassOrElement() ) ) {
if ( isAggregate( inferredData.getProperty(), componentXClass ) ) {
validateComponent( component, BinderHelper.getPath( propertyHolder, inferredData ) );
final InFlightMetadataCollector metadataCollector = context.getMetadataCollector();
final TypeConfiguration typeConfiguration = metadataCollector.getTypeConfiguration();
// Determine a struct name if this is a struct through some means
final String structName = determineStructName( columns, inferredData, returnedClassOrElement );
final String structName = determineStructName( columns, inferredData, componentXClass );
// We must register a special JavaType for the embeddable which can provide a recommended JdbcType
typeConfiguration.getJavaTypeRegistry().resolveDescriptor(
@ -55,12 +55,12 @@ public final class AggregateComponentBinder {
() -> new EmbeddableAggregateJavaType<>( component.getComponentClass(), structName )
);
component.setStructName( structName );
component.setStructColumnNames( determineStructAttributeNames( inferredData, returnedClassOrElement ) );
component.setStructColumnNames( determineStructAttributeNames( inferredData, componentXClass ) );
// Determine the aggregate column
BasicValueBinder basicValueBinder = new BasicValueBinder( BasicValueBinder.Kind.ATTRIBUTE, component, context );
basicValueBinder.setPropertyName( inferredData.getPropertyName() );
basicValueBinder.setReturnedClassName( inferredData.getClassOrElementName() );
basicValueBinder.setReturnedClassName( inferredData.getPropertyClass().getName() );
basicValueBinder.setColumns( columns );
basicValueBinder.setPersistentClassName( propertyHolder.getClassName() );
basicValueBinder.setType(
@ -71,9 +71,24 @@ public final class AggregateComponentBinder {
);
final BasicValue propertyValue = basicValueBinder.make();
final AggregateColumn aggregateColumn = (AggregateColumn) propertyValue.getColumn();
aggregateColumn.setSqlType( structName );
if ( structName != null ) {
aggregateColumn.setSqlTypeCode( SqlTypes.STRUCT );
if ( structName != null && aggregateColumn.getSqlType() == null ) {
if ( inferredData.getProperty().isArray() || inferredData.getProperty().isCollection() ) {
aggregateColumn.setSqlTypeCode( getStructPluralSqlTypeCode( context ) );
aggregateColumn.setSqlType(
context.getMetadataCollector()
.getDatabase()
.getDialect()
.getArrayTypeName(
null,
structName,
null
)
);
}
else {
aggregateColumn.setSqlTypeCode( SqlTypes.STRUCT );
aggregateColumn.setSqlType( structName );
}
}
component.setAggregateColumn( aggregateColumn );
@ -81,13 +96,25 @@ public final class AggregateComponentBinder {
new AggregateComponentSecondPass(
propertyHolder,
component,
returnedClassOrElement,
componentXClass,
context
)
);
}
}
private static int getStructPluralSqlTypeCode(MetadataBuildingContext context) {
final int arrayTypeCode = context.getPreferredSqlTypeCodeForArray();
switch ( arrayTypeCode ) {
case SqlTypes.ARRAY:
return SqlTypes.STRUCT_ARRAY;
case SqlTypes.TABLE:
return SqlTypes.STRUCT_TABLE;
default:
throw new IllegalArgumentException( "Unsupported array type code: " + arrayTypeCode );
}
}
private static void validateComponent(Component component, String basePath) {
for ( Property property : component.getProperties() ) {
final Value value = property.getValue();
@ -122,7 +149,9 @@ public final class AggregateComponentBinder {
return struct.name();
}
final JdbcTypeCode jdbcTypeCode = property.getAnnotation( JdbcTypeCode.class );
if ( jdbcTypeCode != null && jdbcTypeCode.value() == SqlTypes.STRUCT && columns != null ) {
if ( jdbcTypeCode != null
&& ( jdbcTypeCode.value() == SqlTypes.STRUCT || jdbcTypeCode.value() == SqlTypes.STRUCT_ARRAY || jdbcTypeCode.value() == SqlTypes.STRUCT_TABLE )
&& columns != null ) {
final List<AnnotatedColumn> columnList = columns.getColumns();
if ( columnList.size() == 1 && columnList.get( 0 ).getSqlType() != null ) {
return columnList.get( 0 ).getSqlType();
@ -163,6 +192,10 @@ public final class AggregateComponentBinder {
case SqlTypes.STRUCT:
case SqlTypes.JSON:
case SqlTypes.SQLXML:
case SqlTypes.STRUCT_ARRAY:
case SqlTypes.STRUCT_TABLE:
case SqlTypes.JSON_ARRAY:
case SqlTypes.XML_ARRAY:
return true;
}
}

View File: AggregateComponentSecondPass.java

@ -11,7 +11,6 @@ import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import org.hibernate.AnnotationException;
import org.hibernate.MappingException;
import org.hibernate.annotations.Comment;
import org.hibernate.annotations.common.reflection.XClass;
@ -26,18 +25,16 @@ import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.BasicValue;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
import org.hibernate.mapping.Selectable;
import org.hibernate.mapping.UserDefinedType;
import org.hibernate.mapping.UserDefinedObjectType;
import org.hibernate.mapping.Value;
import org.hibernate.metamodel.internal.EmbeddableHelper;
import org.hibernate.sql.Template;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.spi.TypeConfiguration;
/**
@ -47,17 +44,17 @@ public class AggregateComponentSecondPass implements SecondPass {
private final PropertyHolder propertyHolder;
private final Component component;
private final XClass returnedClassOrElement;
private final XClass componentXClass;
private final MetadataBuildingContext context;
public AggregateComponentSecondPass(
PropertyHolder propertyHolder,
Component component,
XClass returnedClassOrElement,
XClass componentXClass,
MetadataBuildingContext context) {
this.propertyHolder = propertyHolder;
this.component = component;
this.returnedClassOrElement = returnedClassOrElement;
this.componentXClass = componentXClass;
this.context = context;
}
@ -86,15 +83,15 @@ public class AggregateComponentSecondPass implements SecondPass {
if ( structName != null ) {
final Namespace defaultNamespace = database.getDefaultNamespace();
final Identifier udtName = Identifier.toIdentifier( structName );
final UserDefinedType udt = new UserDefinedType( "orm", defaultNamespace, udtName );
final Comment comment = returnedClassOrElement.getAnnotation( Comment.class );
final UserDefinedObjectType udt = new UserDefinedObjectType( "orm", defaultNamespace, udtName );
final Comment comment = componentXClass.getAnnotation( Comment.class );
if ( comment != null ) {
udt.setComment( comment.value() );
}
for ( org.hibernate.mapping.Column aggregatedColumn : aggregatedColumns ) {
udt.addColumn( aggregatedColumn );
}
final UserDefinedType registeredUdt = defaultNamespace.createUserDefinedType(
final UserDefinedObjectType registeredUdt = defaultNamespace.createUserDefinedType(
udtName,
name -> udt
);
@ -187,7 +184,7 @@ public class AggregateComponentSecondPass implements SecondPass {
propertyHolder.getTable().getColumns().removeAll( aggregatedColumns );
}
private void orderColumns(UserDefinedType userDefinedType) {
private void orderColumns(UserDefinedObjectType userDefinedType) {
final Class<?> componentClass = component.getComponentClass();
final int[] originalOrder = component.sortProperties();
final String[] structColumnNames = component.getStructColumnNames();
@ -296,18 +293,6 @@ public class AggregateComponentSecondPass implements SecondPass {
validateSupportedColumnTypes( StringHelper.qualify( basePath, property.getName() ), subComponent );
}
}
else if ( value instanceof BasicValue ) {
final BasicType<?> basicType = (BasicType<?>) value.getType();
if ( basicType instanceof BasicPluralType<?, ?> ) {
// todo: see HHH-15862
throw new AnnotationException(
"Property '" + StringHelper.qualify( basePath, property.getName() )
+ "' uses not yet supported array mapping type in component class '"
+ component.getComponentClassName()
+ "'. Aggregate components currently may only contain simple basic values and components of simple basic values."
);
}
}
}
}
@ -326,7 +311,7 @@ public class AggregateComponentSecondPass implements SecondPass {
} while ( aggregateColumn != null );
}
private void validateEqual(UserDefinedType udt1, UserDefinedType udt2) {
private void validateEqual(UserDefinedObjectType udt1, UserDefinedObjectType udt2) {
if ( udt1.getColumnSpan() != udt2.getColumnSpan() ) {
throw new MappingException(
String.format(
@ -350,7 +335,7 @@ public class AggregateComponentSecondPass implements SecondPass {
String.format(
"Struct [%s] of class [%s] is defined by multiple components with different mappings [%s] and [%s] for column [%s]",
udt1.getName(),
returnedClassOrElement.getName(),
componentXClass.getName(),
column1.getSqlType(),
column2.getSqlType(),
column1.getCanonicalName()
@ -365,7 +350,7 @@ public class AggregateComponentSecondPass implements SecondPass {
"Struct [%s] is defined by multiple components %s but some columns are missing in [%s]: %s",
udt1.getName(),
findComponentClasses(),
returnedClassOrElement.getName(),
componentXClass.getName(),
missingColumns
)
);

View File: ComponentPropertyHolder.java

@ -91,7 +91,7 @@ public class ComponentPropertyHolder extends AbstractPropertyHolder {
}
else {
this.embeddedAttributeName = "";
this.attributeConversionInfoMap = processAttributeConversions( inferredData.getClassOrElement() );
this.attributeConversionInfoMap = processAttributeConversions( inferredData.getClassOrPluralElement() );
}
}

View File: EmbeddableBinder.java

@ -151,7 +151,7 @@ public class EmbeddableBinder {
|| returnedClass.isAnnotationPresent( Embeddable.class ) && !property.isAnnotationPresent( Convert.class );
}
private static Component bindEmbeddable(
public static Component bindEmbeddable(
PropertyData inferredData,
PropertyHolder propertyHolder,
AccessType propertyAccessor,
@ -350,7 +350,7 @@ public class EmbeddableBinder {
final XClass returnedClassOrElement;
if ( compositeUserTypeClass == null ) {
compositeUserType = null;
returnedClassOrElement = inferredData.getClassOrElement();
returnedClassOrElement = inferredData.getClassOrPluralElement();
}
else {
compositeUserType = compositeUserType( compositeUserTypeClass, context );
@ -440,7 +440,7 @@ public class EmbeddableBinder {
new PropertyContainer( returnedClassOrElement, annotatedClass, propertyAccessor );
addElementsOfClass( classElements, container, context);
//add elements of the embeddable's mapped superclasses
XClass superClass = annotatedClass.getSuperclass();
XClass superClass = returnedClassOrElement.getSuperclass();
while ( isValidSuperclass( superClass, isIdClass ) ) {
//FIXME: proper support of type variables incl var resolved at upper levels
final PropertyContainer superContainer =
@ -610,16 +610,19 @@ public class EmbeddableBinder {
component.setEmbedded( isComponentEmbedded );
//yuk
component.setTable( propertyHolder.getTable() );
final XClass embeddableClass;
//FIXME shouldn't identifier mapper use getClassOrElementName? Need to be checked.
if ( isIdentifierMapper
|| isComponentEmbedded && inferredData.getPropertyName() == null ) {
component.setComponentClassName( component.getOwner().getClassName() );
embeddableClass = inferredData.getClassOrElement();
}
else {
component.setComponentClassName( inferredData.getClassOrElementName() );
embeddableClass = inferredData.getClassOrPluralElement();
component.setComponentClassName( embeddableClass.getName() );
}
component.setCustomInstantiator( customInstantiatorImpl );
final Constructor<?> constructor = resolveInstantiator( inferredData.getClassOrElement(), context );
final Constructor<?> constructor = resolveInstantiator( embeddableClass, context );
if ( constructor != null ) {
component.setInstantiator( constructor, constructor.getAnnotation( Instantiator.class ).value() );
}
@ -646,7 +649,7 @@ public class EmbeddableBinder {
return null;
}
private static Class<? extends EmbeddableInstantiator> determineCustomInstantiator(
public static Class<? extends EmbeddableInstantiator> determineCustomInstantiator(
XProperty property,
XClass returnedClass,
MetadataBuildingContext context) {

View File: PropertyBinder.java

@ -119,6 +119,7 @@ public class PropertyBinder {
private AnnotatedColumns columns;
private PropertyHolder holder;
private Value value;
private Component componentElement;
private boolean insertable = true;
private boolean updatable = true;
private String cascade;
@ -190,6 +191,10 @@ public class PropertyBinder {
this.value = value;
}
public void setComponentElement(Component componentElement) {
this.componentElement = componentElement;
}
public void setCascade(String cascadeStrategy) {
this.cascade = cascadeStrategy;
}
@ -1058,30 +1063,73 @@ public class PropertyBinder {
resolveCompositeUserType( inferredData, context );
if ( isComposite || compositeUserType != null ) {
propertyBinder = createCompositeBinder(
propertyHolder,
inferredData,
entityBinder,
isIdentifierMapper,
isComponentEmbedded,
context,
inheritanceStatePerClass,
property,
actualColumns,
returnedClass,
propertyBinder,
isOverridden,
compositeUserType
);
if ( property.isArray() && property.getElementClass() != null
&& isEmbedded( property, property.getElementClass() ) ) {
// This is a special kind of basic aggregate component array type
propertyBinder.setComponentElement(
EmbeddableBinder.bindEmbeddable(
inferredData,
propertyHolder,
entityBinder.getPropertyAccessor( property ),
entityBinder,
isIdentifierMapper,
context,
isComponentEmbedded,
propertyBinder.isId(),
inheritanceStatePerClass,
null,
null,
EmbeddableBinder.determineCustomInstantiator( property, returnedClass, context ),
compositeUserType,
null,
columns
)
);
propertyBinder.setColumns( actualColumns );
propertyBinder.makePropertyValueAndBind();
}
else {
propertyBinder = createCompositeBinder(
propertyHolder,
inferredData,
entityBinder,
isIdentifierMapper,
isComponentEmbedded,
context,
inheritanceStatePerClass,
property,
actualColumns,
returnedClass,
propertyBinder,
isOverridden,
compositeUserType
);
}
}
else if ( property.isCollection() && property.getElementClass() != null
&& isEmbedded( property, property.getElementClass() ) ) {
// This is a special kind of basic aggregate component array type
// todo: see HHH-15830
throw new AnnotationException(
"Property '" + BinderHelper.getPath( propertyHolder, inferredData )
+ "' is mapped as basic aggregate component array, but this is not yet supported."
propertyBinder.setComponentElement(
EmbeddableBinder.bindEmbeddable(
inferredData,
propertyHolder,
entityBinder.getPropertyAccessor( property ),
entityBinder,
isIdentifierMapper,
context,
isComponentEmbedded,
propertyBinder.isId(),
inheritanceStatePerClass,
null,
null,
EmbeddableBinder.determineCustomInstantiator( property, property.getElementClass(), context ),
compositeUserType,
null,
columns
)
);
propertyBinder.setColumns( actualColumns );
propertyBinder.makePropertyValueAndBind();
}
else {
createBasicBinder(

View File: PropertyInferredData.java

@ -86,6 +86,19 @@ public class PropertyInferredData implements PropertyData {
}
}
@Override
public XClass getClassOrPluralElement() throws MappingException {
if ( property.isAnnotationPresent( Target.class ) ) {
return reflectionManager.toXClass( property.getAnnotation( Target.class ).value() );
}
else if ( property.isCollection() ) {
return property.getElementClass();
}
else {
return property.getClassOrElementClass();
}
}
@Override
public String getClassOrElementName() throws MappingException {
return getClassOrElement().getName();

View File: PropertyPreloadedData.java

@ -44,6 +44,11 @@ public class PropertyPreloadedData implements PropertyData {
return getPropertyClass();
}
@Override
public XClass getClassOrPluralElement() throws MappingException {
return getPropertyClass();
}
@Override
public XClass getPropertyClass() throws MappingException {
return returnedClass;

View File: WrappedInferredData.java

@ -30,6 +30,11 @@ public class WrappedInferredData implements PropertyData {
return wrappedInferredData.getClassOrElement();
}
@Override
public XClass getClassOrPluralElement() throws MappingException {
return wrappedInferredData.getClassOrPluralElement();
}
@Override
public String getClassOrElementName() throws MappingException {
return wrappedInferredData.getClassOrElementName();

View File: InferredBasicValueResolution.java

@ -27,6 +27,7 @@ public class InferredBasicValueResolution<J,T> implements BasicValue.Resolution<
private final JdbcMapping jdbcMapping;
private final BasicType<J> legacyType;
private BasicType<?> updatedType;
public InferredBasicValueResolution(
JdbcMapping jdbcMapping,
@ -45,12 +46,12 @@ public class InferredBasicValueResolution<J,T> implements BasicValue.Resolution<
@Override
public JdbcMapping getJdbcMapping() {
return jdbcMapping;
return updatedType == null ? jdbcMapping : updatedType;
}
@Override
public BasicType<J> getLegacyResolvedBasicType() {
return legacyType;
return updatedType == null ? legacyType : (BasicType<J>) updatedType;
}
@Override
@ -65,17 +66,24 @@ public class InferredBasicValueResolution<J,T> implements BasicValue.Resolution<
@Override
public JdbcType getJdbcType() {
return jdbcType;
return updatedType == null ? jdbcType : updatedType.getJdbcType();
}
@Override
public BasicValueConverter<J,T> getValueConverter() {
//noinspection unchecked
return (BasicValueConverter<J, T>) jdbcMapping.getValueConverter();
return updatedType == null
? (BasicValueConverter<J, T>) jdbcMapping.getValueConverter()
: (BasicValueConverter<J, T>) updatedType.getValueConverter();
}
@Override
public MutabilityPlan<J> getMutabilityPlan() {
return mutabilityPlan;
}
@Override
public void updateResolution(BasicType<?> type) {
this.updatedType = type;
}
}

View File: ColumnOrderingStrategy.java

@ -14,11 +14,11 @@ import org.hibernate.dialect.temptable.TemporaryTableColumn;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Constraint;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UserDefinedType;
import org.hibernate.mapping.UserDefinedObjectType;
/**
* A pluggable contract that allows ordering of columns within {@link org.hibernate.mapping.Table},
* {@link org.hibernate.mapping.Constraint} and {@link org.hibernate.mapping.UserDefinedType}.
* {@link org.hibernate.mapping.Constraint} and {@link UserDefinedObjectType}.
* <p>
* An {@linkplain ColumnOrderingStrategy} may be selected using the configuration property
* {@value org.hibernate.cfg.AvailableSettings#COLUMN_ORDERING_STRATEGY}.
@ -46,7 +46,7 @@ public interface ColumnOrderingStrategy {
* Orders the columns of the user defined type.
* May return null if columns were not ordered.
*/
List<Column> orderUserDefinedTypeColumns(UserDefinedType userDefinedType, Metadata metadata);
List<Column> orderUserDefinedTypeColumns(UserDefinedObjectType userDefinedType, Metadata metadata);
/**
* Orders the columns of the temporary table.

View File: ColumnOrderingStrategyLegacy.java

@ -13,7 +13,7 @@ import org.hibernate.dialect.temptable.TemporaryTableColumn;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Constraint;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UserDefinedType;
import org.hibernate.mapping.UserDefinedObjectType;
/**
* A no-op implementation.
@ -32,7 +32,7 @@ public class ColumnOrderingStrategyLegacy implements ColumnOrderingStrategy {
}
@Override
public List<Column> orderUserDefinedTypeColumns(UserDefinedType userDefinedType, Metadata metadata) {
public List<Column> orderUserDefinedTypeColumns(UserDefinedObjectType userDefinedType, Metadata metadata) {
return null;
}

View File: ColumnOrderingStrategyStandard.java

@ -8,7 +8,6 @@ package org.hibernate.boot.model.relational;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
@ -21,7 +20,7 @@ import org.hibernate.mapping.Constraint;
import org.hibernate.mapping.PrimaryKey;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.mapping.UserDefinedType;
import org.hibernate.mapping.UserDefinedObjectType;
import static java.lang.Math.log;
import static org.hibernate.type.SqlTypes.*;
@ -42,7 +41,7 @@ public class ColumnOrderingStrategyStandard implements ColumnOrderingStrategy {
}
@Override
public List<Column> orderUserDefinedTypeColumns(UserDefinedType userDefinedType, Metadata metadata) {
public List<Column> orderUserDefinedTypeColumns(UserDefinedObjectType userDefinedType, Metadata metadata) {
return orderColumns( userDefinedType.getColumns(), metadata );
}

View File: Namespace.java

@ -20,6 +20,7 @@ import java.util.TreeMap;
import java.util.function.Function;
import org.hibernate.HibernateException;
import org.hibernate.Incubating;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.naming.PhysicalNamingStrategy;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
@ -29,7 +30,13 @@ import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.DenormalizedTable;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UserDefinedArrayType;
import org.hibernate.mapping.UserDefinedObjectType;
import org.hibernate.mapping.UserDefinedType;
import org.hibernate.type.BasicType;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.SqlTypedJdbcType;
/**
* Represents a namespace (named schema/catalog pair) with a Database and manages objects defined within.
@ -162,29 +169,50 @@ public class Namespace {
return sequence;
}
@Incubating
public Collection<UserDefinedType> getUserDefinedTypes() {
return udts.values();
}
@Incubating
public List<UserDefinedType> getDependencyOrderedUserDefinedTypes() {
final var orderedUdts = new LinkedHashMap<Identifier, UserDefinedType>( udts.size() );
final var udtDependencies = new HashMap<Identifier, Set<Identifier>>( udts.size() );
for ( var entry : udts.entrySet() ) {
final var dependencies = new HashSet<Identifier>();
final UserDefinedType udt = entry.getValue();
for ( Column udtColumn : udt.getColumns() ) {
if ( udtColumn.getSqlTypeCode() == Types.STRUCT ) {
final String structName = ( (AggregateColumn) udtColumn ).getComponent().getStructName();
dependencies.add( Identifier.toIdentifier( structName ) );
if ( udt instanceof UserDefinedObjectType ) {
for ( Column udtColumn : ( (UserDefinedObjectType) udt ).getColumns() ) {
final JdbcType jdbcType = ( (BasicType<?>) udtColumn.getValue().getType() ).getJdbcType();
if ( jdbcType instanceof SqlTypedJdbcType ) {
dependencies.add( Identifier.toIdentifier( ( (SqlTypedJdbcType) jdbcType ).getSqlTypeName() ) );
}
else if ( jdbcType instanceof ArrayJdbcType ) {
final JdbcType elementJdbcType = ( (ArrayJdbcType) jdbcType ).getElementJdbcType();
if ( elementJdbcType instanceof SqlTypedJdbcType ) {
dependencies.add( Identifier.toIdentifier( ( (SqlTypedJdbcType) elementJdbcType ).getSqlTypeName() ) );
}
}
}
if ( dependencies.isEmpty() ) {
// The UDTs without dependencies are added directly
orderedUdts.put( udt.getNameIdentifier(), udt );
}
else {
// For the rest we record the direct dependencies
udtDependencies.put( entry.getKey(), dependencies );
}
}
if ( dependencies.isEmpty() ) {
// The UDTs without dependencies are added directly
orderedUdts.put( udt.getNameIdentifier(), udt );
}
else {
// For the rest we record the direct dependencies
udtDependencies.put( entry.getKey(), dependencies );
else if ( udt instanceof UserDefinedArrayType ) {
final Identifier elementTypeName = Identifier.toIdentifier( ( (UserDefinedArrayType) udt ).getElementTypeName() );
if ( udts.get( elementTypeName ) instanceof UserDefinedObjectType ) {
dependencies.add( elementTypeName );
udtDependencies.put( entry.getKey(), dependencies );
}
else {
// No need to worry about dependency ordering with respect to types we don't know
orderedUdts.put( udt.getNameIdentifier(), udt );
}
}
}
// Traverse the dependency sets
@ -207,16 +235,31 @@ public class Namespace {
}
/**
* Returns the UDT with the specified logical UDT name.
* Returns the object UDT with the specified logical UDT name.
*
* @param logicalTypeName - the logical name of the UDT
*
* @return the table with the specified UDT name,
* or null if there is no table with the specified
* @return the object UDT with the specified UDT name,
* or null if there is no UDT with the specified
* UDT name.
*/
public UserDefinedType locateUserDefinedType(Identifier logicalTypeName) {
return udts.get( logicalTypeName );
@Incubating
public UserDefinedObjectType locateUserDefinedType(Identifier logicalTypeName) {
return (UserDefinedObjectType) udts.get( logicalTypeName );
}
/**
* Returns the array UDT with the specified logical UDT name.
*
* @param logicalTypeName - the logical name of the UDT
*
* @return the array UDT with the specified UDT name,
* or null if there is no UDT with the specified
* UDT name.
*/
@Incubating
public UserDefinedArrayType locateUserDefinedArrayType(Identifier logicalTypeName) {
return (UserDefinedArrayType) udts.get( logicalTypeName );
}
/**
@ -226,14 +269,35 @@ public class Namespace {
*
* @return the created UDT.
*/
public UserDefinedType createUserDefinedType(Identifier logicalTypeName, Function<Identifier,UserDefinedType> creator) {
@Incubating
public UserDefinedObjectType createUserDefinedType(Identifier logicalTypeName, Function<Identifier, UserDefinedObjectType> creator) {
final UserDefinedType existing = udts.get( logicalTypeName );
if ( existing != null ) {
return existing;
return (UserDefinedObjectType) existing;
}
final Identifier physicalTableName = physicalNamingStrategy.toPhysicalTypeName( logicalTypeName, jdbcEnvironment );
final UserDefinedType type = creator.apply( physicalTableName );
final UserDefinedObjectType type = creator.apply( physicalTableName );
udts.put( logicalTypeName, type );
return type;
}
/**
* Creates a mapping UDT instance.
*
* @param logicalTypeName The logical UDT name
*
* @return the created UDT.
*/
@Incubating
public UserDefinedArrayType createUserDefinedArrayType(Identifier logicalTypeName, Function<Identifier, UserDefinedArrayType> creator) {
final UserDefinedType existing = udts.get( logicalTypeName );
if ( existing != null ) {
return (UserDefinedArrayType) existing;
}
final Identifier physicalTableName = physicalNamingStrategy.toPhysicalTypeName( logicalTypeName, jdbcEnvironment );
final UserDefinedArrayType type = creator.apply( physicalTableName );
udts.put( logicalTypeName, type );
return type;

View File: PropertyData.java

@ -32,6 +32,11 @@ public interface PropertyData {
*/
XClass getClassOrElement() throws MappingException;
/**
* Returns the returned class itself or the element type if an array or collection
*/
XClass getClassOrPluralElement() throws MappingException;
/**
* Return the class itself
*/

View File: AbstractPostgreSQLStructJdbcType.java

@ -6,6 +6,7 @@
*/
package org.hibernate.dialect;
import java.lang.reflect.Array;
import java.sql.CallableStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
@ -32,14 +33,16 @@ import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.internal.EmbeddedAttributeMapping;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.spi.StringBuilderSqlAppender;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.IntegerJavaType;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.PrimitiveByteArrayJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.StructJdbcType;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsDate;
@ -55,7 +58,7 @@ import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithM
*
* @author Christian Beikov
*/
public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcType {
public abstract class AbstractPostgreSQLStructJdbcType implements StructJdbcType {
private static final DateTimeFormatter LOCAL_DATE_TIME;
static {
@ -111,7 +114,8 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
return SqlTypes.STRUCT;
}
public String getTypeName() {
@Override
public String getStructTypeName() {
return typeName;
}
@ -273,7 +277,7 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
private int deserializeStruct(
String string,
int begin,
int quoteLevel,
int quotes,
Object[] values,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
@ -285,9 +289,44 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
for ( int i = start; i < string.length(); i++ ) {
final char c = string.charAt( i );
switch ( c ) {
case '\\':
if ( inQuote ) {
final int expectedQuoteCount = 1 << quotes;
if ( repeatsChar( string, i, expectedQuoteCount, '\\' ) ) {
if ( isDoubleQuote( string, i + expectedQuoteCount, expectedQuoteCount ) ) {
// Skip quote escaping as that will be unescaped later
if ( escapingSb == null ) {
escapingSb = new StringBuilder();
}
escapingSb.append( string, start, i );
escapingSb.append( '"' );
// Move forward to the last quote
i += expectedQuoteCount + expectedQuoteCount - 1;
start = i + 1;
continue;
}
else {
assert repeatsChar( string, i + expectedQuoteCount, expectedQuoteCount, '\\' );
// Don't create an escaping string builder for binary literals
if ( i != start || !isBinary( column ) ) {
// Skip quote escaping as that will be unescaped later
if ( escapingSb == null ) {
escapingSb = new StringBuilder();
}
escapingSb.append( string, start, i );
escapingSb.append( '\\' );
start = i + expectedQuoteCount + expectedQuoteCount;
}
// Move forward to the last backslash
i += expectedQuoteCount + expectedQuoteCount - 1;
continue;
}
}
}
// Fall-through since a backslash is an escaping mechanism for a start quote within arrays
case '"':
if ( inQuote ) {
if ( repeatsChar( string, i, 1 << ( quoteLevel + 1 ), '"' ) ) {
if ( isDoubleQuote( string, i, 1 << ( quotes + 1 ) ) ) {
// Skip quote escaping as that will be unescaped later
if ( escapingSb == null ) {
escapingSb = new StringBuilder();
@ -295,11 +334,11 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
escapingSb.append( string, start, i );
escapingSb.append( '"' );
// Move forward to the last quote
i += ( 1 << ( quoteLevel + 1 ) ) - 1;
i += ( 1 << ( quotes + 1 ) ) - 1;
start = i + 1;
continue;
}
assert repeatsChar( string, i, 1 << quoteLevel, '"' );
assert isDoubleQuote( string, i, 1 << quotes );
final JdbcMapping jdbcMapping = getJdbcValueSelectable( column ).getJdbcMapping();
switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.DATE:
@ -363,7 +402,7 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
case SqlTypes.VARBINARY:
case SqlTypes.LONGVARBINARY:
case SqlTypes.LONG32VARBINARY:
final int backslashes = 1 << ( quoteLevel + 1 );
final int backslashes = 1 << ( quotes + 1 );
assert repeatsChar( string, start, backslashes, '\\' );
final int xCharPosition = start + backslashes;
assert string.charAt( xCharPosition ) == 'x';
@ -398,7 +437,7 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
column++;
inQuote = false;
// move forward the index by 2 ^ quoteLevel to point to the next char after the quote
i += 1 << quoteLevel;
i += 1 << quotes;
if ( string.charAt( i ) == ')' ) {
// Return the end position if this is the last element
assert column == values.length;
@ -409,8 +448,8 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
}
else {
// This is a start quote, so move forward the index to the last quote
final int expectedQuotes = Math.max( 1, 1 << quoteLevel );
assert repeatsChar( string, i, expectedQuotes, '"' );
final int expectedQuotes = 1 << quotes;
assert isDoubleQuote( string, i, expectedQuotes );
i += expectedQuotes - 1;
if ( string.charAt( i + 1 ) == '(' ) {
// This could be a nested struct
@ -422,7 +461,7 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
final int subEnd = structJdbcType.deserializeStruct(
string,
i + 1,
quoteLevel + 1,
quotes + 1,
subValues,
returnEmbeddable,
options
@ -453,7 +492,42 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
column++;
// The subEnd points to the first character after the ')',
// so move forward the index to point to the next char after quotes
assert repeatsChar( string, subEnd, expectedQuotes, '"' );
assert isDoubleQuote( string, subEnd, expectedQuotes );
i = subEnd + expectedQuotes;
if ( string.charAt( i ) == ')' ) {
// Return the end position if this is the last element
assert column == values.length;
return i + 1;
}
// at this point, we must see a comma to indicate the next element
assert string.charAt( i ) == ',';
}
else {
inQuote = true;
}
}
else if ( string.charAt( i + 1 ) == '{' ) {
// This could be a quoted array
final JdbcMapping jdbcMapping = getJdbcValueSelectable( column ).getJdbcMapping();
if ( jdbcMapping instanceof BasicPluralType<?, ?> ) {
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) jdbcMapping;
final ArrayList<Object> arrayList = new ArrayList<>();
//noinspection unchecked
final int subEnd = deserializeArray(
string,
i + 1,
quotes + 1,
arrayList,
(BasicType<Object>) pluralType.getElementType(),
returnEmbeddable,
options
);
assert string.charAt( subEnd - 1 ) == '}';
values[column] = pluralType.getJdbcJavaType().wrap( arrayList, options );
column++;
// The subEnd points to the first character after the ')',
// so move forward the index to point to the next char after quotes
assert isDoubleQuote( string, subEnd, expectedQuotes );
i = subEnd + expectedQuotes;
if ( string.charAt( i ) == ')' ) {
// Return the end position if this is the last element
@ -489,7 +563,7 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
}
else if ( jdbcMapping.getJavaTypeDescriptor().getJavaTypeClass().isEnum()
&& jdbcMapping.getJdbcType().isInteger() ) {
values[column] = fromRawObject(
values[column] = fromRawObject(
jdbcMapping,
IntegerJavaType.INSTANCE.fromEncodedString( string, start, i ),
options
@ -515,11 +589,412 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
values[column] = null;
}
else {
values[column] = fromString(
column,
final JdbcMapping jdbcMapping = getJdbcValueSelectable( column ).getJdbcMapping();
if ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() == SqlTypes.BOOLEAN ) {
values[column] = fromRawObject(
jdbcMapping,
string.charAt( start ) == 't',
options
);
}
else if ( jdbcMapping.getJavaTypeDescriptor().getJavaTypeClass().isEnum()
&& jdbcMapping.getJdbcType().isInteger() ) {
values[column] = fromRawObject(
jdbcMapping,
IntegerJavaType.INSTANCE.fromEncodedString( string, start, i ),
options
);
}
else {
values[column] = fromString(
jdbcMapping,
string,
start,
i
);
}
}
}
return i + 1;
}
break;
case '{':
if ( !inQuote ) {
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) getJdbcValueSelectable( column ).getJdbcMapping();
final ArrayList<Object> arrayList = new ArrayList<>();
//noinspection unchecked
i = deserializeArray(
string,
i,
quotes + 1,
arrayList,
(BasicType<Object>) pluralType.getElementType(),
returnEmbeddable,
options
);
assert string.charAt( i - 1 ) == '}';
values[column] = pluralType.getJdbcJavaType().wrap( arrayList, options );
column++;
if ( string.charAt( i ) == ')' ) {
// Return the end position if this is the last element
assert column == values.length;
return i + 1;
}
// at this point, we must see a comma to indicate the next element
assert string.charAt( i ) == ',';
start = i + 1;
}
break;
}
}
throw new IllegalArgumentException( "Struct not properly formed: " + string.substring( start ) );
}
private boolean isBinary(int column) {
return isBinary( getJdbcValueSelectable( column ).getJdbcMapping() );
}
private static boolean isBinary(JdbcMapping jdbcMapping) {
switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.BINARY:
case SqlTypes.VARBINARY:
case SqlTypes.LONGVARBINARY:
case SqlTypes.LONG32VARBINARY:
return true;
}
return false;
}
private int deserializeArray(
String string,
int begin,
int quotes,
ArrayList<Object> values,
BasicType<Object> elementType,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
boolean inQuote = false;
StringBuilder escapingSb = null;
assert string.charAt( begin ) == '{';
int start = begin + 1;
for ( int i = start; i < string.length(); i++ ) {
final char c = string.charAt( i );
switch ( c ) {
case '\\':
if ( inQuote ) {
final int expectedQuoteCount = 1 << quotes;
if ( repeatsChar( string, i, expectedQuoteCount, '\\' ) ) {
if ( isDoubleQuote( string, i + expectedQuoteCount, expectedQuoteCount ) ) {
// Skip quote escaping as that will be unescaped later
if ( escapingSb == null ) {
escapingSb = new StringBuilder();
}
escapingSb.append( string, start, i );
escapingSb.append( '"' );
// Move forward to the last quote
i += expectedQuoteCount + expectedQuoteCount - 1;
start = i + 1;
continue;
}
else {
assert repeatsChar( string, i + expectedQuoteCount, expectedQuoteCount, '\\' );
// Don't create an escaping string builder for binary literals
if ( i != start || !isBinary( elementType ) ) {
// Skip quote escaping as that will be unescaped later
if ( escapingSb == null ) {
escapingSb = new StringBuilder();
}
escapingSb.append( string, start, i );
escapingSb.append( '\\' );
start = i + expectedQuoteCount + expectedQuoteCount;
}
// Move forward to the last backslash
i += expectedQuoteCount + expectedQuoteCount - 1;
continue;
}
}
}
// Fall-through since a backslash is an escaping mechanism for a start quote within arrays
case '"':
if ( inQuote ) {
if ( isDoubleQuote( string, i, 1 << ( quotes + 1 ) ) ) {
// Skip quote escaping as that will be unescaped later
if ( escapingSb == null ) {
escapingSb = new StringBuilder();
}
escapingSb.append( string, start, i );
escapingSb.append( '"' );
// Move forward to the last quote
i += ( 1 << ( quotes + 1 ) ) - 1;
start = i + 1;
continue;
}
assert isDoubleQuote( string, i, 1 << quotes );
switch ( elementType.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.DATE:
values.add(
fromRawObject(
elementType,
parseDate(
CharSequenceHelper.subSequence(
string,
start,
i
)
),
options
)
);
break;
case SqlTypes.TIME:
case SqlTypes.TIME_WITH_TIMEZONE:
case SqlTypes.TIME_UTC:
values.add(
fromRawObject(
elementType,
parseTime(
CharSequenceHelper.subSequence(
string,
start,
i
)
),
options
)
);
break;
case SqlTypes.TIMESTAMP:
values.add(
fromRawObject(
elementType,
parseTimestamp(
CharSequenceHelper.subSequence(
string,
start,
i
),
elementType.getJdbcJavaType()
),
options
)
);
break;
case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
case SqlTypes.TIMESTAMP_UTC:
values.add(
fromRawObject(
elementType,
parseTimestampWithTimeZone(
CharSequenceHelper.subSequence(
string,
start,
i
),
elementType.getJdbcJavaType()
),
options
)
);
break;
case SqlTypes.BINARY:
case SqlTypes.VARBINARY:
case SqlTypes.LONGVARBINARY:
case SqlTypes.LONG32VARBINARY:
final int backslashes = 1 << ( quotes + 1 );
assert repeatsChar( string, start, backslashes, '\\' );
final int xCharPosition = start + backslashes;
assert string.charAt( xCharPosition ) == 'x';
values.add(
fromString(
elementType,
string,
xCharPosition + 1,
i
)
);
break;
default:
if ( escapingSb == null || escapingSb.length() == 0 ) {
values.add(
fromString(
elementType,
string,
start,
i
)
);
}
else {
escapingSb.append( string, start, i );
values.add(
fromString(
elementType,
escapingSb,
0,
escapingSb.length()
)
);
escapingSb.setLength( 0 );
}
break;
}
inQuote = false;
// move forward the index by 2 ^ quotes to point to the next char after the quote
i += 1 << quotes;
if ( string.charAt( i ) == '}' ) {
// Return the end position if this is the last element
return i + 1;
}
// at this point, we must see a comma to indicate the next element
assert string.charAt( i ) == ',';
}
else {
// This is a start quote, so move forward the index to the last quote
final int expectedQuotes = 1 << quotes;
assert isDoubleQuote( string, i, expectedQuotes );
i += expectedQuotes - 1;
if ( string.charAt( i + 1 ) == '(' ) {
// This could be a nested struct
if ( elementType.getJdbcType() instanceof AbstractPostgreSQLStructJdbcType ) {
final AbstractPostgreSQLStructJdbcType structJdbcType;
structJdbcType = (AbstractPostgreSQLStructJdbcType) elementType.getJdbcType();
final Object[] subValues = new Object[structJdbcType.embeddableMappingType.getJdbcValueCount()];
final int subEnd = structJdbcType.deserializeStruct(
string,
start,
i
i + 1,
quotes + 1,
subValues,
returnEmbeddable,
options
);
if ( returnEmbeddable ) {
final Object[] attributeValues = structJdbcType.getAttributeValues(
structJdbcType.embeddableMappingType,
structJdbcType.orderMapping,
subValues,
options
);
final Object subValue = structJdbcType.embeddableMappingType.getRepresentationStrategy()
.getInstantiator()
.instantiate( () -> attributeValues, options.getSessionFactory() );
values.add( subValue );
}
else {
if ( structJdbcType.inverseOrderMapping != null ) {
StructHelper.orderJdbcValues(
structJdbcType.embeddableMappingType,
structJdbcType.inverseOrderMapping,
subValues.clone(),
subValues
);
}
values.add( subValues );
}
// The subEnd points to the first character after the '}',
// so move forward the index to point to the next char after quotes
assert isDoubleQuote( string, subEnd, expectedQuotes );
i = subEnd + expectedQuotes;
if ( string.charAt( i ) == '}' ) {
// Return the end position if this is the last element
return i + 1;
}
// at this point, we must see a comma to indicate the next element
assert string.charAt( i ) == ',';
}
else {
inQuote = true;
}
}
else {
inQuote = true;
}
}
start = i + 1;
switch ( elementType.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.BINARY:
case SqlTypes.VARBINARY:
case SqlTypes.LONGVARBINARY:
case SqlTypes.LONG32VARBINARY:
// Skip past the backslashes in the binary literal, this will be handled later
final int backslashes = 1 << ( quotes + 1 );
assert repeatsChar( string, start, backslashes, '\\' );
i += backslashes;
break;
}
break;
case ',':
if ( !inQuote ) {
if ( start == i ) {
values.add( null );
}
else {
if ( elementType.getJdbcType().getDefaultSqlTypeCode() == SqlTypes.BOOLEAN ) {
values.add(
fromRawObject(
elementType,
string.charAt( start ) == 't',
options
)
);
}
else if ( elementType.getJavaTypeDescriptor().getJavaTypeClass().isEnum()
&& elementType.getJdbcType().isInteger() ) {
values.add(
fromRawObject(
elementType,
IntegerJavaType.INSTANCE.fromEncodedString( string, start, i ),
options
)
);
}
else {
values.add(
fromString(
elementType,
string,
start,
i
)
);
}
}
start = i + 1;
}
break;
case '}':
if ( !inQuote ) {
if ( start == i ) {
values.add( null );
}
else {
if ( elementType.getJdbcType().getDefaultSqlTypeCode() == SqlTypes.BOOLEAN ) {
values.add(
fromRawObject(
elementType,
string.charAt( start ) == 't',
options
)
);
}
else if ( elementType.getJavaTypeDescriptor().getJavaTypeClass().isEnum()
&& elementType.getJdbcType().isInteger() ) {
values.add(
fromRawObject(
elementType,
IntegerJavaType.INSTANCE.fromEncodedString( string, start, i ),
options
)
);
}
else {
values.add(
fromString(
elementType,
string,
start,
i
)
);
}
}
@ -529,7 +1004,7 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
}
}
throw new IllegalArgumentException( "Struct not properly formed: " + string.substring( start ) );
throw new IllegalArgumentException( "Array not properly formed: " + string.substring( start ) );
}
private SelectableMapping getJdbcValueSelectable(int jdbcValueSelectableIndex) {
@ -568,11 +1043,11 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
return embeddableMappingType.getJdbcValueSelectable( jdbcValueSelectableIndex );
}
private static boolean repeatsChar(String string, int start, int times, char c) {
private static boolean repeatsChar(String string, int start, int times, char expectedChar) {
final int end = start + times;
if ( end < string.length() ) {
for ( ; start < end; start++ ) {
if ( string.charAt( start ) != c ) {
if ( string.charAt( start ) != expectedChar ) {
return false;
}
}
@ -581,6 +1056,38 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
return false;
}
private static boolean isDoubleQuote(String string, int start, int escapes) {
if ( escapes == 1 ) {
return string.charAt( start ) == '"';
}
assert ( escapes & 1 ) == 0 : "Only an even number of escapes allowed";
final int end = start + escapes;
if ( end < string.length() ) {
for ( ; start < end; start += 2 ) {
final char c1 = string.charAt( start );
final char c2 = string.charAt( start + 1 );
switch ( c1 ) {
case '\\':
// After a backslash, another backslash or a double quote may follow
if ( c2 != '\\' && c2 != '"' ) {
return false;
}
break;
case '"':
// After a double quote, only another double quote may follow
if ( c2 != '"' ) {
return false;
}
break;
default:
return false;
}
}
return string.charAt( end - 1 ) == '"';
}
return false;
}
private Object fromString(
int selectableIndex,
String string,
@ -664,13 +1171,17 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
public Object[] extractJdbcValues(Object rawJdbcValue, WrapperOptions options) throws SQLException {
assert embeddableMappingType != null;
final Object[] array = new Object[embeddableMappingType.getJdbcValueCount()];
deserializeStruct( (String) rawJdbcValue, 0, 0, array, true, options );
deserializeStruct( getRawStructFromJdbcValue( rawJdbcValue ), 0, 0, array, true, options );
if ( inverseOrderMapping != null ) {
StructHelper.orderJdbcValues( embeddableMappingType, inverseOrderMapping, array.clone(), array );
}
return array;
}
protected String getRawStructFromJdbcValue(Object rawJdbcValue) {
return rawJdbcValue.toString();
}
protected <X> String toString(X value, JavaType<X> javaType, WrapperOptions options) {
if ( value == null ) {
return null;
@ -751,10 +1262,17 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
PostgreSQLAppender appender,
WrapperOptions options,
JdbcMapping jdbcMapping,
Object array) {
Object value) {
serializeConvertedBasicTo( appender, options, jdbcMapping, jdbcMapping.convertToRelationalValue( value ) );
}
private void serializeConvertedBasicTo(
PostgreSQLAppender appender,
WrapperOptions options,
JdbcMapping jdbcMapping,
Object subValue) {
//noinspection unchecked
final JavaType<Object> jdbcJavaType = (JavaType<Object>) jdbcMapping.getJdbcJavaType();
final Object subValue = jdbcMapping.convertToRelationalValue( array );
switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.TINYINT:
case SqlTypes.SMALLINT:
@ -830,11 +1348,9 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
byte[].class,
options
);
final int escapes = 1 << appender.quote;
appender.ensureCanFit( escapes + 1 + ( bytes.length << 1 ) );
for ( int i = 0; i < escapes; i++ ) {
appender.append( '\\' );
}
appender.ensureCanFit( appender.quote + 1 + ( bytes.length << 1 ) );
appender.append( '\\' );
appender.append( '\\' );
appender.append( 'x' );
PrimitiveByteArrayJavaType.INSTANCE.appendString(
appender,
@ -844,6 +1360,51 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
case SqlTypes.UUID:
appender.append( subValue.toString() );
break;
case SqlTypes.ARRAY:
if ( subValue != null ) {
final int length = Array.getLength( subValue );
if ( length == 0 ) {
appender.append( "{}" );
}
else {
//noinspection unchecked
final BasicType<Object> elementType = ((BasicPluralType<?, Object>) jdbcMapping).getElementType();
appender.quoteStart();
appender.append( '{' );
Object arrayElement = Array.get( subValue, 0 );
if ( arrayElement == null ) {
appender.appendNull();
}
else {
serializeConvertedBasicTo( appender, options, elementType, arrayElement );
}
for ( int i = 1; i < length; i++ ) {
arrayElement = Array.get( subValue, i );
appender.append( ',' );
if ( arrayElement == null ) {
appender.appendNull();
}
else {
serializeConvertedBasicTo( appender, options, elementType, arrayElement );
}
}
appender.append( '}' );
appender.quoteEnd();
}
}
break;
case SqlTypes.STRUCT:
if ( subValue != null ) {
final AbstractPostgreSQLStructJdbcType structJdbcType = (AbstractPostgreSQLStructJdbcType) jdbcMapping.getJdbcType();
final EmbeddableMappingType subEmbeddableMappingType = structJdbcType.getEmbeddableMappingType();
final Object[] array = subEmbeddableMappingType.getValues( subValue );
appender.quoteStart();
structJdbcType.serializeValuesTo( appender, options, subEmbeddableMappingType, array, '(' );
appender.append( ')' );
appender.quoteEnd();
}
break;
default:
throw new UnsupportedOperationException( "Unsupported JdbcType nested in struct: " + jdbcMapping.getJdbcType() );
}
@ -1028,6 +1589,10 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
append( '"' );
}
public void appendNull() {
sb.append( "NULL" );
}
@Override
public PostgreSQLAppender append(char fragment) {
if ( quote != 1 ) {
@ -1060,15 +1625,13 @@ public abstract class AbstractPostgreSQLStructJdbcType implements AggregateJdbcT
}
private void appendWithQuote(char fragment) {
if ( fragment == '"' ) {
if ( fragment == '"' || fragment == '\\' ) {
sb.ensureCapacity( sb.length() + quote );
for ( int i = 0; i < quote; i++ ) {
sb.append( '"' );
for ( int i = 1; i < quote; i++ ) {
sb.append( '\\' );
}
}
else {
sb.append( fragment );
}
sb.append( fragment );
}
public void ensureCanFit(int lengthIncrease) {

View File

@ -23,6 +23,7 @@ import org.hibernate.type.descriptor.java.spi.UnknownBasicJavaType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.StructJdbcType;
import org.hibernate.type.spi.TypeConfiguration;
/**
@ -38,7 +39,7 @@ import org.hibernate.type.spi.TypeConfiguration;
*
* @author Christian Beikov
*/
public class DB2StructJdbcType implements AggregateJdbcType {
public class DB2StructJdbcType implements StructJdbcType {
public static final DB2StructJdbcType INSTANCE = new DB2StructJdbcType();
@ -82,6 +83,7 @@ public class DB2StructJdbcType implements AggregateJdbcType {
return embeddableMappingType;
}
@Override
public String getStructTypeName() {
return structTypeName;
}

View File

@ -8,12 +8,20 @@ package org.hibernate.dialect;
import java.io.OutputStream;
import java.lang.reflect.Array;
import java.sql.SQLException;
import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;
import java.util.AbstractCollection;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Objects;
import org.hibernate.Internal;
import org.hibernate.internal.util.CharSequenceHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.metamodel.mapping.AttributeMapping;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
@ -21,6 +29,7 @@ import org.hibernate.metamodel.mapping.MappingType;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.internal.EmbeddedAttributeMapping;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.WrapperOptions;
@ -111,120 +120,172 @@ public class JsonHelper {
else if ( mappedType instanceof BasicType<?> ) {
//noinspection unchecked
final BasicType<Object> basicType = (BasicType<Object>) mappedType;
//noinspection unchecked
final JavaType<Object> javaType = (JavaType<Object>) basicType.getJdbcJavaType();
value = basicType.convertToRelationalValue( value );
switch ( basicType.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.TINYINT:
case SqlTypes.SMALLINT:
case SqlTypes.INTEGER:
if ( value instanceof Boolean ) {
// BooleanJavaType has this as an implicit conversion
appender.append( (Boolean) value ? '1' : '0' );
break;
}
if ( value instanceof Enum ) {
appender.appendSql( ((Enum<?>) value).ordinal() );
break;
}
case SqlTypes.BOOLEAN:
case SqlTypes.BIT:
case SqlTypes.BIGINT:
case SqlTypes.FLOAT:
case SqlTypes.REAL:
case SqlTypes.DOUBLE:
// These types fit into the native representation of JSON, so let's use that
javaType.appendEncodedString( appender, value );
break;
case SqlTypes.CHAR:
case SqlTypes.NCHAR:
case SqlTypes.VARCHAR:
case SqlTypes.NVARCHAR:
if ( value instanceof Boolean ) {
// BooleanJavaType has this as an implicit conversion
appender.append( '"' );
appender.append( (Boolean) value ? 'Y' : 'N' );
appender.append( '"' );
break;
}
case SqlTypes.LONGVARCHAR:
case SqlTypes.LONGNVARCHAR:
case SqlTypes.LONG32VARCHAR:
case SqlTypes.LONG32NVARCHAR:
case SqlTypes.ENUM:
case SqlTypes.NAMED_ENUM:
// These literals can contain the '"' character, so we need to escape it
appender.append( '"' );
appender.startEscaping();
javaType.appendEncodedString( appender, value );
appender.endEscaping();
appender.append( '"' );
break;
case SqlTypes.DATE:
appender.append( '"' );
JdbcDateJavaType.INSTANCE.appendEncodedString(
appender,
javaType.unwrap( value, java.sql.Date.class, options )
);
appender.append( '"' );
break;
case SqlTypes.TIME:
case SqlTypes.TIME_WITH_TIMEZONE:
case SqlTypes.TIME_UTC:
appender.append( '"' );
JdbcTimeJavaType.INSTANCE.appendEncodedString(
appender,
javaType.unwrap( value, java.sql.Time.class, options )
);
appender.append( '"' );
break;
case SqlTypes.TIMESTAMP:
appender.append( '"' );
JdbcTimestampJavaType.INSTANCE.appendEncodedString(
appender,
javaType.unwrap( value, java.sql.Timestamp.class, options )
);
appender.append( '"' );
break;
case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
case SqlTypes.TIMESTAMP_UTC:
appender.append( '"' );
DateTimeFormatter.ISO_OFFSET_DATE_TIME.formatTo( javaType.unwrap( value, OffsetDateTime.class, options ), appender );
appender.append( '"' );
break;
case SqlTypes.DECIMAL:
case SqlTypes.NUMERIC:
case SqlTypes.DURATION:
case SqlTypes.UUID:
// These types need to be serialized as JSON string, but don't have a need for escaping
appender.append( '"' );
javaType.appendEncodedString( appender, value );
appender.append( '"' );
break;
case SqlTypes.BINARY:
case SqlTypes.VARBINARY:
case SqlTypes.LONGVARBINARY:
case SqlTypes.LONG32VARBINARY:
// These types need to be serialized as JSON string, and for efficiency uses appendString directly
appender.append( '"' );
appender.write(
javaType.unwrap(
value,
byte[].class,
options
)
);
appender.append( '"' );
break;
default:
throw new UnsupportedOperationException( "Unsupported JdbcType nested in JSON: " + basicType.getJdbcType() );
}
convertedBasicValueToString( basicType.convertToRelationalValue( value ), options, appender, basicType );
}
else {
throw new UnsupportedOperationException( "Support for mapping type not yet implemented: " + mappedType.getClass().getName() );
}
}
private static void convertedValueToString(
MappingType mappedType,
Object value,
WrapperOptions options,
JsonAppender appender) {
if ( value == null ) {
appender.append( "null" );
}
else if ( mappedType instanceof EmbeddableMappingType ) {
toString( (EmbeddableMappingType) mappedType, value, options, appender );
}
else if ( mappedType instanceof BasicType<?> ) {
//noinspection unchecked
final BasicType<Object> basicType = (BasicType<Object>) mappedType;
convertedBasicValueToString( value, options, appender, basicType );
}
else {
throw new UnsupportedOperationException( "Support for mapping type not yet implemented: " + mappedType.getClass().getName() );
}
}
private static void convertedBasicValueToString(
Object value,
WrapperOptions options,
JsonAppender appender,
BasicType<Object> basicType) {
//noinspection unchecked
final JavaType<Object> javaType = (JavaType<Object>) basicType.getJdbcJavaType();
switch ( basicType.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.TINYINT:
case SqlTypes.SMALLINT:
case SqlTypes.INTEGER:
if ( value instanceof Boolean ) {
// BooleanJavaType has this as an implicit conversion
appender.append( (Boolean) value ? '1' : '0' );
break;
}
if ( value instanceof Enum ) {
appender.appendSql( ((Enum<?>) value).ordinal() );
break;
}
case SqlTypes.BOOLEAN:
case SqlTypes.BIT:
case SqlTypes.BIGINT:
case SqlTypes.FLOAT:
case SqlTypes.REAL:
case SqlTypes.DOUBLE:
// These types fit into the native representation of JSON, so let's use that
javaType.appendEncodedString( appender, value );
break;
case SqlTypes.CHAR:
case SqlTypes.NCHAR:
case SqlTypes.VARCHAR:
case SqlTypes.NVARCHAR:
if ( value instanceof Boolean ) {
// BooleanJavaType has this as an implicit conversion
appender.append( '"' );
appender.append( (Boolean) value ? 'Y' : 'N' );
appender.append( '"' );
break;
}
case SqlTypes.LONGVARCHAR:
case SqlTypes.LONGNVARCHAR:
case SqlTypes.LONG32VARCHAR:
case SqlTypes.LONG32NVARCHAR:
case SqlTypes.CLOB:
case SqlTypes.MATERIALIZED_CLOB:
case SqlTypes.NCLOB:
case SqlTypes.MATERIALIZED_NCLOB:
case SqlTypes.ENUM:
case SqlTypes.NAMED_ENUM:
// These literals can contain the '"' character, so we need to escape it
appender.append( '"' );
appender.startEscaping();
javaType.appendEncodedString( appender, value );
appender.endEscaping();
appender.append( '"' );
break;
case SqlTypes.DATE:
appender.append( '"' );
JdbcDateJavaType.INSTANCE.appendEncodedString(
appender,
javaType.unwrap( value, java.sql.Date.class, options )
);
appender.append( '"' );
break;
case SqlTypes.TIME:
case SqlTypes.TIME_WITH_TIMEZONE:
case SqlTypes.TIME_UTC:
appender.append( '"' );
JdbcTimeJavaType.INSTANCE.appendEncodedString(
appender,
javaType.unwrap( value, java.sql.Time.class, options )
);
appender.append( '"' );
break;
case SqlTypes.TIMESTAMP:
appender.append( '"' );
JdbcTimestampJavaType.INSTANCE.appendEncodedString(
appender,
javaType.unwrap( value, java.sql.Timestamp.class, options )
);
appender.append( '"' );
break;
case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
case SqlTypes.TIMESTAMP_UTC:
appender.append( '"' );
DateTimeFormatter.ISO_OFFSET_DATE_TIME.formatTo(
javaType.unwrap( value, OffsetDateTime.class, options ),
appender
);
appender.append( '"' );
break;
case SqlTypes.DECIMAL:
case SqlTypes.NUMERIC:
case SqlTypes.DURATION:
case SqlTypes.UUID:
// These types need to be serialized as a JSON string, but do not require escaping
appender.append( '"' );
javaType.appendEncodedString( appender, value );
appender.append( '"' );
break;
case SqlTypes.BINARY:
case SqlTypes.VARBINARY:
case SqlTypes.LONGVARBINARY:
case SqlTypes.LONG32VARBINARY:
case SqlTypes.BLOB:
case SqlTypes.MATERIALIZED_BLOB:
// These types need to be serialized as a JSON string; for efficiency, the bytes are written to the appender directly
appender.append( '"' );
appender.write(
javaType.unwrap(
value,
byte[].class,
options
)
);
appender.append( '"' );
break;
case SqlTypes.ARRAY:
final int length = Array.getLength( value );
appender.append( '[' );
if ( length != 0 ) {
final BasicType<Object> elementType = ( (BasicPluralType<?, Object>) basicType ).getElementType();
Object arrayElement = Array.get( value, 0 );
convertedValueToString( elementType, arrayElement, options, appender );
for ( int i = 1; i < length; i++ ) {
arrayElement = Array.get( value, i );
appender.append( ',' );
convertedValueToString( elementType, arrayElement, options, appender );
}
}
appender.append( ']' );
break;
default:
throw new UnsupportedOperationException( "Unsupported JdbcType nested in JSON: " + basicType.getJdbcType() );
}
}
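
As a rough illustration of what the SqlTypes.ARRAY branch above writes into the aggregate JSON, the following self-contained sketch (hypothetical names, not Hibernate's JsonHelper) applies the same surface rules to an Object[]: numbers and booleans use the native JSON representation, strings are quoted with '"' and '\' escaped, and a null element becomes JSON null.

// Illustrative only: a tiny JSON array writer mirroring the rules of the
// SqlTypes.ARRAY branch above.
public class JsonArraySketch {
	static String toJsonArray(Object[] elements) {
		final StringBuilder sb = new StringBuilder( "[" );
		for ( int i = 0; i < elements.length; i++ ) {
			if ( i > 0 ) {
				sb.append( ',' );
			}
			final Object element = elements[i];
			if ( element == null ) {
				sb.append( "null" );
			}
			else if ( element instanceof Number || element instanceof Boolean ) {
				sb.append( element );
			}
			else {
				sb.append( '"' );
				sb.append( element.toString().replace( "\\", "\\\\" ).replace( "\"", "\\\"" ) );
				sb.append( '"' );
			}
		}
		return sb.append( ']' ).toString();
	}
	public static void main(String[] args) {
		// Prints: [1,"a\"b",null,true]
		System.out.println( toJsonArray( new Object[] { 1, "a\"b", null, true } ) );
	}
}
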
public static <X> X fromString(
EmbeddableMappingType embeddableMappingType,
String string,
@ -401,9 +462,41 @@ public class JsonHelper {
throw syntaxError( string, s, i );
}
break;
// todo: add support for arrays
// case '[':
// case ']':
case '[':
switch ( s ) {
case KEY_QUOTE:
// A '[' inside a quoted key is acceptable
case VALUE_QUOTE:
// In the value it's fine
break;
case VALUE_START:
final SelectableMapping selectable = embeddableMappingType.getJdbcValueSelectable(
selectableIndex
);
final JdbcMapping jdbcMapping = selectable.getJdbcMapping();
if ( !( jdbcMapping instanceof BasicPluralType<?, ?> ) ) {
throw new IllegalArgumentException(
String.format(
"JSON starts array for a non-plural type at index %d. Selectable [%s] is of type [%s]",
i,
selectable.getSelectableName(),
jdbcMapping.getJdbcType().getClass().getName()
)
);
}
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) jdbcMapping;
final BasicType<?> elementType = pluralType.getElementType();
final CustomArrayList arrayList = new CustomArrayList();
i = fromArrayString( string, returnEmbeddable, options, i, arrayList, elementType ) - 1;
assert string.charAt( i ) == ']';
values[selectableIndex] = pluralType.getJdbcJavaType().wrap( arrayList, options );
s = State.VALUE_END;
selectableIndex = -1;
break;
default:
throw syntaxError( string, s, i );
}
break;
case '}':
switch ( s ) {
case KEY_QUOTE:
@ -434,7 +527,7 @@ public class JsonHelper {
string,
i,
values,
embeddableMappingType,
embeddableMappingType.getJdbcValueSelectable( selectableIndex ).getJdbcMapping(),
selectableIndex,
returnEmbeddable,
options
@ -464,11 +557,179 @@ public class JsonHelper {
throw new IllegalArgumentException( "JSON not properly formed: " + string.subSequence( start, end ) );
}
private static int fromArrayString(
String string,
boolean returnEmbeddable,
WrapperOptions options,
int begin,
CustomArrayList arrayList,
BasicType<?> elementType) throws SQLException {
boolean hasEscape = false;
assert string.charAt( begin ) == '[';
int start = begin + 1;
State s = State.VALUE_START;
// The following parsing logic assumes the JSON is well-formed,
// but for the sake of the Java compiler's flow analysis,
// and hopefully for better readability, it throws on certain syntax errors
for ( int i = start; i < string.length(); i++ ) {
final char c = string.charAt( i );
switch ( c ) {
case '\\':
assert s == State.VALUE_QUOTE;
hasEscape = true;
i++;
break;
case '"':
switch ( s ) {
case VALUE_START:
s = State.VALUE_QUOTE;
start = i + 1;
hasEscape = false;
break;
case VALUE_QUOTE:
s = State.VALUE_END;
arrayList.add(
fromString(
elementType,
string,
start,
i,
hasEscape,
returnEmbeddable,
options
)
);
start = -1;
hasEscape = false;
break;
default:
throw syntaxError( string, s, i );
}
break;
case ',':
switch ( s ) {
case VALUE_QUOTE:
// In the value it's fine
break;
case VALUE_END:
s = State.VALUE_START;
break;
default:
throw syntaxError( string, s, i );
}
break;
case '{':
switch ( s ) {
case VALUE_QUOTE:
// In the value it's fine
break;
// case VALUE_START:
// final SelectableMapping selectable = embeddableMappingType.getJdbcValueSelectable(
// selectableIndex
// );
// if ( !( selectable.getJdbcMapping().getJdbcType() instanceof AggregateJdbcType ) ) {
// throw new IllegalArgumentException(
// String.format(
// "JSON starts sub-object for a non-aggregate type at index %d. Selectable [%s] is of type [%s]",
// i,
// selectable.getSelectableName(),
// selectable.getJdbcMapping().getJdbcType().getClass().getName()
// )
// );
// }
// final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) selectable.getJdbcMapping().getJdbcType();
// final EmbeddableMappingType subMappingType = aggregateJdbcType.getEmbeddableMappingType();
// // This encoding is only possible if the JDBC type is JSON again
// assert aggregateJdbcType.getJdbcTypeCode() == SqlTypes.JSON
// || aggregateJdbcType.getDefaultSqlTypeCode() == SqlTypes.JSON;
// final Object[] subValues = new Object[subMappingType.getJdbcValueCount()];
// i = fromString( subMappingType, string, i, end, subValues, returnEmbeddable, options ) - 1;
// assert string.charAt( i ) == '}';
// if ( returnEmbeddable ) {
// final Object[] attributeValues = StructHelper.getAttributeValues(
// subMappingType,
// subValues,
// options
// );
// values[selectableIndex] = embeddableMappingType.getRepresentationStrategy()
// .getInstantiator()
// .instantiate(
// () -> attributeValues,
// options.getSessionFactory()
// );
// }
// else {
// values[selectableIndex] = subValues;
// }
// s = State.VALUE_END;
// selectableIndex = -1;
// break;
default:
throw syntaxError( string, s, i );
}
break;
case ']':
switch ( s ) {
case VALUE_QUOTE:
// In the value it's fine
break;
case VALUE_END:
// At this point, we are done
return i + 1;
default:
throw syntaxError( string, s, i );
}
break;
default:
switch ( s ) {
case VALUE_QUOTE:
// Inside a quoted value, all characters are fine
break;
case VALUE_START:
// Skip whitespace
if ( Character.isWhitespace( c ) ) {
break;
}
final int elementIndex = arrayList.size();
arrayList.add( null );
// Here we also allow certain literals
final int endIdx = consumeLiteral(
string,
i,
arrayList.getUnderlyingArray(),
elementType,
elementIndex,
returnEmbeddable,
options
);
if ( endIdx != -1 ) {
i = endIdx;
s = State.VALUE_END;
start = -1;
break;
}
throw syntaxError( string, s, i );
case VALUE_END:
// Only whitespace is allowed here
if ( Character.isWhitespace( c ) ) {
break;
}
default:
throw syntaxError( string, s, i );
}
break;
}
}
throw new IllegalArgumentException( "JSON not properly formed: " + string.subSequence( start, string.length() ) );
}
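
The following stripped-down sketch (hypothetical names; escape sequences and non-string literals other than null are omitted) mirrors the three-state scan that fromArrayString performs: VALUE_START before an element, VALUE_QUOTE inside a quoted element, VALUE_END after an element, terminating on the closing ']'.

// Simplified illustration of the array-scanning state machine above,
// handling only quoted strings and the null literal.
import java.util.ArrayList;
import java.util.List;
public class JsonArrayScanSketch {
	enum State { VALUE_START, VALUE_QUOTE, VALUE_END }
	static List<String> parse(String json) {
		assert json.charAt( 0 ) == '[';
		final List<String> result = new ArrayList<>();
		State state = State.VALUE_START;
		int start = -1;
		for ( int i = 1; i < json.length(); i++ ) {
			final char c = json.charAt( i );
			switch ( c ) {
				case '"':
					if ( state == State.VALUE_START ) {
						state = State.VALUE_QUOTE;
						start = i + 1;
					}
					else if ( state == State.VALUE_QUOTE ) {
						result.add( json.substring( start, i ) );
						state = State.VALUE_END;
					}
					break;
				case ',':
					if ( state == State.VALUE_END ) {
						state = State.VALUE_START;
					}
					break;
				case ']':
					if ( state != State.VALUE_QUOTE ) {
						// The closing bracket ends the scan
						return result;
					}
					break;
				default:
					if ( state == State.VALUE_START && c == 'n' && json.startsWith( "null", i ) ) {
						result.add( null );
						i += 3;
						state = State.VALUE_END;
					}
					break;
			}
		}
		throw new IllegalArgumentException( "JSON not properly formed: " + json );
	}
	public static void main(String[] args) {
		// Prints: [a, null, b]
		System.out.println( parse( "[\"a\",null,\"b\"]" ) );
	}
}
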
private static int consumeLiteral(
String string,
int start,
Object[] values,
EmbeddableMappingType embeddableMappingType,
JdbcMapping jdbcMapping,
int selectableIndex,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
@ -494,7 +755,7 @@ public class JsonHelper {
start,
start + 1,
values,
embeddableMappingType,
jdbcMapping,
selectableIndex,
returnEmbeddable,
options
@ -506,15 +767,14 @@ public class JsonHelper {
start,
start + 1,
values,
embeddableMappingType,
jdbcMapping,
selectableIndex,
returnEmbeddable,
options
);
}
values[selectableIndex] = fromString(
embeddableMappingType,
selectableIndex,
jdbcMapping,
string,
start,
start + 1,
@ -551,7 +811,7 @@ public class JsonHelper {
start,
i,
values,
embeddableMappingType,
jdbcMapping,
selectableIndex,
returnEmbeddable,
options
@ -563,7 +823,7 @@ public class JsonHelper {
start,
i,
values,
embeddableMappingType,
jdbcMapping,
selectableIndex,
returnEmbeddable,
options
@ -581,8 +841,7 @@ public class JsonHelper {
break;
default:
values[selectableIndex] = fromString(
embeddableMappingType,
selectableIndex,
jdbcMapping,
string,
start,
i,
@ -602,7 +861,7 @@ public class JsonHelper {
int start,
int dotIndex,
Object[] values,
EmbeddableMappingType embeddableMappingType,
JdbcMapping jdbcMapping,
int selectableIndex,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
@ -616,7 +875,7 @@ public class JsonHelper {
start,
i,
values,
embeddableMappingType,
jdbcMapping,
selectableIndex,
returnEmbeddable,
options
@ -634,8 +893,7 @@ public class JsonHelper {
break;
default:
values[selectableIndex] = fromString(
embeddableMappingType,
selectableIndex,
jdbcMapping,
string,
start,
i,
@ -653,7 +911,7 @@ public class JsonHelper {
int start,
int eIndex,
Object[] values,
EmbeddableMappingType embeddableMappingType,
JdbcMapping jdbcMapping,
int selectableIndex,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
@ -680,8 +938,7 @@ public class JsonHelper {
break;
default:
values[selectableIndex] = fromString(
embeddableMappingType,
selectableIndex,
jdbcMapping,
string,
start,
i,
@ -741,18 +998,6 @@ public class JsonHelper {
return selectableIndex;
}
private static Object fromString(
EmbeddableMappingType embeddableMappingType,
int selectableIndex,
String string,
int start,
int end,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
final SelectableMapping selectableMapping = embeddableMappingType.getJdbcValueSelectable( selectableIndex );
return fromString( selectableMapping.getJdbcMapping(), string, start, end, returnEmbeddable, options );
}
private static Object fromString(
JdbcMapping jdbcMapping,
String string,
@ -837,19 +1082,22 @@ public class JsonHelper {
case SqlTypes.TINYINT:
case SqlTypes.SMALLINT:
case SqlTypes.INTEGER:
Class<?> javaTypeClass = jdbcMapping.getJavaTypeDescriptor().getJavaTypeClass();
if ( javaTypeClass == Boolean.class ) {
// BooleanJavaType has this as an implicit conversion
return Integer.parseInt( string, start , end, 10 ) == 1;
}
if ( javaTypeClass.isEnum() ) {
return javaTypeClass.getEnumConstants()[Integer.parseInt( string, start , end, 10 )];
if ( jdbcMapping.getValueConverter() == null ) {
Class<?> javaTypeClass = jdbcMapping.getJavaTypeDescriptor().getJavaTypeClass();
if ( javaTypeClass == Boolean.class ) {
// BooleanJavaType has this as an implicit conversion
return Integer.parseInt( string, start, end, 10 ) == 1;
}
if ( javaTypeClass.isEnum() ) {
return javaTypeClass.getEnumConstants()[Integer.parseInt( string, start, end, 10 )];
}
}
case SqlTypes.CHAR:
case SqlTypes.NCHAR:
case SqlTypes.VARCHAR:
case SqlTypes.NVARCHAR:
if ( jdbcMapping.getJavaTypeDescriptor().getJavaTypeClass() == Boolean.class ) {
if ( jdbcMapping.getValueConverter() == null
&& jdbcMapping.getJavaTypeDescriptor().getJavaTypeClass() == Boolean.class ) {
// BooleanJavaType has this as an implicit conversion
return end == start + 1 && string.charAt( start ) == 'Y';
}
@ -1115,4 +1363,88 @@ public class JsonHelper {
}
private static class CustomArrayList extends AbstractCollection<Object> implements Collection<Object> {
Object[] array = ArrayHelper.EMPTY_OBJECT_ARRAY;
int size;
public void ensureCapacity(int minCapacity) {
int oldCapacity = array.length;
if ( minCapacity > oldCapacity ) {
int newCapacity = oldCapacity + ( oldCapacity >> 1 );
newCapacity = Math.max( Math.max( newCapacity, minCapacity ), 10 );
array = Arrays.copyOf( array, newCapacity );
}
}
public Object[] getUnderlyingArray() {
return array;
}
@Override
public int size() {
return size;
}
@Override
public boolean add(Object o) {
if ( size == array.length ) {
ensureCapacity( size + 1 );
}
array[size++] = o;
return true;
}
@Override
public boolean isEmpty() {
return size == 0;
}
@Override
public boolean contains(Object o) {
for ( int i = 0; i < size; i++ ) {
if ( Objects.equals( o, array[i] ) ) {
return true;
}
}
return false;
}
@Override
public Iterator<Object> iterator() {
return new Iterator<>() {
int index;
@Override
public boolean hasNext() {
return index != size;
}
@Override
public Object next() {
if ( index == size ) {
throw new NoSuchElementException();
}
return array[index++];
}
};
}
@Override
public Object[] toArray() {
return Arrays.copyOf( array, size );
}
@Override
public <T> T[] toArray(T[] a) {
//noinspection unchecked
final T[] r = a.length >= size
? a
: (T[]) java.lang.reflect.Array.newInstance( a.getClass().getComponentType(), size );
for ( int i = 0; i < size; i++ ) {
//noinspection unchecked
r[i] = (T) array[i];
}
return r;
}
}
}
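
A small, hypothetical driver that simply replays the growth policy of CustomArrayList.ensureCapacity as written above: the capacity grows by roughly 1.5x, bounded below by the requested minimum and by 10.

// Not part of the commit; just demonstrates the resulting capacity sequence.
public class GrowthSketch {
	public static void main(String[] args) {
		int capacity = 0;
		for ( int size = 0; size < 30; size++ ) {
			if ( size == capacity ) {
				final int grown = capacity + ( capacity >> 1 );
				capacity = Math.max( Math.max( grown, size + 1 ), 10 );
				System.out.println( "grow to " + capacity + " at size " + size );
			}
		}
		// Prints:
		// grow to 10 at size 0
		// grow to 15 at size 10
		// grow to 22 at size 15
		// grow to 33 at size 22
	}
}
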

View File

@ -6,7 +6,6 @@
*/
package org.hibernate.dialect;
import java.lang.reflect.Array;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@ -15,12 +14,16 @@ import java.sql.Types;
import java.util.Locale;
import org.hibernate.HibernateException;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Database;
import org.hibernate.boot.model.relational.NamedAuxiliaryDatabaseObject;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.mapping.UserDefinedArrayType;
import org.hibernate.type.BasicType;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.converter.spi.BasicValueConverter;
import org.hibernate.type.descriptor.converter.spi.JpaAttributeConverter;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
@ -28,13 +31,14 @@ import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.SqlTypedJdbcType;
import org.hibernate.type.descriptor.jdbc.StructJdbcType;
import org.hibernate.type.internal.BasicTypeImpl;
import org.hibernate.type.spi.TypeConfiguration;
import oracle.jdbc.OracleConnection;
import static java.sql.Types.ARRAY;
import static java.util.Collections.emptySet;
/**
* Descriptor for {@link Types#ARRAY ARRAY} handling.
@ -42,16 +46,19 @@ import static java.util.Collections.emptySet;
* @author Christian Beikov
* @author Jordan Gigov
*/
public class OracleArrayJdbcType extends ArrayJdbcType {
public class OracleArrayJdbcType extends ArrayJdbcType implements SqlTypedJdbcType {
private final String typeName;
private final String upperTypeName;
public OracleArrayJdbcType(JdbcType elementJdbcType, String typeName) {
super( elementJdbcType );
this.typeName = typeName;
this.upperTypeName = typeName == null ? null : typeName.toUpperCase( Locale.ROOT );
}
public String getTypeName() {
@Override
public String getSqlTypeName() {
return typeName;
}
@ -62,12 +69,12 @@ public class OracleArrayJdbcType extends ArrayJdbcType {
@Override
public <X> ValueBinder<X> getBinder(final JavaType<X> javaTypeDescriptor) {
//noinspection unchecked
final BasicPluralJavaType<X> containerJavaType = (BasicPluralJavaType<X>) javaTypeDescriptor;
return new BasicBinder<>( javaTypeDescriptor, this ) {
private String typeName(WrapperOptions options) {
return ( typeName == null ? getTypeName( options, containerJavaType ) : typeName )
.toUpperCase(Locale.ROOT);
return ( upperTypeName == null
? getTypeName( options, (BasicPluralJavaType<?>) getJavaType(), (ArrayJdbcType) getJdbcType() ).toUpperCase( Locale.ROOT )
: upperTypeName
);
}
@Override
protected void doBindNull(PreparedStatement st, int index, WrapperOptions options) throws SQLException {
@ -81,13 +88,13 @@ public class OracleArrayJdbcType extends ArrayJdbcType {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options) throws SQLException {
st.setArray( index, getArray( value, containerJavaType, options ) );
st.setArray( index, getBindValue( value, options ) );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
final java.sql.Array arr = getArray( value, containerJavaType, options );
final java.sql.Array arr = getBindValue( value, options );
try {
st.setObject( name, arr, ARRAY );
}
@ -96,15 +103,10 @@ public class OracleArrayJdbcType extends ArrayJdbcType {
}
}
private java.sql.Array getArray(X value, BasicPluralJavaType<X> containerJavaType, WrapperOptions options)
throws SQLException {
//noinspection unchecked
final Class<Object[]> arrayClass = (Class<Object[]>) Array.newInstance(
getElementJdbcType().getPreferredJavaTypeClass( options ),
0
).getClass();
final Object[] objects = javaTypeDescriptor.unwrap( value, arrayClass, options );
final String arrayTypeName = typeName( options ).toUpperCase(Locale.ROOT);
@Override
public java.sql.Array getBindValue(X value, WrapperOptions options) throws SQLException {
final Object[] objects = OracleArrayJdbcType.this.getArray( this, value, options );
final String arrayTypeName = typeName( options );
final OracleConnection oracleConnection = options.getSession()
.getJdbcCoordinator().getLogicalConnection().getPhysicalConnection()
@ -124,29 +126,80 @@ public class OracleArrayJdbcType extends ArrayJdbcType {
return new BasicExtractor<>( javaTypeDescriptor, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return javaTypeDescriptor.wrap( rs.getArray( paramIndex ), options );
return getArray( this, rs.getArray( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return javaTypeDescriptor.wrap( statement.getArray( index ), options );
return getArray( this, statement.getArray( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException {
return javaTypeDescriptor.wrap( statement.getArray( name ), options );
return getArray( this, statement.getArray( name ), options );
}
};
}
static String getTypeName(WrapperOptions options, BasicPluralJavaType<?> containerJavaType) {
static String getTypeName(WrapperOptions options, BasicPluralJavaType<?> containerJavaType, ArrayJdbcType arrayJdbcType) {
Dialect dialect = options.getSessionFactory().getJdbcServices().getDialect();
return getTypeName( containerJavaType.getElementJavaType(), dialect );
return getTypeName( containerJavaType.getElementJavaType(), arrayJdbcType.getElementJdbcType(), dialect );
}
static String getTypeName(JavaType<?> elementJavaType, Dialect dialect) {
static String getTypeName(BasicType<?> elementType, Dialect dialect) {
final BasicValueConverter<?, ?> converter = elementType.getValueConverter();
if ( converter != null ) {
final String simpleName;
if ( converter instanceof JpaAttributeConverter<?, ?> ) {
simpleName = ( (JpaAttributeConverter<?, ?>) converter ).getConverterJavaType()
.getJavaTypeClass()
.getSimpleName();
}
else {
simpleName = converter.getClass().getSimpleName();
}
return dialect.getArrayTypeName(
simpleName,
null, // not needed by OracleDialect.getArrayTypeName()
null // not needed by OracleDialect.getArrayTypeName()
);
}
return getTypeName( elementType.getJavaTypeDescriptor(), elementType.getJdbcType(), dialect );
}
static String getTypeName(JavaType<?> elementJavaType, JdbcType elementJdbcType, Dialect dialect) {
final String simpleName;
if ( elementJavaType.getJavaTypeClass().isArray() ) {
simpleName = dialect.getArrayTypeName(
elementJavaType.getJavaTypeClass().getComponentType().getSimpleName(),
null, // not needed by OracleDialect.getArrayTypeName()
null // not needed by OracleDialect.getArrayTypeName()
);
}
else if ( elementJdbcType instanceof StructJdbcType ) {
simpleName = ( (StructJdbcType) elementJdbcType ).getStructTypeName();
}
else {
final Class<?> preferredJavaTypeClass = elementJdbcType.getPreferredJavaTypeClass( null );
if ( preferredJavaTypeClass == elementJavaType.getJavaTypeClass() ) {
simpleName = elementJavaType.getJavaTypeClass().getSimpleName();
}
else {
if ( preferredJavaTypeClass.isArray() ) {
simpleName = elementJavaType.getJavaTypeClass().getSimpleName() + dialect.getArrayTypeName(
preferredJavaTypeClass.getComponentType().getSimpleName(),
null,
null
);
}
else {
simpleName = elementJavaType.getJavaTypeClass().getSimpleName() + preferredJavaTypeClass.getSimpleName();
}
}
}
return dialect.getArrayTypeName(
elementJavaType.getJavaTypeClass().getSimpleName(),
simpleName,
null, // not needed by OracleDialect.getArrayTypeName()
null // not needed by OracleDialect.getArrayTypeName()
);
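
To make the naming rule concrete, here is a hedged sketch of what it yields for a Long[] element type: getArrayTypeName produces "LongArray", and the DDL previously emitted inline here (and now left to the user-defined type export path) creates a VARRAY of the element's DDL type with the default length of 127. The element DDL type shown is an assumption of the usual Oracle mapping for a Long/BIGINT element; the actual type comes from the dialect's size strategy.

// Hypothetical illustration of the derived type name and the corresponding VARRAY DDL.
public class OracleArrayTypeNameSketch {
	public static void main(String[] args) {
		final String javaElementTypeName = "Long";
		final String arrayTypeName = javaElementTypeName + "Array"; // naming rule of OracleDialect.getArrayTypeName()
		final int arrayLength = 127;                                // default when no explicit array length is configured
		final String elementDdlType = "number(19,0)";               // assumed Oracle DDL type for a Long element
		final String ddl = "create or replace type " + arrayTypeName
				+ " as varying array(" + arrayLength + ") of " + elementDdlType;
		// Prints: create or replace type LongArray as varying array(127) of number(19,0)
		System.out.println( ddl );
	}
}
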
@ -158,455 +211,54 @@ public class OracleArrayJdbcType extends ArrayJdbcType {
Size columnSize,
Database database,
TypeConfiguration typeConfiguration) {
final JdbcType elementJdbcType = getElementJdbcType();
if ( elementJdbcType instanceof StructJdbcType ) {
// OracleAggregateSupport will take care of contributing the auxiliary database object
return;
}
final Dialect dialect = database.getDialect();
final BasicPluralJavaType<?> pluralJavaType = (BasicPluralJavaType<?>) javaType;
final JavaType<?> elementJavaType = pluralJavaType.getElementJavaType();
final String arrayTypeName = typeName == null ? getTypeName( elementJavaType, dialect ) : typeName;
final String elementType =
typeConfiguration.getDdlTypeRegistry().getTypeName(
getElementJdbcType().getDdlTypeCode(),
dialect.getSizeStrategy().resolveSize(
getElementJdbcType(),
elementJavaType,
columnSize.getPrecision(),
columnSize.getScale(),
columnSize.getLength()
),
new BasicTypeImpl<>( elementJavaType, getElementJdbcType() )
);
int arrayLength = columnSize.getArrayLength() == null ? 127 : columnSize.getArrayLength();
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName,
database.getDefaultNamespace(),
new String[]{
"create or replace type " + arrayTypeName
+ " as varying array(" + arrayLength + ") of " + elementType
},
new String[] { "drop type " + arrayTypeName + " force" },
emptySet(),
true
)
final String arrayTypeName = typeName == null ? getTypeName( elementJavaType, elementJdbcType, dialect ) : typeName;
final String elementType = typeConfiguration.getDdlTypeRegistry().getTypeName(
elementJdbcType.getDdlTypeCode(),
dialect.getSizeStrategy().resolveSize(
elementJdbcType,
elementJavaType,
columnSize.getPrecision(),
columnSize.getScale(),
columnSize.getLength()
),
new BasicTypeImpl<>( elementJavaType, elementJdbcType )
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_cmp",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_cmp(a in " + arrayTypeName +
", b in " + arrayTypeName + ") return number deterministic is begin " +
"if a is null or b is null then return null; end if; " +
"for i in 1 .. least(a.count,b.count) loop " +
"if a(i) is null or b(i) is null then return null;" +
"elsif a(i)>b(i) then return 1;" +
"elsif a(i)<b(i) then return -1; " +
"end if; " +
"end loop; " +
"if a.count=b.count then return 0; elsif a.count>b.count then return 1; else return -1; end if; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_cmp" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_distinct",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_distinct(a in " + arrayTypeName +
", b in " + arrayTypeName + ") return number deterministic is begin " +
"if a is null and b is null then return 0; end if; " +
"if a is null or b is null or a.count <> b.count then return 1; end if; " +
"for i in 1 .. a.count loop " +
"if (a(i) is null)<>(b(i) is null) or a(i)<>b(i) then return 1; end if; " +
"end loop; " +
"return 0; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_distinct" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_position",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_position(arr in " + arrayTypeName +
", elem in " + getRawTypeName( elementType ) + ", startPos in number default 1) return number deterministic is begin " +
"if arr is null then return null; end if; " +
"if elem is null then " +
"for i in startPos .. arr.count loop " +
"if arr(i) is null then return i; end if; " +
"end loop; " +
"else " +
"for i in startPos .. arr.count loop " +
"if arr(i)=elem then return i; end if; " +
"end loop; " +
"end if; " +
"return 0; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_position" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_length",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_length(arr in " + arrayTypeName +
") return number deterministic is begin " +
"if arr is null then return null; end if; " +
"return arr.count; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_length" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_concat",
database.getDefaultNamespace(),
new String[]{ createOrReplaceConcatFunction( arrayTypeName ) },
new String[] { "drop function " + arrayTypeName + "_concat" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_contains",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_contains(haystack in " + arrayTypeName +
", needle in " + arrayTypeName + ", nullable in number) return number deterministic is found number(1,0); begin " +
"if haystack is null or needle is null then return null; end if; " +
"for i in 1 .. needle.count loop " +
"found := 0; " +
"for j in 1 .. haystack.count loop " +
"if nullable = 1 and needle(i) is null and haystack(j) is null or needle(i)=haystack(j) then found := 1; exit; end if; " +
"end loop; " +
"if found = 0 then return 0; end if;" +
"end loop; " +
"return 1; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_contains" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_overlaps",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_overlaps(haystack in " + arrayTypeName +
", needle in " + arrayTypeName + ", nullable in number) return number deterministic is begin " +
"if haystack is null or needle is null then return null; end if; " +
"if needle.count = 0 then return 1; end if; " +
"for i in 1 .. needle.count loop " +
"for j in 1 .. haystack.count loop " +
"if nullable = 1 and needle(i) is null and haystack(j) is null or needle(i)=haystack(j) then return 1; end if; " +
"end loop; " +
"end loop; " +
"return 0; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_overlaps" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_get",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_get(arr in " + arrayTypeName +
", idx in number) return " + getRawTypeName( elementType ) + " deterministic is begin " +
"if arr is null or idx is null or arr.count < idx then return null; end if; " +
"return arr(idx); " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_get" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_set",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_set(arr in " + arrayTypeName +
", idx in number, elem in " + getRawTypeName( elementType ) + ") return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is not null then " +
"for i in 1 .. arr.count loop " +
"res.extend; " +
"res(i) := arr(i); " +
"end loop; " +
"for i in arr.count+1 .. idx loop " +
"res.extend; " +
"end loop; " +
"else " +
"for i in 1 .. idx loop " +
"res.extend; " +
"end loop; " +
"end if; " +
"res(idx) := elem; " +
"return res; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_set" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_remove",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_remove(arr in " + arrayTypeName +
", elem in " + getRawTypeName( elementType ) + ") return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is null then return null; end if; " +
"if elem is null then " +
"for i in 1 .. arr.count loop " +
"if arr(i) is not null then res.extend; res(res.last) := arr(i); end if; " +
"end loop; " +
"else " +
"for i in 1 .. arr.count loop " +
"if arr(i) is null or arr(i)<>elem then res.extend; res(res.last) := arr(i); end if; " +
"end loop; " +
"end if; " +
"return res; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_remove" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_remove_index",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_remove_index(arr in " + arrayTypeName +
", idx in number) return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is null or idx is null then return arr; end if; " +
"for i in 1 .. arr.count loop " +
"if i<>idx then res.extend; res(res.last) := arr(i); end if; " +
"end loop; " +
"return res; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_remove_index" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_slice",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_slice(arr in " + arrayTypeName +
", startIdx in number, endIdx in number) return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is null or startIdx is null or endIdx is null then return null; end if; " +
"for i in startIdx .. least(arr.count,endIdx) loop " +
"res.extend; res(res.last) := arr(i); " +
"end loop; " +
"return res; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_slice" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_replace",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_replace(arr in " + arrayTypeName +
", old in " + getRawTypeName( elementType ) + ", elem in " + getRawTypeName( elementType ) + ") return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is null then return null; end if; " +
"if old is null then " +
"for i in 1 .. arr.count loop " +
"res.extend; " +
"res(res.last) := coalesce(arr(i),elem); " +
"end loop; " +
"else " +
"for i in 1 .. arr.count loop " +
"res.extend; " +
"if arr(i) = old then " +
"res(res.last) := elem; " +
"else " +
"res(res.last) := arr(i); " +
"end if; " +
"end loop; " +
"end if; " +
"return res; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_replace" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_trim",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_trim(arr in " + arrayTypeName +
", elems number) return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is null or elems is null then return null; end if; " +
"if arr.count < elems then raise_application_error (-20000, 'number of elements to trim must be between 0 and '||arr.count); end if;" +
"for i in 1 .. arr.count-elems loop " +
"res.extend; " +
"res(i) := arr(i); " +
"end loop; " +
"return res; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_trim" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_fill",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_fill(elem in " + getRawTypeName( elementType ) +
", elems number) return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if elems is null then return null; end if; " +
"if elems<0 then raise_application_error (-20000, 'number of elements must be greater than or equal to 0'); end if;" +
"for i in 1 .. elems loop " +
"res.extend; " +
"res(i) := elem; " +
"end loop; " +
"return res; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_fill" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_positions",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_positions(arr in " + arrayTypeName +
", elem in " + getRawTypeName( elementType ) + ") return sdo_ordinate_array deterministic is " +
"res sdo_ordinate_array:=sdo_ordinate_array(); begin " +
"if arr is null then return null; end if; " +
"if elem is null then " +
"for i in 1 .. arr.count loop " +
"if arr(i) is null then res.extend; res(res.last):=i; end if; " +
"end loop; " +
"else " +
"for i in 1 .. arr.count loop " +
"if arr(i)=elem then res.extend; res(res.last):=i; end if; " +
"end loop; " +
"end if; " +
"return res; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_positions" },
emptySet(),
false
)
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName + "_to_string",
database.getDefaultNamespace(),
new String[]{
"create or replace function " + arrayTypeName + "_to_string(arr in " + arrayTypeName +
", sep in varchar2) return varchar2 deterministic is " +
"res varchar2(4000):=''; begin " +
"if arr is null or sep is null then return null; end if; " +
"for i in 1 .. arr.count loop " +
"if arr(i) is not null then " +
"if length(res)<>0 then res:=res||sep; end if; " +
"res:=res||arr(i); " +
"end if; " +
"end loop; " +
"return res; " +
"end;"
},
new String[] { "drop function " + arrayTypeName + "_to_string" },
emptySet(),
false
)
final UserDefinedArrayType userDefinedArrayType = database.getDefaultNamespace().createUserDefinedArrayType(
Identifier.toIdentifier( arrayTypeName ),
name -> new UserDefinedArrayType( "orm", database.getDefaultNamespace(), name )
);
userDefinedArrayType.setArraySqlTypeCode( getDdlTypeCode() );
userDefinedArrayType.setElementTypeName( elementType );
userDefinedArrayType.setElementSqlTypeCode( elementJdbcType.getDefaultSqlTypeCode() );
userDefinedArrayType.setArrayLength( columnSize.getArrayLength() == null ? 127 : columnSize.getArrayLength() );
}
protected String createOrReplaceConcatFunction(String arrayTypeName) {
// Since Oracle has no builtin concat function for varrays and doesn't support varargs,
// we have to create a function with a fixed amount of arguments with default that fits "most" cases.
// Let's just use 5 for the time being until someone requests more.
return createOrReplaceConcatFunction( arrayTypeName, 5 );
@Override
public void registerOutParameter(CallableStatement callableStatement, String name) throws SQLException {
callableStatement.registerOutParameter( name, ARRAY, upperTypeName );
}
protected String createOrReplaceConcatFunction(String arrayTypeName, int maxConcatParams) {
final StringBuilder sb = new StringBuilder();
sb.append( "create or replace function " ).append( arrayTypeName ).append( "_concat(" );
sb.append( "arr0 in " ).append( arrayTypeName ).append( ",arr1 in " ).append( arrayTypeName );
for ( int i = 2; i < maxConcatParams; i++ ) {
sb.append( ",arr" ).append( i ).append( " in " ).append( arrayTypeName )
.append( " default " ).append( arrayTypeName ).append( "()" );
}
sb.append( ") return " ).append( arrayTypeName ).append( " deterministic is res " ).append( arrayTypeName )
.append( "; begin if " );
String separator = "";
for ( int i = 0; i < maxConcatParams; i++ ) {
sb.append( separator ).append( "arr" ).append( i ).append( " is null" );
separator = " or ";
}
sb.append( " then return null; end if; " );
sb.append( "select * bulk collect into res from (" );
separator = "";
for ( int i = 0; i < maxConcatParams; i++ ) {
sb.append( separator ).append( "select * from table(arr" ).append( i ).append( ')' );
separator = " union all ";
}
return sb.append( "); return res; end;" ).toString();
@Override
public void registerOutParameter(CallableStatement callableStatement, int index) throws SQLException {
callableStatement.registerOutParameter( index, ARRAY, upperTypeName );
}
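
A hedged JDBC usage sketch of what the registerOutParameter overloads above make possible: registering an OUT parameter under the user-defined VARRAY type name and reading it back as a java.sql.Array. The procedure and type names below are hypothetical.

// Standalone illustration; only standard JDBC API calls are used.
import java.sql.Array;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Types;
public class VarrayOutParameterSketch {
	static Object[] fetchTags(Connection connection, long id) throws SQLException {
		try ( CallableStatement call = connection.prepareCall( "{call find_tags(?, ?)}" ) ) {
			call.setLong( 1, id );
			// Same JDBC call the JdbcType issues, using the upper-cased user-defined type name
			call.registerOutParameter( 2, Types.ARRAY, "STRINGARRAY" );
			call.execute();
			final Array array = call.getArray( 2 );
			return array == null ? null : (Object[]) array.getArray();
		}
	}
}
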
private static String getRawTypeName(String typeName) {
//trim off the length/precision/scale
final int paren = typeName.indexOf( '(' );
if ( paren > 0 ) {
final int parenEnd = typeName.lastIndexOf( ')' );
return parenEnd + 1 == typeName.length()
? typeName.substring( 0, paren )
: typeName.substring( 0, paren ) + typeName.substring( parenEnd + 1 );
}
return typeName;
@Override
public String getExtraCreateTableInfo(JavaType<?> javaType, String columnName, String tableName, Database database) {
return getElementJdbcType().getExtraCreateTableInfo(
( (BasicPluralJavaType<?>) javaType ).getElementJavaType(),
columnName,
tableName,
database
);
}
@Override

View File

@ -25,11 +25,12 @@ public class OracleArrayJdbcTypeConstructor implements JdbcTypeConstructor {
@Override
public JdbcType resolveType(
TypeConfiguration typeConfiguration,
Dialect dialect, BasicType<?> elementType,
Dialect dialect,
BasicType<?> elementType,
ColumnTypeInformation columnTypeInformation) {
String typeName = columnTypeInformation == null ? null : columnTypeInformation.getTypeName();
if ( typeName == null || typeName.isBlank() ) {
typeName = OracleArrayJdbcType.getTypeName( elementType.getJavaTypeDescriptor(), dialect );
typeName = OracleArrayJdbcType.getTypeName( elementType, dialect );
}
// if ( typeName == null ) {
// // Fallback to XML type for the representation of arrays as the native JSON type was only introduced in 21
@ -58,7 +59,7 @@ public class OracleArrayJdbcTypeConstructor implements JdbcTypeConstructor {
precision,
scale,
typeConfiguration
), dialect );
), elementType, dialect );
}
return new OracleArrayJdbcType( elementType, typeName );
}

View File

@ -0,0 +1,73 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.util.Locale;
import org.hibernate.HibernateException;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Database;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.UserDefinedObjectType;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import oracle.sql.TIMESTAMPTZ;
/**
* @author Christian Beikov
*/
public class OracleBaseStructJdbcType extends StructJdbcType {
public OracleBaseStructJdbcType() {
// The default instance is for reading only and will return an Object[]
this( null, null, null );
}
protected OracleBaseStructJdbcType(EmbeddableMappingType embeddableMappingType, String typeName, int[] orderMapping) {
super(
embeddableMappingType,
typeName == null ? null : typeName.toUpperCase( Locale.ROOT ),
orderMapping
);
}
@Override
public String getExtraCreateTableInfo(
JavaType<?> javaType,
String columnName,
String tableName,
Database database) {
final UserDefinedObjectType udt = database.getDefaultNamespace()
.locateUserDefinedType( Identifier.toIdentifier( getSqlTypeName() ) );
StringBuilder sb = null;
for ( Column column : udt.getColumns() ) {
final JdbcMapping jdbcMapping = (JdbcMapping) column.getValue().getType();
final String extraCreateTableInfo = jdbcMapping.getJdbcType().getExtraCreateTableInfo(
jdbcMapping.getJavaTypeDescriptor(),
columnName + "." + column.getName(),
tableName,
database
);
if ( !extraCreateTableInfo.isEmpty() ) {
if ( sb == null ) {
sb = new StringBuilder();
}
else {
sb.append( ',' );
}
sb.append( extraCreateTableInfo );
}
}
return sb != null ? sb.toString() : "";
}
}

View File

@ -49,6 +49,7 @@ import org.hibernate.exception.spi.TemplatedViolatedConstraintNameExtractor;
import org.hibernate.exception.spi.ViolatedConstraintNameExtractor;
import org.hibernate.internal.util.JdbcExceptionHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.mapping.UserDefinedType;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.persister.entity.mutation.EntityMutationTarget;
@ -78,16 +79,20 @@ import org.hibernate.sql.model.internal.OptionalTableUpdate;
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorOracleDatabaseImpl;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.type.JavaObjectType;
import org.hibernate.type.NullType;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.java.PrimitiveByteArrayJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.BlobJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.NullJdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectJdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectNullAsNullTypeJdbcType;
import org.hibernate.type.descriptor.jdbc.OracleJsonBlobJdbcType;
import org.hibernate.type.descriptor.jdbc.SqlTypedJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.ArrayDdlTypeImpl;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
@ -171,6 +176,7 @@ public class OracleDialect extends Dialect {
private static final DatabaseVersion MINIMUM_VERSION = DatabaseVersion.make( 19 );
private final OracleUserDefinedTypeExporter userDefinedTypeExporter = new OracleUserDefinedTypeExporter( this );
private final UniqueDelegate uniqueDelegate = new CreateTableUniqueDelegate(this);
// Is it an Autonomous Database Cloud Service?
@ -800,12 +806,12 @@ public class OracleDialect extends Dialect {
jdbcTypeCode = GEOMETRY;
}
else {
final AggregateJdbcType aggregateDescriptor = jdbcTypeRegistry.findAggregateDescriptor(
final SqlTypedJdbcType descriptor = jdbcTypeRegistry.findSqlTypedDescriptor(
// Skip the schema
columnTypeName.substring( columnTypeName.indexOf( '.' ) + 1 )
);
if ( aggregateDescriptor != null ) {
return aggregateDescriptor;
if ( descriptor != null ) {
return descriptor;
}
}
break;
@ -817,6 +823,15 @@ public class OracleDialect extends Dialect {
ColumnTypeInformation.EMPTY
);
}
else {
final SqlTypedJdbcType descriptor = jdbcTypeRegistry.findSqlTypedDescriptor(
// Skip the schema
columnTypeName.substring( columnTypeName.indexOf( '.' ) + 1 )
);
if ( descriptor != null ) {
return descriptor;
}
}
break;
case NUMERIC:
if ( precision > 8 // precision of 0 means something funny
@ -871,7 +886,7 @@ public class OracleDialect extends Dialect {
@Override
public String getArrayTypeName(String javaElementTypeName, String elementTypeName, Integer maxLength) {
return javaElementTypeName + "Array";
return ( javaElementTypeName == null ? elementTypeName : javaElementTypeName ) + "Array";
}
@Override
@ -880,6 +895,11 @@ public class OracleDialect extends Dialect {
return ARRAY;
}
@Override
public Exporter<UserDefinedType> getUserDefinedTypeExporter() {
return userDefinedTypeExporter;
}
@Override
public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
super.contributeTypes( typeContributions, serviceRegistry );
@ -907,6 +927,8 @@ public class OracleDialect extends Dialect {
}
if ( OracleJdbcHelper.isUsable( serviceRegistry ) ) {
// Register a JdbcType to allow reading from native queries
typeContributions.contributeJdbcType( new ArrayJdbcType( ObjectJdbcType.INSTANCE ) );
typeContributions.contributeJdbcTypeConstructor( getArrayJdbcTypeConstructor( serviceRegistry ) );
typeContributions.contributeJdbcTypeConstructor( getNestedTableJdbcTypeConstructor( serviceRegistry ) );
}

View File

@ -6,55 +6,21 @@
*/
package org.hibernate.dialect;
import oracle.jdbc.OracleConnection;
import org.hibernate.HibernateException;
import org.hibernate.boot.model.relational.Database;
import org.hibernate.boot.model.relational.NamedAuxiliaryDatabaseObject;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.internal.BasicTypeImpl;
import org.hibernate.type.spi.TypeConfiguration;
import java.lang.reflect.Array;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Locale;
import static java.sql.Types.ARRAY;
import static java.util.Collections.emptySet;
import static org.hibernate.internal.util.StringHelper.truncate;
/**
* Descriptor for {@link Types#ARRAY ARRAY} handling.
*
* @author Christian Beikov
* @author Jordan Gigov
* Descriptor for {@link SqlTypes#TABLE TABLE} handling.
*/
public class OracleNestedTableJdbcType implements JdbcType {
private final JdbcType elementJdbcType;
private final String typeName;
public class OracleNestedTableJdbcType extends OracleArrayJdbcType {
public OracleNestedTableJdbcType(JdbcType elementJdbcType, String typeName) {
this.elementJdbcType = elementJdbcType;
this.typeName = typeName;
}
@Override
public int getJdbcTypeCode() {
return Types.ARRAY;
super( elementJdbcType, typeName );
}
@Override
@ -62,183 +28,20 @@ public class OracleNestedTableJdbcType implements JdbcType {
return SqlTypes.TABLE;
}
public JdbcType getElementJdbcType() {
return elementJdbcType;
}
@Override
public <T> JavaType<T> getJdbcRecommendedJavaTypeMapping(
Integer precision,
Integer scale,
TypeConfiguration typeConfiguration) {
final JavaType<Object> elementJavaType = elementJdbcType.getJdbcRecommendedJavaTypeMapping(
precision,
scale,
typeConfiguration
);
return typeConfiguration.getJavaTypeRegistry().resolveDescriptor(
Array.newInstance( elementJavaType.getJavaTypeClass(), 0 ).getClass()
);
}
@Override
public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaTypeDescriptor) {
return null;
}
@Override
public Class<?> getPreferredJavaTypeClass(WrapperOptions options) {
return java.sql.Array.class;
}
@Override
public <X> ValueBinder<X> getBinder(final JavaType<X> javaTypeDescriptor) {
//noinspection unchecked
final BasicPluralJavaType<X> containerJavaType = (BasicPluralJavaType<X>) javaTypeDescriptor;
return new BasicBinder<>( javaTypeDescriptor, this ) {
private String typeName(WrapperOptions options) {
return ( typeName == null ? getTypeName( options, containerJavaType ) : typeName )
.toUpperCase(Locale.ROOT);
}
@Override
protected void doBindNull(PreparedStatement st, int index, WrapperOptions options) throws SQLException {
st.setNull( index, ARRAY, typeName( options ) );
}
@Override
protected void doBindNull(CallableStatement st, String name, WrapperOptions options) throws SQLException {
st.setNull( name, ARRAY, typeName( options ) );
}
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options) throws SQLException {
st.setArray( index, getArray( value, options ) );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
final java.sql.Array arr = getArray( value, options );
try {
st.setObject( name, arr, ARRAY );
}
catch (SQLException ex) {
throw new HibernateException( "JDBC driver does not support named parameters for setArray. Use positional.", ex );
}
}
private java.sql.Array getArray(X value, WrapperOptions options)
throws SQLException {
//noinspection unchecked
final Class<Object[]> arrayClass = (Class<Object[]>) Array.newInstance(
getElementJdbcType().getPreferredJavaTypeClass( options ),
0
).getClass();
final Object[] objects = javaTypeDescriptor.unwrap( value, arrayClass, options );
final String arrayTypeName = typeName( options ).toUpperCase(Locale.ROOT);
final OracleConnection oracleConnection = options.getSession()
.getJdbcCoordinator().getLogicalConnection().getPhysicalConnection()
.unwrap( OracleConnection.class );
try {
return oracleConnection.createOracleArray( arrayTypeName, objects );
}
catch (Exception e) {
throw new HibernateException( "Couldn't create a java.sql.Array", e );
}
}
};
}
@Override
public <X> ValueExtractor<X> getExtractor(final JavaType<X> javaTypeDescriptor) {
return new BasicExtractor<>( javaTypeDescriptor, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return javaTypeDescriptor.wrap( rs.getArray( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return javaTypeDescriptor.wrap( statement.getArray( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException {
return javaTypeDescriptor.wrap( statement.getArray( name ), options );
}
};
}
static String getTypeName(WrapperOptions options, BasicPluralJavaType<?> containerJavaType) {
Dialect dialect = options.getSessionFactory().getJdbcServices().getDialect();
return getTypeName( containerJavaType.getElementJavaType(), dialect );
}
static String getTypeName(JavaType<?> elementJavaType, Dialect dialect) {
return dialect.getArrayTypeName(
elementJavaType.getJavaTypeClass().getSimpleName(),
null, // not needed by OracleDialect.getArrayTypeName(),
null // not needed by OracleDialect.getArrayTypeName()
);
}
@Override
public void addAuxiliaryDatabaseObjects(
JavaType<?> javaType,
Size columnSize,
Database database,
TypeConfiguration typeConfiguration) {
final Dialect dialect = database.getDialect();
final BasicPluralJavaType<?> pluralJavaType = (BasicPluralJavaType<?>) javaType;
final JavaType<?> elementJavaType = pluralJavaType.getElementJavaType();
final String arrayTypeName = typeName==null ? getTypeName( elementJavaType, dialect ) : typeName;
final String elementType =
typeConfiguration.getDdlTypeRegistry().getTypeName(
getElementJdbcType().getDdlTypeCode(),
dialect.getSizeStrategy().resolveSize(
getElementJdbcType(),
elementJavaType,
columnSize.getPrecision(),
columnSize.getScale(),
columnSize.getLength()
),
new BasicTypeImpl<>( elementJavaType, getElementJdbcType() )
);
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
arrayTypeName,
database.getDefaultNamespace(),
new String[]{
"create or replace type " + arrayTypeName
+ " as table of " + elementType
},
new String[] { "drop type " + arrayTypeName + " force" },
emptySet(),
true
)
);
}
@Override
public String getExtraCreateTableInfo(JavaType<?> javaType, String columnName, String tableName, Database database) {
final Dialect dialect = database.getDialect();
final BasicPluralJavaType<?> pluralJavaType = (BasicPluralJavaType<?>) javaType;
String elementTypeName = getTypeName( pluralJavaType.getElementJavaType(), dialect );
String elementTypeName = getTypeName( pluralJavaType.getElementJavaType(), getElementJdbcType(), dialect );
return " nested table " + columnName + " store as \"" + truncate(
tableName + " " + columnName + " " + elementTypeName,
dialect.getMaxIdentifierLength()
) + "\"";
}
@Override
public String getFriendlyName() {
return typeName;
}
@Override
public String toString() {
return "OracleArrayTypeDescriptor(" + typeName + ")";
return "OracleNestedTableTypeDescriptor(" + getSqlTypeName() + ")";
}
}
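An illustrative mapping sketch for the descriptor above (not part of this commit; names are hypothetical). Assuming @JdbcTypeCode(SqlTypes.TABLE) is used to request the nested-table mapping, addAuxiliaryDatabaseObjects emits DDL along the lines of "create or replace type StringArray as table of varchar2(...)" and drops it again with "drop type StringArray force":
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
@Entity
class Post {
    @Id
    private Long id;
    // requests the nested-table flavour handled by OracleNestedTableJdbcType,
    // as opposed to a VARRAY-backed array mapping
    @JdbcTypeCode( SqlTypes.TABLE )
    private String[] tags;
}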

View File

@ -24,11 +24,12 @@ public class OracleNestedTableJdbcTypeConstructor implements JdbcTypeConstructor
@Override
public JdbcType resolveType(
TypeConfiguration typeConfiguration,
Dialect dialect, BasicType<?> elementType,
Dialect dialect,
BasicType<?> elementType,
ColumnTypeInformation columnTypeInformation) {
String typeName = columnTypeInformation == null ? null : columnTypeInformation.getTypeName();
if ( typeName == null || typeName.isBlank() ) {
typeName = OracleArrayJdbcType.getTypeName( elementType.getJavaTypeDescriptor(), dialect );
typeName = OracleArrayJdbcType.getTypeName( elementType, dialect );
}
return new OracleNestedTableJdbcType( elementType.getJdbcType(), typeName );
}
@ -52,7 +53,7 @@ public class OracleNestedTableJdbcTypeConstructor implements JdbcTypeConstructor
precision,
scale,
typeConfiguration
), dialect );
), elementType, dialect );
}
return new OracleNestedTableJdbcType( elementType, typeName );
}

View File

@ -8,10 +8,10 @@ package org.hibernate.dialect;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.util.Locale;
import org.hibernate.HibernateException;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.mapping.UserDefinedObjectType;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.type.descriptor.WrapperOptions;
@ -20,7 +20,7 @@ import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
/**
* @author Christian Beikov
*/
public class OracleReflectionStructJdbcType extends StructJdbcType {
public class OracleReflectionStructJdbcType extends OracleBaseStructJdbcType {
public static final AggregateJdbcType INSTANCE = new OracleReflectionStructJdbcType();
private static final ClassValue<Method> RAW_JDBC_TRANSFORMER = new ClassValue<>() {
@ -44,12 +44,8 @@ public class OracleReflectionStructJdbcType extends StructJdbcType {
this( null, null, null );
}
public OracleReflectionStructJdbcType(EmbeddableMappingType embeddableMappingType, String typeName, int[] orderMapping) {
super(
embeddableMappingType,
typeName == null ? null : typeName.toUpperCase( Locale.ROOT ),
orderMapping
);
private OracleReflectionStructJdbcType(EmbeddableMappingType embeddableMappingType, String typeName, int[] orderMapping) {
super( embeddableMappingType, typeName, orderMapping );
}
@Override

View File

@ -510,7 +510,7 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends SqlAstTrans
appendSql( ')' );
break;
case SqlTypes.ARRAY:
final String arrayTypeName = ( (OracleArrayJdbcType) jdbcType ).getTypeName();
final String arrayTypeName = ( (OracleArrayJdbcType) jdbcType ).getSqlTypeName();
switch ( operator ) {
case DISTINCT_FROM:
case NOT_DISTINCT_FROM:

View File

@ -6,10 +6,9 @@
*/
package org.hibernate.dialect;
import java.util.Locale;
import org.hibernate.HibernateException;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.mapping.UserDefinedObjectType;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.type.descriptor.WrapperOptions;
@ -20,7 +19,7 @@ import oracle.sql.TIMESTAMPTZ;
/**
* @author Christian Beikov
*/
public class OracleStructJdbcType extends StructJdbcType {
public class OracleStructJdbcType extends OracleBaseStructJdbcType {
public OracleStructJdbcType() {
// The default instance is for reading only and will return an Object[]
@ -28,11 +27,7 @@ public class OracleStructJdbcType extends StructJdbcType {
}
private OracleStructJdbcType(EmbeddableMappingType embeddableMappingType, String typeName, int[] orderMapping) {
super(
embeddableMappingType,
typeName == null ? null : typeName.toUpperCase( Locale.ROOT ),
orderMapping
);
super( embeddableMappingType, typeName, orderMapping );
}
@Override

View File

@ -0,0 +1,383 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.util.Locale;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.QualifiedName;
import org.hibernate.boot.model.relational.QualifiedNameParser;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.mapping.UserDefinedArrayType;
import org.hibernate.tool.schema.internal.StandardUserDefinedTypeExporter;
import org.hibernate.type.SqlTypes;
import static java.sql.Types.BOOLEAN;
import static org.hibernate.type.SqlTypes.BIGINT;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.DATE;
import static org.hibernate.type.SqlTypes.INTEGER;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.SMALLINT;
import static org.hibernate.type.SqlTypes.TABLE;
import static org.hibernate.type.SqlTypes.TIME;
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_UTC;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.TINYINT;
import static org.hibernate.type.SqlTypes.VARBINARY;
/**
* @author Christian Beikov
*/
public class OracleUserDefinedTypeExporter extends StandardUserDefinedTypeExporter {
public OracleUserDefinedTypeExporter(Dialect dialect) {
super( dialect );
}
@Override
public String[] getSqlCreateStrings(
UserDefinedArrayType userDefinedType,
Metadata metadata,
SqlStringGenerationContext context) {
final QualifiedName typeName = new QualifiedNameParser.NameParts(
Identifier.toIdentifier( userDefinedType.getCatalog(), userDefinedType.isCatalogQuoted() ),
Identifier.toIdentifier( userDefinedType.getSchema(), userDefinedType.isSchemaQuoted() ),
userDefinedType.getNameIdentifier()
);
final String arrayTypeName = context.format( typeName );
final Integer arraySqlTypeCode = userDefinedType.getArraySqlTypeCode();
final String elementType = userDefinedType.getElementTypeName();
if ( arraySqlTypeCode == null || arraySqlTypeCode == TABLE ) {
return new String[] {
"create or replace type " + arrayTypeName + " as table of " + elementType
};
}
final int arrayLength = userDefinedType.getArrayLength();
final Integer elementSqlTypeCode = userDefinedType.getElementSqlTypeCode();
final String jsonTypeName = metadata.getDatabase().getTypeConfiguration().getDdlTypeRegistry().getTypeName(
SqlTypes.JSON,
dialect
);
final String valueExpression = determineValueExpression( "t.value", elementSqlTypeCode, elementType );
return new String[] {
"create or replace type " + arrayTypeName + " as varying array(" + arrayLength + ") of " + elementType,
"create or replace function " + arrayTypeName + "_cmp(a in " + arrayTypeName +
", b in " + arrayTypeName + ") return number deterministic is begin " +
"if a is null or b is null then return null; end if; " +
"for i in 1 .. least(a.count,b.count) loop " +
"if a(i) is null or b(i) is null then return null;" +
"elsif a(i)>b(i) then return 1;" +
"elsif a(i)<b(i) then return -1; " +
"end if; " +
"end loop; " +
"if a.count=b.count then return 0; elsif a.count>b.count then return 1; else return -1; end if; " +
"end;",
"create or replace function " + arrayTypeName + "_distinct(a in " + arrayTypeName +
", b in " + arrayTypeName + ") return number deterministic is begin " +
"if a is null and b is null then return 0; end if; " +
"if a is null or b is null or a.count <> b.count then return 1; end if; " +
"for i in 1 .. a.count loop " +
"if (a(i) is null)<>(b(i) is null) or a(i)<>b(i) then return 1; end if; " +
"end loop; " +
"return 0; " +
"end;",
"create or replace function " + arrayTypeName + "_position(arr in " + arrayTypeName +
", elem in " + getRawTypeName( elementType ) + ", startPos in number default 1) return number deterministic is begin " +
"if arr is null then return null; end if; " +
"if elem is null then " +
"for i in startPos .. arr.count loop " +
"if arr(i) is null then return i; end if; " +
"end loop; " +
"else " +
"for i in startPos .. arr.count loop " +
"if arr(i)=elem then return i; end if; " +
"end loop; " +
"end if; " +
"return 0; " +
"end;",
"create or replace function " + arrayTypeName + "_length(arr in " + arrayTypeName +
") return number deterministic is begin " +
"if arr is null then return null; end if; " +
"return arr.count; " +
"end;",
createOrReplaceConcatFunction( arrayTypeName ),
"create or replace function " + arrayTypeName + "_contains(haystack in " + arrayTypeName +
", needle in " + arrayTypeName + ", nullable in number) return number deterministic is found number(1,0); begin " +
"if haystack is null or needle is null then return null; end if; " +
"for i in 1 .. needle.count loop " +
"found := 0; " +
"for j in 1 .. haystack.count loop " +
"if nullable = 1 and needle(i) is null and haystack(j) is null or needle(i)=haystack(j) then found := 1; exit; end if; " +
"end loop; " +
"if found = 0 then return 0; end if;" +
"end loop; " +
"return 1; " +
"end;",
"create or replace function " + arrayTypeName + "_overlaps(haystack in " + arrayTypeName +
", needle in " + arrayTypeName + ", nullable in number) return number deterministic is begin " +
"if haystack is null or needle is null then return null; end if; " +
"if needle.count = 0 then return 1; end if; " +
"for i in 1 .. needle.count loop " +
"for j in 1 .. haystack.count loop " +
"if nullable = 1 and needle(i) is null and haystack(j) is null or needle(i)=haystack(j) then return 1; end if; " +
"end loop; " +
"end loop; " +
"return 0; " +
"end;",
"create or replace function " + arrayTypeName + "_get(arr in " + arrayTypeName +
", idx in number) return " + getRawTypeName( elementType ) + " deterministic is begin " +
"if arr is null or idx is null or arr.count < idx then return null; end if; " +
"return arr(idx); " +
"end;",
"create or replace function " + arrayTypeName + "_set(arr in " + arrayTypeName +
", idx in number, elem in " + getRawTypeName( elementType ) + ") return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is not null then " +
"for i in 1 .. arr.count loop " +
"res.extend; " +
"res(i) := arr(i); " +
"end loop; " +
"for i in arr.count+1 .. idx loop " +
"res.extend; " +
"end loop; " +
"else " +
"for i in 1 .. idx loop " +
"res.extend; " +
"end loop; " +
"end if; " +
"res(idx) := elem; " +
"return res; " +
"end;",
"create or replace function " + arrayTypeName + "_remove(arr in " + arrayTypeName +
", elem in " + getRawTypeName( elementType ) + ") return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is null then return null; end if; " +
"if elem is null then " +
"for i in 1 .. arr.count loop " +
"if arr(i) is not null then res.extend; res(res.last) := arr(i); end if; " +
"end loop; " +
"else " +
"for i in 1 .. arr.count loop " +
"if arr(i) is null or arr(i)<>elem then res.extend; res(res.last) := arr(i); end if; " +
"end loop; " +
"end if; " +
"return res; " +
"end;",
"create or replace function " + arrayTypeName + "_remove_index(arr in " + arrayTypeName +
", idx in number) return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is null or idx is null then return arr; end if; " +
"for i in 1 .. arr.count loop " +
"if i<>idx then res.extend; res(res.last) := arr(i); end if; " +
"end loop; " +
"return res; " +
"end;",
"create or replace function " + arrayTypeName + "_slice(arr in " + arrayTypeName +
", startIdx in number, endIdx in number) return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is null or startIdx is null or endIdx is null then return null; end if; " +
"for i in startIdx .. least(arr.count,endIdx) loop " +
"res.extend; res(res.last) := arr(i); " +
"end loop; " +
"return res; " +
"end;",
"create or replace function " + arrayTypeName + "_replace(arr in " + arrayTypeName +
", old in " + getRawTypeName( elementType ) + ", elem in " + getRawTypeName( elementType ) + ") return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is null then return null; end if; " +
"if old is null then " +
"for i in 1 .. arr.count loop " +
"res.extend; " +
"res(res.last) := coalesce(arr(i),elem); " +
"end loop; " +
"else " +
"for i in 1 .. arr.count loop " +
"res.extend; " +
"if arr(i) = old then " +
"res(res.last) := elem; " +
"else " +
"res(res.last) := arr(i); " +
"end if; " +
"end loop; " +
"end if; " +
"return res; " +
"end;",
"create or replace function " + arrayTypeName + "_trim(arr in " + arrayTypeName +
", elems number) return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is null or elems is null then return null; end if; " +
"if arr.count < elems then raise_application_error (-20000, 'number of elements to trim must be between 0 and '||arr.count); end if;" +
"for i in 1 .. arr.count-elems loop " +
"res.extend; " +
"res(i) := arr(i); " +
"end loop; " +
"return res; " +
"end;",
"create or replace function " + arrayTypeName + "_fill(elem in " + getRawTypeName( elementType ) +
", elems number) return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if elems is null then return null; end if; " +
"if elems<0 then raise_application_error (-20000, 'number of elements must be greater than or equal to 0'); end if;" +
"for i in 1 .. elems loop " +
"res.extend; " +
"res(i) := elem; " +
"end loop; " +
"return res; " +
"end;",
"create or replace function " + arrayTypeName + "_positions(arr in " + arrayTypeName +
", elem in " + getRawTypeName( elementType ) + ") return sdo_ordinate_array deterministic is " +
"res sdo_ordinate_array:=sdo_ordinate_array(); begin " +
"if arr is null then return null; end if; " +
"if elem is null then " +
"for i in 1 .. arr.count loop " +
"if arr(i) is null then res.extend; res(res.last):=i; end if; " +
"end loop; " +
"else " +
"for i in 1 .. arr.count loop " +
"if arr(i)=elem then res.extend; res(res.last):=i; end if; " +
"end loop; " +
"end if; " +
"return res; " +
"end;",
"create or replace function " + arrayTypeName + "_to_string(arr in " + arrayTypeName +
", sep in varchar2) return varchar2 deterministic is " +
"res varchar2(4000):=''; begin " +
"if arr is null or sep is null then return null; end if; " +
"for i in 1 .. arr.count loop " +
"if arr(i) is not null then " +
"if length(res)<>0 then res:=res||sep; end if; " +
"res:=res||arr(i); " +
"end if; " +
"end loop; " +
"return res; " +
"end;",
"create or replace function " + arrayTypeName + "_from_json(arr in " + jsonTypeName +
") return " + arrayTypeName + " deterministic is " +
"res " + arrayTypeName + ":=" + arrayTypeName + "(); begin " +
"if arr is null then return null; end if; " +
"select " + valueExpression + " bulk collect into res " +
"from json_table(arr,'$[*]' columns (value path '$')) t; " +
"return res; " +
"end;"
};
}
@Override
public String[] getSqlDropStrings(UserDefinedArrayType userDefinedType, Metadata metadata, SqlStringGenerationContext context) {
final QualifiedName typeName = new QualifiedNameParser.NameParts(
Identifier.toIdentifier( userDefinedType.getCatalog(), userDefinedType.isCatalogQuoted() ),
Identifier.toIdentifier( userDefinedType.getSchema(), userDefinedType.isSchemaQuoted() ),
userDefinedType.getNameIdentifier()
);
final String arrayTypeName = context.format( typeName );
final Integer arraySqlTypeCode = userDefinedType.getArraySqlTypeCode();
if ( arraySqlTypeCode == null || arraySqlTypeCode == TABLE ) {
return new String[] {
"drop type " + arrayTypeName + " force"
};
}
return new String[] {
"drop type " + arrayTypeName + " force",
"drop function " + arrayTypeName + "_cmp",
"drop function " + arrayTypeName + "_distinct",
"drop function " + arrayTypeName + "_position",
"drop function " + arrayTypeName + "_length",
"drop function " + arrayTypeName + "_concat",
"drop function " + arrayTypeName + "_contains",
"drop function " + arrayTypeName + "_overlaps",
"drop function " + arrayTypeName + "_get",
"drop function " + arrayTypeName + "_set",
"drop function " + arrayTypeName + "_remove",
"drop function " + arrayTypeName + "_remove_index",
"drop function " + arrayTypeName + "_slice",
"drop function " + arrayTypeName + "_replace",
"drop function " + arrayTypeName + "_trim",
"drop function " + arrayTypeName + "_fill",
"drop function " + arrayTypeName + "_positions",
"drop function " + arrayTypeName + "_to_string",
"drop function " + arrayTypeName + "_from_json"
};
}
private String determineValueExpression(String expression, int elementSqlTypeCode, String elementType) {
switch ( elementSqlTypeCode ) {
case BOOLEAN:
if ( elementType.toLowerCase( Locale.ROOT ).trim().startsWith( "number" ) ) {
return "decode(" + expression + ",'true',1,'false',0,null)";
}
case TINYINT:
case SMALLINT:
case INTEGER:
case BIGINT:
return "cast(" + expression + " as " + elementType + ")";
case DATE:
return "to_date(" + expression + ",'YYYY-MM-DD')";
case TIME:
return "to_timestamp(" + expression + ",'hh24:mi:ss')";
case TIMESTAMP:
return "to_timestamp(" + expression + ",'YYYY-MM-DD\"T\"hh24:mi:ss.FF9')";
case TIMESTAMP_WITH_TIMEZONE:
case TIMESTAMP_UTC:
return "to_timestamp_tz(" + expression + ",'YYYY-MM-DD\"T\"hh24:mi:ss.FF9TZH:TZM')";
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
return "hextoraw(" + expression + ")";
default:
return expression;
}
}
protected String createOrReplaceConcatFunction(String arrayTypeName) {
// Since Oracle has no builtin concat function for varrays and doesn't support varargs,
// we have to create a function with a fixed number of arguments, with defaults, that fits "most" cases.

// Let's just use 5 for the time being until someone requests more.
return createOrReplaceConcatFunction( arrayTypeName, 5 );
}
protected String createOrReplaceConcatFunction(String arrayTypeName, int maxConcatParams) {
final StringBuilder sb = new StringBuilder();
sb.append( "create or replace function " ).append( arrayTypeName ).append( "_concat(" );
sb.append( "arr0 in " ).append( arrayTypeName ).append( ",arr1 in " ).append( arrayTypeName );
for ( int i = 2; i < maxConcatParams; i++ ) {
sb.append( ",arr" ).append( i ).append( " in " ).append( arrayTypeName )
.append( " default " ).append( arrayTypeName ).append( "()" );
}
sb.append( ") return " ).append( arrayTypeName ).append( " deterministic is res " ).append( arrayTypeName )
.append( "; begin if " );
String separator = "";
for ( int i = 0; i < maxConcatParams; i++ ) {
sb.append( separator ).append( "arr" ).append( i ).append( " is null" );
separator = " or ";
}
sb.append( " then return null; end if; " );
sb.append( "select * bulk collect into res from (" );
separator = "";
for ( int i = 0; i < maxConcatParams; i++ ) {
sb.append( separator ).append( "select * from table(arr" ).append( i ).append( ')' );
separator = " union all ";
}
return sb.append( "); return res; end;" ).toString();
}
protected String getRawTypeName(String typeName) {
//trim off the length/precision/scale
final int paren = typeName.indexOf( '(' );
if ( paren > 0 ) {
final int parenEnd = typeName.lastIndexOf( ')' );
return parenEnd + 1 == typeName.length()
? typeName.substring( 0, paren )
: typeName.substring( 0, paren ) + typeName.substring( parenEnd + 1 );
}
return typeName;
}
}
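A worked example for createOrReplaceConcatFunction above, using the hypothetical type name INTARRAY and maxConcatParams = 3. The builder emits two mandatory and one defaulted parameter; the generated statement is a single line, wrapped here only for readability:
create or replace function INTARRAY_concat(arr0 in INTARRAY,arr1 in INTARRAY,arr2 in INTARRAY default INTARRAY())
return INTARRAY deterministic is res INTARRAY;
begin
if arr0 is null or arr1 is null or arr2 is null then return null; end if;
select * bulk collect into res from (select * from table(arr0) union all select * from table(arr1) union all select * from table(arr2));
return res; end;
With the default of 5, arr2 through arr4 are all declared with empty-array defaults, so callers may pass anywhere between two and five arrays.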

View File

@ -84,6 +84,7 @@ import org.hibernate.type.descriptor.jdbc.BlobJdbcType;
import org.hibernate.type.descriptor.jdbc.ClobJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectNullAsBinaryTypeJdbcType;
import org.hibernate.type.descriptor.jdbc.SqlTypedJdbcType;
import org.hibernate.type.descriptor.jdbc.XmlJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.ArrayDdlTypeImpl;
@ -359,12 +360,23 @@ public class PostgreSQLDialect extends Dialect {
ColumnTypeInformation.EMPTY
);
}
final SqlTypedJdbcType elementDescriptor = jdbcTypeRegistry.findSqlTypedDescriptor( componentTypeName );
if ( elementDescriptor != null ) {
return jdbcTypeRegistry.resolveTypeConstructorDescriptor(
jdbcTypeCode,
elementDescriptor,
ColumnTypeInformation.EMPTY
);
}
}
break;
case STRUCT:
final AggregateJdbcType aggregateDescriptor = jdbcTypeRegistry.findAggregateDescriptor( columnTypeName );
if ( aggregateDescriptor != null ) {
return aggregateDescriptor;
final SqlTypedJdbcType descriptor = jdbcTypeRegistry.findSqlTypedDescriptor(
// Skip the schema
columnTypeName.substring( columnTypeName.indexOf( '.' ) + 1 )
);
if ( descriptor != null ) {
return descriptor;
}
break;
}
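// Editorial note, worked example for the lookup above: columnTypeName.substring( columnTypeName.indexOf( '.' ) + 1 )
// strips an optional schema prefix, so "public.inventory_type" is looked up as "inventory_type";
// when the name contains no '.', indexOf returns -1 and the full name is used unchanged.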

View File

@ -143,6 +143,7 @@ public class PostgreSQLEnumJdbcType implements JdbcType {
boolean sortEnumValues) {
final Dialect dialect = database.getDialect();
final Class<? extends Enum<?>> enumClass = (Class<? extends Enum<?>>) javaType.getJavaType();
final String enumTypeName = enumClass.getSimpleName();
final String[] enumeratedValues = EnumHelper.getEnumeratedValues( enumClass );
if ( sortEnumValues ) {
Arrays.sort( enumeratedValues );
@ -155,7 +156,7 @@ public class PostgreSQLEnumJdbcType implements JdbcType {
if ( create != null && create.length > 0 ) {
database.addAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
enumClass.getSimpleName(),
enumTypeName,
database.getDefaultNamespace(),
create,
drop,

View File

@ -61,7 +61,7 @@ public class PostgreSQLStructCastingJdbcType extends AbstractPostgreSQLStructJdb
appender.append( "cast(" );
appender.append( writeExpression );
appender.append( " as " );
appender.append( getTypeName() );
appender.append( getStructTypeName() );
appender.append( ')' );
}

View File

@ -11,6 +11,7 @@ import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.mapping.UserDefinedObjectType;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.type.descriptor.ValueBinder;
@ -58,6 +59,14 @@ public class PostgreSQLStructPGObjectJdbcType extends AbstractPostgreSQLStructJd
);
}
@Override
protected String getRawStructFromJdbcValue(Object rawJdbcValue) {
if ( rawJdbcValue instanceof PGobject ) {
return ( (PGobject) rawJdbcValue ).getValue();
}
return (String) rawJdbcValue;
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
return new BasicBinder<>( javaType, this ) {
@ -70,7 +79,7 @@ public class PostgreSQLStructPGObjectJdbcType extends AbstractPostgreSQLStructJd
options
);
final PGobject holder = new PGobject();
holder.setType( getTypeName() );
holder.setType( getStructTypeName() );
holder.setValue( stringValue );
st.setObject( index, holder );
}
@ -84,7 +93,7 @@ public class PostgreSQLStructPGObjectJdbcType extends AbstractPostgreSQLStructJd
options
);
final PGobject holder = new PGobject();
holder.setType( getTypeName() );
holder.setType( getStructTypeName() );
holder.setValue( stringValue );
st.setObject( name, holder );
}

View File

@ -7,6 +7,9 @@
package org.hibernate.dialect;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.NClob;
import java.sql.SQLException;
import org.hibernate.Internal;
@ -14,6 +17,7 @@ import org.hibernate.metamodel.mapping.AttributeMapping;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.MappingType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
@ -166,28 +170,45 @@ public class StructHelper {
assert attributeMapping.getJdbcTypeCount() == 1;
jdbcValueCount = 1;
final JdbcMapping jdbcMapping = attributeMapping.getSingleJdbcMapping();
final JavaType<Object> relationalJavaType;
if ( jdbcMapping.getValueConverter() == null ) {
//noinspection unchecked
relationalJavaType = (JavaType<Object>) jdbcMapping.getJdbcJavaType();
}
else {
//noinspection unchecked
relationalJavaType = jdbcMapping.getValueConverter().getRelationalJavaType();
}
final Class<?> preferredJavaTypeClass = jdbcMapping.getJdbcType().getPreferredJavaTypeClass( options );
if ( preferredJavaTypeClass == null ) {
jdbcValues[jdbcIndex] = relationalJavaType.wrap(
jdbcMapping.convertToRelationalValue( attributeValues[attributeIndex] ),
options
);
}
else {
jdbcValues[jdbcIndex] = relationalJavaType.unwrap(
jdbcMapping.convertToRelationalValue( attributeValues[attributeIndex] ),
preferredJavaTypeClass,
options
);
final Object relationalValue = jdbcMapping.convertToRelationalValue( attributeValues[attributeIndex] );
if ( relationalValue != null ) {
// Regardless of how LOBs are bound by default, through structs we must use the native types
switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.BLOB:
case SqlTypes.MATERIALIZED_BLOB:
//noinspection unchecked,rawtypes
jdbcValues[jdbcIndex] = ( (JavaType) jdbcMapping.getJdbcJavaType() ).unwrap(
relationalValue,
Blob.class,
options
);
break;
case SqlTypes.CLOB:
case SqlTypes.MATERIALIZED_CLOB:
//noinspection unchecked,rawtypes
jdbcValues[jdbcIndex] = ( (JavaType) jdbcMapping.getJdbcJavaType() ).unwrap(
relationalValue,
Clob.class,
options
);
break;
case SqlTypes.NCLOB:
case SqlTypes.MATERIALIZED_NCLOB:
//noinspection unchecked,rawtypes
jdbcValues[jdbcIndex] = ( (JavaType) jdbcMapping.getJdbcJavaType() ).unwrap(
relationalValue,
NClob.class,
options
);
break;
default:
//noinspection unchecked
jdbcValues[jdbcIndex] = jdbcMapping.getJdbcValueBinder().getBindValue(
relationalValue,
options
);
break;
}
}
}
return jdbcValueCount;

View File

@ -11,13 +11,17 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Struct;
import java.util.ArrayList;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.metamodel.mapping.AttributeMapping;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.MappingType;
import org.hibernate.metamodel.spi.EmbeddableInstantiator;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
@ -25,14 +29,16 @@ import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.spi.UnknownBasicJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.spi.TypeConfiguration;
/**
* @author Christian Beikov
*/
public class StructJdbcType implements AggregateJdbcType {
public class StructJdbcType implements org.hibernate.type.descriptor.jdbc.StructJdbcType {
public static final AggregateJdbcType INSTANCE = new StructJdbcType();
@ -71,6 +77,11 @@ public class StructJdbcType implements AggregateJdbcType {
return SqlTypes.STRUCT;
}
@Override
public String getStructTypeName() {
return typeName;
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(
EmbeddableMappingType mappingType,
@ -319,6 +330,57 @@ public class StructJdbcType implements AggregateJdbcType {
jdbcValue = jdbcMapping.getJdbcJavaType()
.wrap( transformRawJdbcValue( rawJdbcValue, options ), options );
break;
case SqlTypes.ARRAY:
final BasicType<?> elementType = ( (BasicPluralType<?, ?>) jdbcMapping ).getElementType();
final JdbcType elementJdbcType = elementType.getJdbcType();
final Object[] array;
final Object[] newArray;
switch ( elementJdbcType.getDefaultSqlTypeCode() ) {
case SqlTypes.TIME_WITH_TIMEZONE:
case SqlTypes.TIME_UTC:
case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
case SqlTypes.TIMESTAMP_UTC:
// Only transform the raw jdbc value if it could be a TIMESTAMPTZ
array = (Object[]) ((java.sql.Array) rawJdbcValue).getArray();
newArray = new Object[array.length];
for ( int j = 0; j < array.length; j++ ) {
newArray[j] = elementType.getJdbcJavaType().wrap(
transformRawJdbcValue( array[j], options ),
options
);
}
jdbcValue = jdbcMapping.getJdbcJavaType().wrap( newArray, options );
break;
case SqlTypes.STRUCT:
case SqlTypes.JSON:
case SqlTypes.SQLXML:
array = (Object[]) ( (java.sql.Array) rawJdbcValue ).getArray();
newArray = new Object[array.length];
final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) elementJdbcType;
final EmbeddableMappingType subEmbeddableMappingType = aggregateJdbcType.getEmbeddableMappingType();
final EmbeddableInstantiator instantiator = subEmbeddableMappingType.getRepresentationStrategy()
.getInstantiator();
for ( int j = 0; j < array.length; j++ ) {
final Object[] subValues = StructHelper.getAttributeValues(
subEmbeddableMappingType,
aggregateJdbcType.extractJdbcValues(
array[j],
options
),
options
);
newArray[j] = instantiator.instantiate(
() -> subValues,
options.getSessionFactory()
);
}
jdbcValue = jdbcMapping.getJdbcJavaType().wrap( newArray, options );
break;
default:
jdbcValue = jdbcMapping.getJdbcJavaType().wrap( rawJdbcValue, options );
break;
}
break;
default:
jdbcValue = jdbcMapping.getJdbcJavaType().wrap( rawJdbcValue, options );
break;
@ -382,6 +444,57 @@ public class StructJdbcType implements AggregateJdbcType {
targetJdbcValues[jdbcIndex] = jdbcMapping.getJdbcJavaType()
.wrap( transformRawJdbcValue( rawJdbcValue, options ), options );
break;
case SqlTypes.ARRAY:
final BasicType<?> elementType = ( (BasicPluralType<?, ?>) jdbcMapping ).getElementType();
final JdbcType elementJdbcType = elementType.getJdbcType();
final Object[] array;
final Object[] newArray;
switch ( elementJdbcType.getDefaultSqlTypeCode() ) {
case SqlTypes.TIME_WITH_TIMEZONE:
case SqlTypes.TIME_UTC:
case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
case SqlTypes.TIMESTAMP_UTC:
// Only transform the raw jdbc value if it could be a TIMESTAMPTZ
array = (Object[]) ((java.sql.Array) rawJdbcValue).getArray();
newArray = new Object[array.length];
for ( int j = 0; j < array.length; j++ ) {
newArray[j] = elementType.getJdbcJavaType().wrap(
transformRawJdbcValue( array[j], options ),
options
);
}
targetJdbcValues[jdbcIndex] = jdbcMapping.getJdbcJavaType().wrap( newArray, options );
break;
case SqlTypes.STRUCT:
case SqlTypes.JSON:
case SqlTypes.SQLXML:
array = (Object[]) ( (java.sql.Array) rawJdbcValue ).getArray();
newArray = new Object[array.length];
final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) elementJdbcType;
final EmbeddableMappingType subEmbeddableMappingType = aggregateJdbcType.getEmbeddableMappingType();
final EmbeddableInstantiator instantiator = subEmbeddableMappingType.getRepresentationStrategy()
.getInstantiator();
for ( int j = 0; j < array.length; j++ ) {
final Object[] subValues = StructHelper.getAttributeValues(
subEmbeddableMappingType,
aggregateJdbcType.extractJdbcValues(
array[j],
options
),
options
);
newArray[j] = instantiator.instantiate(
() -> subValues,
options.getSessionFactory()
);
}
targetJdbcValues[jdbcIndex] = jdbcMapping.getJdbcJavaType().wrap( newArray, options );
break;
default:
targetJdbcValues[jdbcIndex] = jdbcMapping.getJdbcJavaType().wrap( rawJdbcValue, options );
break;
}
break;
default:
targetJdbcValues[jdbcIndex] = jdbcMapping.getJdbcJavaType().wrap( rawJdbcValue, options );
break;

View File

@ -12,12 +12,12 @@ import org.hibernate.Incubating;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.spi.TypeConfiguration;
/**
@ -37,41 +37,41 @@ public interface AggregateSupport {
* @param template The custom read expression template of the column
* @param placeholder The placeholder to replace with the actual read expression
* @param aggregateParentReadExpression The expression to the aggregate column, which contains the column
* @param columnExpression The column within the aggregate type, for which to return the read expression
* @param aggregateColumn The type information for the aggregate column
* @param column The column within the aggregate type, for which to return the read expression
* @param aggregateColumnType The type information for the aggregate column
* @param columnType The type information for the column within the aggregate type
*/
String aggregateComponentCustomReadExpression(
String template,
String placeholder,
String aggregateParentReadExpression,
String column,
ColumnTypeInformation aggregateColumnType,
ColumnTypeInformation columnType);
String columnExpression,
AggregateColumn aggregateColumn,
Column column);
/**
* Returns the assignment expression to use for {@code column},
* which is part of the aggregate type of {@code aggregatePath}.
*
* @param aggregateParentAssignmentExpression The expression to the aggregate column, which contains the column
* @param columnExpression The column within the aggregate type, for which to return the assignment expression
* @param aggregateColumn The type information for the aggregate column
* @param column The column within the aggregate type, for which to return the assignment expression
* @param aggregateColumnType The type information for the aggregate column
* @param columnType The type information for the column within the aggregate type
*/
String aggregateComponentAssignmentExpression(
String aggregateParentAssignmentExpression,
String column,
ColumnTypeInformation aggregateColumnType,
ColumnTypeInformation columnType);
String columnExpression,
AggregateColumn aggregateColumn,
Column column);
/**
* Returns the custom write expression to use for an aggregate column
* of the given column type, containing the given aggregated columns.
*
* @param aggregateColumnType The type information for the aggregate column
* @param aggregateColumn The type information for the aggregate column
* @param aggregatedColumns The columns of the aggregate type
*/
String aggregateCustomWriteExpression(ColumnTypeInformation aggregateColumnType, List<Column> aggregatedColumns);
String aggregateCustomWriteExpression(AggregateColumn aggregateColumn, List<Column> aggregatedColumns);
/**
* Whether {@link #aggregateCustomWriteExpressionRenderer(SelectableMapping, SelectableMapping[], TypeConfiguration)} is needed
@ -132,7 +132,7 @@ public interface AggregateSupport {
List<AuxiliaryDatabaseObject> aggregateAuxiliaryDatabaseObjects(
Namespace namespace,
String aggregatePath,
ColumnTypeInformation aggregateColumnType,
AggregateColumn aggregateColumn,
List<Column> aggregatedColumns);
/**

View File

@ -11,9 +11,9 @@ import java.util.List;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.spi.TypeConfiguration;
public class AggregateSupportImpl implements AggregateSupport {
@ -25,24 +25,24 @@ public class AggregateSupportImpl implements AggregateSupport {
String template,
String placeholder,
String aggregateParentReadExpression,
String column,
ColumnTypeInformation aggregateColumnType,
ColumnTypeInformation columnType) {
String columnExpression,
AggregateColumn aggregateColumn,
Column column) {
throw new UnsupportedOperationException( "Dialect does not support aggregateComponentCustomReadExpression: " + getClass().getName() );
}
@Override
public String aggregateComponentAssignmentExpression(
String aggregateParentAssignmentExpression,
String column,
ColumnTypeInformation aggregateColumnType,
ColumnTypeInformation columnType) {
String columnExpression,
AggregateColumn aggregateColumn,
Column column) {
throw new UnsupportedOperationException( "Dialect does not support aggregateComponentAssignmentExpression: " + getClass().getName() );
}
@Override
public String aggregateCustomWriteExpression(
ColumnTypeInformation aggregateColumnType,
AggregateColumn aggregateColumn,
List<Column> aggregatedColumns) {
return null;
}
@ -76,7 +76,7 @@ public class AggregateSupportImpl implements AggregateSupport {
public List<AuxiliaryDatabaseObject> aggregateAuxiliaryDatabaseObjects(
Namespace namespace,
String aggregatePath,
ColumnTypeInformation aggregateColumnType,
AggregateColumn aggregateColumn,
List<Column> aggregatedColumns) {
return Collections.emptyList();
}

View File

@ -47,40 +47,40 @@ public class DB2AggregateSupport extends AggregateSupportImpl {
String template,
String placeholder,
String aggregateParentReadExpression,
String column,
ColumnTypeInformation aggregateColumnType,
ColumnTypeInformation columnType) {
switch ( aggregateColumnType.getTypeCode() ) {
String columnExpression,
AggregateColumn aggregateColumn,
Column column) {
switch ( aggregateColumn.getTypeCode() ) {
case STRUCT:
return template.replace( placeholder, aggregateParentReadExpression + ".." + column );
return template.replace( placeholder, aggregateParentReadExpression + ".." + columnExpression );
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumn.getTypeCode() );
}
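// Editorial worked example with illustrative values, not part of this commit: given
// template = "lower(?)", placeholder = "?", aggregateParentReadExpression = "address" and
// columnExpression = "city", the STRUCT branch above returns "lower(address..city)",
// since DB2 addresses attributes of a struct-typed column with the ".." operator.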
@Override
public String aggregateComponentAssignmentExpression(
String aggregateParentAssignmentExpression,
String column,
ColumnTypeInformation aggregateColumnType,
ColumnTypeInformation columnType) {
switch ( aggregateColumnType.getTypeCode() ) {
String columnExpression,
AggregateColumn aggregateColumn,
Column column) {
switch ( aggregateColumn.getTypeCode() ) {
case STRUCT:
return aggregateParentAssignmentExpression + ".." + column;
return aggregateParentAssignmentExpression + ".." + columnExpression;
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumn.getTypeCode() );
}
@Override
public String aggregateCustomWriteExpression(
ColumnTypeInformation aggregateColumnType,
AggregateColumn aggregateColumn,
List<Column> aggregatedColumns) {
switch ( aggregateColumnType.getTypeCode() ) {
switch ( aggregateColumn.getTypeCode() ) {
case STRUCT:
final StringBuilder sb = new StringBuilder();
appendStructCustomWriteExpression( aggregateColumnType, aggregatedColumns, sb );
appendStructCustomWriteExpression( aggregateColumn, aggregatedColumns, sb );
return sb.toString();
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumn.getTypeCode() );
}
private static void appendStructCustomWriteExpression(
@ -322,12 +322,12 @@ public class DB2AggregateSupport extends AggregateSupportImpl {
public List<AuxiliaryDatabaseObject> aggregateAuxiliaryDatabaseObjects(
Namespace namespace,
String aggregatePath,
ColumnTypeInformation aggregateColumnType,
AggregateColumn aggregateColumn,
List<Column> aggregatedColumns) {
if ( aggregateColumnType.getTypeCode() != STRUCT ) {
if ( aggregateColumn.getTypeCode() != STRUCT ) {
return Collections.emptyList();
}
final String columnType = aggregateColumnType.getTypeName();
final String columnType = aggregateColumn.getTypeName();
// The serialize and deserialize functions, as well as the transform, are for supporting struct types in native queries and functions
var list = new ArrayList<AuxiliaryDatabaseObject>( 3 );
var serializerSb = new StringBuilder();

View File

@ -7,22 +7,39 @@
package org.hibernate.dialect.aggregate;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.OracleArrayJdbcType;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.UserDefinedArrayType;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.StructJdbcType;
import org.hibernate.type.descriptor.sql.DdlType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.type.SqlTypes.ARRAY;
import static org.hibernate.type.SqlTypes.BIGINT;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.BLOB;
@ -35,12 +52,13 @@ import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.NCLOB;
import static org.hibernate.type.SqlTypes.SMALLINT;
import static org.hibernate.type.SqlTypes.STRUCT;
import static org.hibernate.type.SqlTypes.STRUCT_ARRAY;
import static org.hibernate.type.SqlTypes.STRUCT_TABLE;
import static org.hibernate.type.SqlTypes.TABLE;
import static org.hibernate.type.SqlTypes.TIME;
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_UTC;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.TIME_UTC;
import static org.hibernate.type.SqlTypes.TIME_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.TINYINT;
import static org.hibernate.type.SqlTypes.VARBINARY;
@ -54,7 +72,8 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
private static final AggregateSupport LEGACY_INSTANCE = new OracleAggregateSupport( false, JsonSupport.NONE );
private static final String JSON_QUERY_START = "json_query(";
private static final String JSON_QUERY_END = "')";
private static final String JSON_QUERY_JSON_END = "' returning json)";
private static final String JSON_QUERY_BLOB_END = "' returning blob)";
private final boolean checkConstraintSupport;
private final JsonSupport jsonSupport;
@ -93,30 +112,32 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
String template,
String placeholder,
String aggregateParentReadExpression,
String column,
ColumnTypeInformation aggregateColumnType,
ColumnTypeInformation columnType) {
switch ( aggregateColumnType.getTypeCode() ) {
String columnExpression,
AggregateColumn aggregateColumn,
Column column) {
switch ( aggregateColumn.getTypeCode() ) {
case JSON:
String jsonTypeName = "json";
switch ( jsonSupport ) {
case OSON:
case MERGEPATCH:
case QUERY_AND_PATH:
case QUERY:
jsonTypeName = "blob";
case OSON:
final String parentPartExpression;
if ( aggregateParentReadExpression.startsWith( JSON_QUERY_START )
&& aggregateParentReadExpression.endsWith( JSON_QUERY_END ) ) {
parentPartExpression = aggregateParentReadExpression.substring( JSON_QUERY_START.length(), aggregateParentReadExpression.length() - JSON_QUERY_END.length() ) + ".";
&& ( aggregateParentReadExpression.endsWith( JSON_QUERY_JSON_END ) || aggregateParentReadExpression.endsWith( JSON_QUERY_BLOB_END ) ) ) {
parentPartExpression = aggregateParentReadExpression.substring( JSON_QUERY_START.length(), aggregateParentReadExpression.length() - JSON_QUERY_JSON_END.length() ) + ".";
}
else {
parentPartExpression = aggregateParentReadExpression + ",'$.";
}
switch ( columnType.getTypeCode() ) {
switch ( column.getTypeCode() ) {
case BOOLEAN:
if ( columnType.getTypeName().toLowerCase( Locale.ROOT ).trim().startsWith( "number" ) ) {
if ( column.getTypeName().toLowerCase( Locale.ROOT ).trim().startsWith( "number" ) ) {
return template.replace(
placeholder,
"decode(json_value(" + parentPartExpression + column + "'),'true',1,'false',0,null)"
"decode(json_value(" + parentPartExpression + columnExpression + "'),'true',1,'false',0,null)"
);
}
case TINYINT:
@ -125,28 +146,28 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
case BIGINT:
return template.replace(
placeholder,
"json_value(" + parentPartExpression + column + "' returning " + columnType.getTypeName() + ')'
"json_value(" + parentPartExpression + columnExpression + "' returning " + column.getTypeName() + ')'
);
case DATE:
return template.replace(
placeholder,
"to_date(json_value(" + parentPartExpression + column + "'),'YYYY-MM-DD')"
"to_date(json_value(" + parentPartExpression + columnExpression + "'),'YYYY-MM-DD')"
);
case TIME:
return template.replace(
placeholder,
"to_timestamp(json_value(" + parentPartExpression + column + "'),'hh24:mi:ss')"
"to_timestamp(json_value(" + parentPartExpression + columnExpression + "'),'hh24:mi:ss')"
);
case TIMESTAMP:
return template.replace(
placeholder,
"to_timestamp(json_value(" + parentPartExpression + column + "'),'YYYY-MM-DD\"T\"hh24:mi:ss.FF9')"
"to_timestamp(json_value(" + parentPartExpression + columnExpression + "'),'YYYY-MM-DD\"T\"hh24:mi:ss.FF9')"
);
case TIMESTAMP_WITH_TIMEZONE:
case TIMESTAMP_UTC:
return template.replace(
placeholder,
"to_timestamp_tz(json_value(" + parentPartExpression + column + "'),'YYYY-MM-DD\"T\"hh24:mi:ss.FF9TZH:TZM')"
"to_timestamp_tz(json_value(" + parentPartExpression + columnExpression + "'),'YYYY-MM-DD\"T\"hh24:mi:ss.FF9TZH:TZM')"
);
case BINARY:
case VARBINARY:
@ -154,7 +175,7 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
// We encode binary data as hex, so we have to decode here
return template.replace(
placeholder,
"hextoraw(json_value(" + parentPartExpression + column + "'))"
"hextoraw(json_value(" + parentPartExpression + columnExpression + "'))"
);
case CLOB:
case NCLOB:
@ -162,56 +183,102 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
// We encode binary data as hex, so we have to decode here
return template.replace(
placeholder,
"(select * from json_table(" + aggregateParentReadExpression + ",'$' columns (" + column + " " + columnType.getTypeName() + " path '$." + column + "')))"
"(select * from json_table(" + aggregateParentReadExpression + ",'$' columns (" + columnExpression + " " + column.getTypeName() + " path '$." + columnExpression + "')))"
);
case ARRAY:
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) column.getValue().getType();
final OracleArrayJdbcType jdbcType = (OracleArrayJdbcType) pluralType.getJdbcType();
switch ( jdbcType.getElementJdbcType().getDefaultSqlTypeCode() ) {
case BOOLEAN:
case DATE:
case TIME:
case TIMESTAMP:
case TIMESTAMP_WITH_TIMEZONE:
case TIMESTAMP_UTC:
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
return template.replace(
placeholder,
jdbcType.getSqlTypeName() + "_from_json(json_query(" + parentPartExpression + columnExpression + "' returning " + jsonTypeName + "))"
);
default:
return template.replace(
placeholder,
"json_value(" + parentPartExpression + columnExpression + "' returning " + column.getTypeName() + ')'
);
}
case JSON:
return template.replace(
placeholder,
"json_query(" + parentPartExpression + column + "')"
"json_query(" + parentPartExpression + columnExpression + "' returning " + jsonTypeName + ")"
);
default:
return template.replace(
placeholder,
"cast(json_value(" + parentPartExpression + column + "') as " + columnType.getTypeName() + ')'
"cast(json_value(" + parentPartExpression + columnExpression + "') as " + column.getTypeName() + ')'
);
}
case NONE:
throw new UnsupportedOperationException( "The Oracle version doesn't support JSON aggregates!" );
}
case STRUCT:
return template.replace( placeholder, aggregateParentReadExpression + "." + column );
case STRUCT_ARRAY:
case STRUCT_TABLE:
return template.replace( placeholder, aggregateParentReadExpression + "." + columnExpression );
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumn.getTypeCode() );
}
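// Editorial worked example with illustrative column names, not part of this commit: for a JSON
// aggregate column "payload", an INTEGER sub-column "width" declared as number(10,0) is read via
//   json_value(payload,'$.width' returning number(10,0))
// while a BOOLEAN sub-column "flag" stored in a number column is read via
//   decode(json_value(payload,'$.flag'),'true',1,'false',0,null)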
@Override
public String aggregateComponentAssignmentExpression(
String aggregateParentAssignmentExpression,
String column,
ColumnTypeInformation aggregateColumnType,
ColumnTypeInformation columnType) {
switch ( aggregateColumnType.getTypeCode() ) {
String columnExpression,
AggregateColumn aggregateColumn,
Column column) {
switch ( aggregateColumn.getTypeCode() ) {
case JSON:
// For JSON we always have to replace the whole object
return aggregateParentAssignmentExpression;
case STRUCT:
return aggregateParentAssignmentExpression + "." + column;
case STRUCT_ARRAY:
case STRUCT_TABLE:
return aggregateParentAssignmentExpression + "." + columnExpression;
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumn.getTypeCode() );
}
private String jsonCustomWriteExpression(String customWriteExpression, int sqlTypeCode, String typeName) {
private String jsonCustomWriteExpression(
String customWriteExpression,
JdbcMapping jdbcMapping,
SelectableMapping column,
TypeConfiguration typeConfiguration) {
final int sqlTypeCode = jdbcMapping.getJdbcType().getDefaultSqlTypeCode();
switch ( jsonSupport ) {
case OSON:
// return customWriteExpression;
case MERGEPATCH:
switch ( sqlTypeCode ) {
case CLOB:
return "to_clob(" + customWriteExpression + ")";
case BOOLEAN:
if ( typeName.toLowerCase( Locale.ROOT ).trim().startsWith( "number" ) ) {
final String sqlTypeName = AbstractSqlAstTranslator.getSqlTypeName( column, typeConfiguration );
if ( sqlTypeName.toLowerCase( Locale.ROOT ).trim().startsWith( "number" ) ) {
return "decode(" + customWriteExpression + ",1,'true',0,'false',null)";
}
case ARRAY:
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) jdbcMapping;
final OracleArrayJdbcType jdbcType = (OracleArrayJdbcType) pluralType.getJdbcType();
switch ( jdbcType.getElementJdbcType().getDefaultSqlTypeCode() ) {
case CLOB:
return "(select json_arrayagg(to_clob(t.column_value)) from table(" + customWriteExpression + ") t)";
case BOOLEAN:
final String elementTypeName = determineElementTypeName( column.toSize(), pluralType, typeConfiguration );
if ( elementTypeName.toLowerCase( Locale.ROOT ).trim().startsWith( "number" ) ) {
return "(select json_arrayagg(decode(t.column_value,1,'true',0,'false',null)) from table(" + customWriteExpression + ") t)";
}
default:
break;
}
default:
return customWriteExpression;
}
@ -219,6 +286,22 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
throw new IllegalStateException( "JSON not supported!" );
}
private static String determineElementTypeName(
Size castTargetSize,
BasicPluralType<?, ?> pluralType,
TypeConfiguration typeConfiguration) {
final DdlTypeRegistry ddlTypeRegistry = typeConfiguration.getDdlTypeRegistry();
final BasicType<?> expressionType = pluralType.getElementType();
DdlType ddlType = ddlTypeRegistry.getDescriptor( expressionType.getJdbcType().getDdlTypeCode() );
if ( ddlType == null ) {
// this may happen when selecting a null value like `SELECT null from ...`
// some DBs need the value to be cast, so, not knowing the real type, we fall back to INTEGER
ddlType = ddlTypeRegistry.getDescriptor( SqlTypes.INTEGER );
}
return ddlType.getTypeName( castTargetSize, expressionType, ddlTypeRegistry );
}

@Override
public boolean requiresAggregateCustomWriteExpressionRenderer(int aggregateSqlTypeCode) {
return aggregateSqlTypeCode == JSON;
@ -242,6 +325,38 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
return checkConstraintSupport;
}
@Override
public List<AuxiliaryDatabaseObject> aggregateAuxiliaryDatabaseObjects(
Namespace namespace,
String aggregatePath,
AggregateColumn aggregateColumn,
List<Column> aggregatedColumns) {
final int typeCode = aggregateColumn.getTypeCode();
if ( typeCode == STRUCT_ARRAY || typeCode == STRUCT_TABLE ) {
final UserDefinedArrayType arrayType = namespace.createUserDefinedArrayType(
Identifier.toIdentifier( aggregateColumn.getSqlType() ),
name -> new UserDefinedArrayType( "orm", namespace, name )
);
final ArrayJdbcType jdbcType = (ArrayJdbcType) ( (BasicType<?>) aggregateColumn.getValue().getType() ).getJdbcType();
final StructJdbcType elementJdbcType = (StructJdbcType) jdbcType.getElementJdbcType();
if ( typeCode == STRUCT_ARRAY ) {
arrayType.setArraySqlTypeCode( ARRAY );
arrayType.setArrayLength( aggregateColumn.getArrayLength() == null ? 127 : aggregateColumn.getArrayLength() );
}
else {
arrayType.setArraySqlTypeCode( TABLE );
}
arrayType.setElementTypeName( elementJdbcType.getStructTypeName() );
arrayType.setElementSqlTypeCode( elementJdbcType.getDefaultSqlTypeCode() );
}
return super.aggregateAuxiliaryDatabaseObjects(
namespace,
aggregatePath,
aggregateColumn,
aggregatedColumns
);
}
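As a minimal sketch of what the method above registers for an array-typed aggregate column (the type names "address_array" and "address_type" are invented for the example; the DDL itself is left to the dialect's UDT exporter):
    // Sketch only: a STRUCT_ARRAY aggregate column of SQL type "address_array"
    // ends up contributing a UserDefinedArrayType configured roughly like this:
    final UserDefinedArrayType arrayType = namespace.createUserDefinedArrayType(
            Identifier.toIdentifier( "address_array" ),
            name -> new UserDefinedArrayType( "orm", namespace, name )
    );
    arrayType.setArraySqlTypeCode( ARRAY );           // TABLE for STRUCT_TABLE aggregates
    arrayType.setArrayLength( 127 );                  // default when no explicit array length is given
    arrayType.setElementTypeName( "address_type" );   // the element struct's type name
    arrayType.setElementSqlTypeCode( STRUCT );        // the element JdbcType's default SQL type code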
private String determineJsonTypeName(SelectableMapping aggregateColumn) {
final String columnDefinition = aggregateColumn.getColumnDefinition();
if ( columnDefinition == null ) {
@ -314,7 +429,6 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
);
currentMappingType = currentAggregate.embeddableMappingType;
}
final int sqlTypeCode = column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode();
final String customWriteExpression = column.getWriteExpression();
currentAggregate.subExpressions.put(
parts[parts.length - 1].getSelectableName(),
@ -322,34 +436,15 @@ public class OracleAggregateSupport extends AggregateSupportImpl {
column,
aggregateSupport.jsonCustomWriteExpression(
customWriteExpression,
sqlTypeCode,
determineTypeName( column, typeConfiguration )
column.getJdbcMapping(),
column,
typeConfiguration
)
)
);
}
}
private static String determineTypeName(SelectableMapping column, TypeConfiguration typeConfiguration) {
final String typeName;
if ( column.getColumnDefinition() == null ) {
final DdlType ddlType = typeConfiguration.getDdlTypeRegistry().getDescriptor(
column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode()
);
return ddlType.getCastTypeName(
column.getJdbcMapping().getJdbcType(),
column.getJdbcMapping().getJavaTypeDescriptor(),
column.getLength(),
column.getPrecision(),
column.getScale()
);
}
else{
typeName = column.getColumnDefinition();
}
return typeName;
}
@Override
public void append(
SqlAppender sb,

View File

@ -10,18 +10,33 @@ import java.util.LinkedHashMap;
import java.util.Map;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.sql.Template;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.type.SqlTypes.ARRAY;
import static org.hibernate.type.SqlTypes.BIGINT;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.BOOLEAN;
import static org.hibernate.type.SqlTypes.DOUBLE;
import static org.hibernate.type.SqlTypes.FLOAT;
import static org.hibernate.type.SqlTypes.INTEGER;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.SMALLINT;
import static org.hibernate.type.SqlTypes.STRUCT;
import static org.hibernate.type.SqlTypes.STRUCT_ARRAY;
import static org.hibernate.type.SqlTypes.STRUCT_TABLE;
import static org.hibernate.type.SqlTypes.TINYINT;
import static org.hibernate.type.SqlTypes.VARBINARY;
public class PostgreSQLAggregateSupport extends AggregateSupportImpl {
@ -37,16 +52,17 @@ public class PostgreSQLAggregateSupport extends AggregateSupportImpl {
String template,
String placeholder,
String aggregateParentReadExpression,
String column,
ColumnTypeInformation aggregateColumnType,
ColumnTypeInformation columnType) {
switch ( aggregateColumnType.getTypeCode() ) {
String columnExpression,
AggregateColumn aggregateColumn,
Column column) {
switch ( aggregateColumn.getTypeCode() ) {
case JSON_ARRAY:
case JSON:
switch ( columnType.getTypeCode() ) {
switch ( column.getTypeCode() ) {
case JSON:
return template.replace(
placeholder,
aggregateParentReadExpression + "->'" + column + "'"
aggregateParentReadExpression + "->'" + columnExpression + "'"
);
case BINARY:
case VARBINARY:
@ -54,27 +70,72 @@ public class PostgreSQLAggregateSupport extends AggregateSupportImpl {
// We encode binary data as hex, so we have to decode here
return template.replace(
placeholder,
"decode(" + aggregateParentReadExpression + "->>'" + column + "','hex')"
"decode(" + aggregateParentReadExpression + "->>'" + columnExpression + "','hex')"
);
case ARRAY:
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) column.getValue().getType();
switch ( pluralType.getElementType().getJdbcType().getDefaultSqlTypeCode() ) {
case BOOLEAN:
case TINYINT:
case SMALLINT:
case INTEGER:
case BIGINT:
case FLOAT:
case DOUBLE:
// For types that are natively supported in jsonb we can use jsonb_array_elements,
// but note that we can't use that for string types,
// because casting a jsonb[] to text[] will not omit the quotes of the jsonb text values
return template.replace(
placeholder,
"cast(array(select jsonb_array_elements(" + aggregateParentReadExpression + "->'" + columnExpression + "')) as " + column.getTypeName() + ')'
);
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
// We encode binary data as hex, so we have to decode here
return template.replace(
placeholder,
"array(select decode(jsonb_array_elements_text(" + aggregateParentReadExpression + "->'" + columnExpression + "'),'hex'))"
);
default:
return template.replace(
placeholder,
"cast(array(select jsonb_array_elements_text(" + aggregateParentReadExpression + "->'" + columnExpression + "')) as " + column.getTypeName() + ')'
);
}
default:
return template.replace(
placeholder,
"cast(" + aggregateParentReadExpression + "->>'" + column + "' as " + columnType.getTypeName() + ')'
"cast(" + aggregateParentReadExpression + "->>'" + columnExpression + "' as " + column.getTypeName() + ')'
);
}
case STRUCT:
return template.replace( placeholder, '(' + aggregateParentReadExpression + ")." + column );
case STRUCT_ARRAY:
case STRUCT_TABLE:
return template.replace( placeholder, '(' + aggregateParentReadExpression + ")." + columnExpression );
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumn.getTypeCode() );
}
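A rough illustration of the jsonb read expressions produced above; the aggregate expression `t.payload`, the column names and the cast target type names are assumptions for the example:
    // Illustrative only - read expressions for sub-columns of a jsonb aggregate `t.payload`:
    //   integer[] column "scores":   cast(array(select jsonb_array_elements(t.payload->'scores')) as integer[])
    //   bytea[] column "images":     array(select decode(jsonb_array_elements_text(t.payload->'images'),'hex'))
    //   varchar column "name":       cast(t.payload->>'name' as varchar(255))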
private static String jsonCustomWriteExpression(String customWriteExpression, int sqlTypeCode) {
private static String jsonCustomWriteExpression(String customWriteExpression, JdbcMapping jdbcMapping) {
final int sqlTypeCode = jdbcMapping.getJdbcType().getDefaultSqlTypeCode();
switch ( sqlTypeCode ) {
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
// We encode binary data as hex
return "to_jsonb(encode(" + customWriteExpression + ",'hex'))";
case ARRAY:
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) jdbcMapping;
switch ( pluralType.getElementType().getJdbcType().getDefaultSqlTypeCode() ) {
case BINARY:
case VARBINARY:
case LONG32VARBINARY:
// We encode binary data as hex
return "to_jsonb(array(select encode(unnest(" + customWriteExpression + "),'hex')))";
default:
return "to_jsonb(" + customWriteExpression + ")";
}
default:
return "to_jsonb(" + customWriteExpression + ")";
}
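Correspondingly, a sketch of the write side, again with `?` standing in for the actual custom write expression:
    // Illustrative only - what jsonCustomWriteExpression produces:
    //   bytea column:     to_jsonb(encode(?,'hex'))
    //   bytea[] column:   to_jsonb(array(select encode(unnest(?),'hex')))
    //   anything else:    to_jsonb(?)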
@ -83,17 +144,20 @@ public class PostgreSQLAggregateSupport extends AggregateSupportImpl {
@Override
public String aggregateComponentAssignmentExpression(
String aggregateParentAssignmentExpression,
String column,
ColumnTypeInformation aggregateColumnType,
ColumnTypeInformation columnType) {
switch ( aggregateColumnType.getTypeCode() ) {
String columnExpression,
AggregateColumn aggregateColumn,
Column column) {
switch ( aggregateColumn.getTypeCode() ) {
case JSON:
case JSON_ARRAY:
// For JSON we always have to replace the whole object
return aggregateParentAssignmentExpression;
case STRUCT:
return aggregateParentAssignmentExpression + "." + column;
case STRUCT_ARRAY:
case STRUCT_TABLE:
return aggregateParentAssignmentExpression + "." + columnExpression;
}
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumn.getTypeCode() );
}
@Override
@ -151,13 +215,12 @@ public class PostgreSQLAggregateSupport extends AggregateSupportImpl {
k -> new AggregateJsonWriteExpression()
);
}
final int sqlTypeCode = column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode();
final String customWriteExpression = column.getWriteExpression();
currentAggregate.subExpressions.put(
parts[parts.length - 1].getSelectableName(),
new BasicJsonWriteExpression(
column,
jsonCustomWriteExpression( customWriteExpression, sqlTypeCode )
jsonCustomWriteExpression( customWriteExpression, column.getJdbcMapping() )
)
);
}

View File

@ -158,8 +158,8 @@ public class InverseDistributionFunction extends AbstractSqmSelfRenderingFunctio
}
@Override
protected ReturnableType<?> resolveResultType(
Supplier<MappingModelExpressible<?>> inferredTypeSupplier,
protected ReturnableType<?> determineResultType(
SqmToSqlAstConverter converter,
TypeConfiguration typeConfiguration) {
return (ReturnableType<?>)
getWithinGroup().getSortSpecifications().get( 0 )

View File

@ -6,49 +6,25 @@
*/
package org.hibernate.dialect.function.array;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.metamodel.model.domain.DomainType;
import org.hibernate.query.ReturnableType;
import org.hibernate.query.SemanticException;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.NodeBuilder;
import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
import org.hibernate.query.sqm.function.FunctionKind;
import org.hibernate.query.sqm.function.FunctionRenderer;
import org.hibernate.query.sqm.function.SelfRenderingOrderedSetAggregateFunctionSqlAstExpression;
import org.hibernate.query.sqm.function.SelfRenderingSqmOrderedSetAggregateFunction;
import org.hibernate.query.sqm.produce.function.ArgumentsValidator;
import org.hibernate.query.sqm.produce.function.FunctionReturnTypeResolver;
import org.hibernate.query.sqm.produce.function.StandardArgumentsValidators;
import org.hibernate.query.sqm.produce.function.StandardFunctionArgumentTypeResolvers;
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.query.sqm.tree.predicate.SqmPredicate;
import org.hibernate.query.sqm.tree.select.SqmOrderByClause;
import org.hibernate.query.sqm.tree.select.SqmSortSpecification;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.cte.CteContainer;
import org.hibernate.sql.ast.tree.cte.SelfRenderingCteObject;
import org.hibernate.sql.ast.tree.expression.Distinct;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.QueryTransformer;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SortSpecification;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.sql.DdlType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
/**
* @author Christian Beikov
@ -94,8 +70,24 @@ public class OracleArrayAggEmulation extends AbstractSqmSelfRenderingFunctionDes
List<SortSpecification> withinGroup,
ReturnableType<?> returnType,
SqlAstTranslator<?> translator) {
sqlAppender.appendSql( "json_arrayagg" );
sqlAppender.appendSql( '(' );
if ( !( returnType instanceof BasicPluralType<?, ?> ) ) {
throw new SemanticException(
"Oracle array_agg emulation requires a basic plural return type, but resolved return type was: " + returnType
);
}
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) returnType;
final boolean returnJson = pluralType.getJdbcType().getDefaultSqlTypeCode() == SqlTypes.JSON;
if ( returnJson ) {
sqlAppender.append( "json_arrayagg(" );
}
else {
final String arrayTypeName = DdlTypeHelper.getTypeName(
returnType,
translator.getSessionFactory().getTypeConfiguration()
);
sqlAppender.append( arrayTypeName );
sqlAppender.append( "_from_json(json_arrayagg(" );
}
final SqlAstNode firstArg = sqlAstArguments.get( 0 );
final Expression arg;
if ( firstArg instanceof Distinct ) {
@ -129,214 +121,8 @@ public class OracleArrayAggEmulation extends AbstractSqmSelfRenderingFunctionDes
sqlAppender.appendSql( ')' );
translator.getCurrentClauseStack().pop();
}
}
@Override
public <T> SelfRenderingSqmOrderedSetAggregateFunction<T> generateSqmOrderedSetAggregateFunctionExpression(
List<? extends SqmTypedNode<?>> arguments,
SqmPredicate filter,
SqmOrderByClause withinGroupClause,
ReturnableType<T> impliedResultType,
QueryEngine queryEngine) {
return new OracleArrayAggSqmFunction<>(
this,
this,
arguments,
filter,
withinGroupClause,
impliedResultType,
getArgumentsValidator(),
getReturnTypeResolver(),
queryEngine.getCriteriaBuilder(),
getName()
);
}
protected static class OracleArrayAggSqmFunction<T> extends SelfRenderingSqmOrderedSetAggregateFunction<T> {
public OracleArrayAggSqmFunction(
OracleArrayAggEmulation descriptor,
FunctionRenderer renderingSupport,
List<? extends SqmTypedNode<?>> arguments,
SqmPredicate filter,
SqmOrderByClause withinGroupClause,
ReturnableType<T> impliedResultType,
ArgumentsValidator argumentsValidator,
FunctionReturnTypeResolver returnTypeResolver,
NodeBuilder nodeBuilder,
String name) {
super(
descriptor,
renderingSupport,
arguments,
filter,
withinGroupClause,
impliedResultType,
argumentsValidator,
returnTypeResolver,
nodeBuilder,
name
);
}
@Override
protected ReturnableType<?> resolveResultType(TypeConfiguration typeConfiguration) {
return getReturnTypeResolver().resolveFunctionReturnType(
getImpliedResultType(),
() -> null,
getArguments(),
nodeBuilder().getTypeConfiguration()
);
}
@Override
public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
final ReturnableType<?> resultType = resolveResultType( walker );
if ( resultType == null ) {
throw new SemanticException(
"Oracle array_agg emulation requires knowledge about the return type, but resolved return type could not be determined"
);
}
final DomainType<?> type = resultType.getSqmType();
if ( !( type instanceof BasicPluralType<?, ?> ) ) {
throw new SemanticException(
"Oracle array_agg emulation requires a basic plural return type, but resolved return type was: " + type
);
}
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) type;
if ( pluralType.getJdbcType().getDefaultSqlTypeCode() == SqlTypes.JSON ) {
// If we can return the result as JSON, we don't need further special handling
return super.convertToSqlAst( walker );
}
// If we have to return an array type, then we must apply some further magic to transform the json array
// into an array of the desired array type via a with-clause defined function
final TypeConfiguration typeConfiguration = walker.getCreationContext().getSessionFactory().getTypeConfiguration();
final DdlTypeRegistry ddlTypeRegistry = typeConfiguration.getDdlTypeRegistry();
final DdlType ddlType = ddlTypeRegistry.getDescriptor(
pluralType.getJdbcType().getDdlTypeCode()
);
final String arrayTypeName = ddlType.getCastTypeName( Size.nil(), pluralType, ddlTypeRegistry );
List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
if ( getArgumentsValidator() != null ) {
getArgumentsValidator().validateSqlTypes( arguments, getFunctionName() );
}
List<SortSpecification> withinGroup;
if ( getWithinGroup() == null ) {
withinGroup = Collections.emptyList();
}
else {
walker.getCurrentClauseStack().push( Clause.WITHIN_GROUP );
try {
final List<SqmSortSpecification> sortSpecifications = getWithinGroup().getSortSpecifications();
withinGroup = new ArrayList<>( sortSpecifications.size() );
for ( SqmSortSpecification sortSpecification : sortSpecifications ) {
final SortSpecification specification = (SortSpecification) walker.visitSortSpecification( sortSpecification );
if ( specification != null ) {
withinGroup.add( specification );
}
}
}
finally {
walker.getCurrentClauseStack().pop();
}
}
final OracleArrayAggEmulationSqlAstExpression expression = new OracleArrayAggEmulationSqlAstExpression(
getFunctionName(),
getFunctionRenderer(),
arguments,
getFilter() == null ? null : walker.visitNestedTopLevelPredicate( getFilter() ),
withinGroup,
resultType,
getMappingModelExpressible( walker, resultType, arguments ),
arrayTypeName
);
walker.registerQueryTransformer( expression );
return expression;
}
private static class OracleArrayAggEmulationSqlAstExpression
extends SelfRenderingOrderedSetAggregateFunctionSqlAstExpression
implements QueryTransformer {
private final String arrayTypeName;
private final String functionName;
public OracleArrayAggEmulationSqlAstExpression(
String functionName,
FunctionRenderer renderer,
List<? extends SqlAstNode> sqlAstArguments,
Predicate filter,
List<SortSpecification> withinGroup,
ReturnableType<?> type,
JdbcMappingContainer expressible,
String arrayTypeName) {
super(
functionName,
renderer,
sqlAstArguments,
filter,
withinGroup,
type,
expressible
);
this.arrayTypeName = arrayTypeName;
this.functionName = "json_to_" + arrayTypeName;
}
@Override
public QuerySpec transform(CteContainer cteContainer, QuerySpec querySpec, SqmToSqlAstConverter converter) {
if ( cteContainer.getCteStatement( functionName ) == null ) {
cteContainer.addCteObject(
new SelfRenderingCteObject() {
@Override
public String getName() {
return functionName;
}
@Override
public void render(
SqlAppender sqlAppender,
SqlAstTranslator<?> walker,
SessionFactoryImplementor sessionFactory) {
sqlAppender.appendSql( "function " );
sqlAppender.appendSql( functionName );
sqlAppender.appendSql( "(p_json_array in " );
sqlAppender.appendSql(
sessionFactory.getTypeConfiguration().getDdlTypeRegistry()
.getTypeName(
SqlTypes.JSON,
sessionFactory.getJdbcServices().getDialect()
)
);
sqlAppender.appendSql( ") return " );
sqlAppender.appendSql( arrayTypeName );
sqlAppender.appendSql( " is v_result " );
sqlAppender.appendSql( arrayTypeName );
sqlAppender.appendSql( "; begin select t.value bulk collect into v_result " );
sqlAppender.appendSql( "from json_table(p_json_array,'$[*]' columns (value path '$')) t;" );
sqlAppender.appendSql( "return v_result; end; " );
}
}
);
}
return querySpec;
}
@Override
public void renderToSql(
SqlAppender sqlAppender,
SqlAstTranslator<?> walker,
SessionFactoryImplementor sessionFactory) {
// Oracle doesn't have an array_agg function, so we must use the collect function,
// which requires that we cast the result to the array type.
// On empty results, we require that array_agg returns null,
// but Oracle rather returns an empty collection, so we have to handle that.
// Unfortunately, nullif doesn't work with collection types,
// so we have to render a case when expression instead
sqlAppender.append( functionName );
sqlAppender.append( '(' );
super.renderToSql( sqlAppender, walker, sessionFactory );
sqlAppender.appendSql( ')' );
}
if ( !returnJson ) {
sqlAppender.appendSql( ')' );
}
}
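The emulation above therefore renders roughly one of two shapes; `SCORES_ARRAY` is a hypothetical array type name, and the `_from_json` helper function is assumed to be created elsewhere as part of this change:
    // Illustrative only:
    //   JSON-typed return type:    json_arrayagg(x.score)
    //   array-typed return type:   SCORES_ARRAY_from_json(json_arrayagg(x.score))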

View File

@ -0,0 +1,174 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.mapping;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.boot.model.relational.QualifiedTypeName;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.Dialect;
public class AbstractUserDefinedType implements UserDefinedType {
private final String contributor;
private Identifier catalog;
private Identifier schema;
private Identifier name;
public AbstractUserDefinedType(
String contributor,
Namespace namespace,
Identifier physicalTypeName) {
this.contributor = contributor;
this.catalog = namespace.getPhysicalName().getCatalog();
this.schema = namespace.getPhysicalName().getSchema();
this.name = physicalTypeName;
}
@Override
public String getContributor() {
return contributor;
}
public String getQualifiedName(SqlStringGenerationContext context) {
return context.format( new QualifiedTypeName( catalog, schema, name ) );
}
public void setName(String name) {
this.name = Identifier.toIdentifier( name );
}
public String getName() {
return name == null ? null : name.getText();
}
public Identifier getNameIdentifier() {
return name;
}
public String getQuotedName() {
return name == null ? null : name.toString();
}
public String getQuotedName(Dialect dialect) {
return name == null ? null : name.render( dialect );
}
public QualifiedTableName getQualifiedTableName() {
return name == null ? null : new QualifiedTableName( catalog, schema, name );
}
public boolean isQuoted() {
return name.isQuoted();
}
public void setQuoted(boolean quoted) {
if ( quoted == name.isQuoted() ) {
return;
}
this.name = new Identifier( name.getText(), quoted );
}
public void setSchema(String schema) {
this.schema = Identifier.toIdentifier( schema );
}
public String getSchema() {
return schema == null ? null : schema.getText();
}
public String getQuotedSchema() {
return schema == null ? null : schema.toString();
}
public String getQuotedSchema(Dialect dialect) {
return schema == null ? null : schema.render( dialect );
}
public boolean isSchemaQuoted() {
return schema != null && schema.isQuoted();
}
public void setCatalog(String catalog) {
this.catalog = Identifier.toIdentifier( catalog );
}
public String getCatalog() {
return catalog == null ? null : catalog.getText();
}
public String getQuotedCatalog() {
return catalog == null ? null : catalog.render();
}
public String getQuotedCatalog(Dialect dialect) {
return catalog == null ? null : catalog.render( dialect );
}
public boolean isCatalogQuoted() {
return catalog != null && catalog.isQuoted();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (catalog == null ? 0 : catalog.hashCode());
result = prime * result + (name == null ? 0 : name.hashCode());
result = prime * result + (schema == null ? 0 : schema.hashCode());
return result;
}
@Override
public boolean equals(Object object) {
return object != null && object.getClass() == getClass() && equals( (AbstractUserDefinedType) object);
}
public boolean equals(AbstractUserDefinedType table) {
if ( null == table ) {
return false;
}
else if ( this == table ) {
return true;
}
else {
return Identifier.areEqual( name, table.name )
&& Identifier.areEqual( schema, table.schema )
&& Identifier.areEqual( catalog, table.catalog );
}
}
public String toString() {
final StringBuilder buf = new StringBuilder()
.append( getClass().getSimpleName() )
.append( '(' );
if ( getCatalog() != null ) {
buf.append( getCatalog() ).append( "." );
}
if ( getSchema() != null ) {
buf.append( getSchema() ).append( "." );
}
buf.append( getName() ).append( ')' );
return buf.toString();
}
@Override
public String getExportIdentifier() {
final StringBuilder qualifiedName = new StringBuilder();
if ( catalog != null ) {
qualifiedName.append( catalog.render() ).append( '.' );
}
if ( schema != null ) {
qualifiedName.append( schema.render() ).append( '.' );
}
return qualifiedName.append( name.render() ).toString();
}
}

View File

@ -10,6 +10,10 @@ import org.hibernate.dialect.Dialect;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.sql.Template;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.STRUCT_ARRAY;
import static org.hibernate.type.SqlTypes.STRUCT_TABLE;
/**
* An aggregate column is a column of type {@link org.hibernate.type.SqlTypes#STRUCT},
* {@link org.hibernate.type.SqlTypes#JSON} or {@link org.hibernate.type.SqlTypes#SQLXML}
@ -76,7 +80,7 @@ public class AggregateColumn extends Column {
final String simpleAggregateName = aggregateColumn.getQuotedName( dialect );
final String aggregateSelectableExpression;
if ( parentAggregateColumn == null ) {
aggregateSelectableExpression = Template.TEMPLATE + "." + simpleAggregateName;
aggregateSelectableExpression = getRootAggregateSelectableExpression( aggregateColumn, simpleAggregateName );
}
else {
aggregateSelectableExpression = dialect.getAggregateSupport().aggregateComponentCustomReadExpression(
@ -87,12 +91,24 @@ public class AggregateColumn extends Column {
parentAggregateColumn.getComponent()
),
simpleAggregateName,
parentAggregateColumn, aggregateColumn
parentAggregateColumn,
aggregateColumn
);
}
return aggregateSelectableExpression;
}
private static String getRootAggregateSelectableExpression(AggregateColumn aggregateColumn, String simpleAggregateName) {
switch ( aggregateColumn.getTypeCode() ) {
case JSON_ARRAY:
case STRUCT_ARRAY:
case STRUCT_TABLE:
return Template.TEMPLATE;
default:
return Template.TEMPLATE + "." + simpleAggregateName;
}
}
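To spell out the distinction the helper above makes ("address" is a placeholder column name):
    // Illustrative only - root selectable expression templates:
    //   JSON / STRUCT aggregate:                     Template.TEMPLATE + ".address"
    //   JSON_ARRAY / STRUCT_ARRAY / STRUCT_TABLE:    Template.TEMPLATE
    // For the array-typed aggregates the selectable expression is just the template marker,
    // presumably so the dialect's custom read expression can address the whole aggregate value.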
public String getAggregateAssignmentExpressionTemplate(Dialect dialect) {
return getAggregateAssignmentExpressionTemplate( dialect, component );
}

View File

@ -1192,5 +1192,9 @@ public class BasicValue extends SimpleValue implements JdbcTypeIndicators, Resol
* The resolved MutabilityPlan
*/
MutabilityPlan<J> getMutabilityPlan();
default void updateResolution(BasicType<?> type) {
throw new UnsupportedOperationException();
}
}
}

View File

@ -0,0 +1,59 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.mapping;
import org.hibernate.Incubating;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
/**
* A mapping model object representing a named relational database array type.
*/
@Incubating
public class UserDefinedArrayType extends AbstractUserDefinedType {
private Integer arraySqlTypeCode;
private String elementTypeName;
private Integer elementSqlTypeCode;
private Integer arrayLength;
public UserDefinedArrayType(String contributor, Namespace namespace, Identifier physicalTypeName) {
super( contributor, namespace, physicalTypeName );
}
public Integer getArraySqlTypeCode() {
return arraySqlTypeCode;
}
public void setArraySqlTypeCode(Integer arraySqlTypeCode) {
this.arraySqlTypeCode = arraySqlTypeCode;
}
public String getElementTypeName() {
return elementTypeName;
}
public void setElementTypeName(String elementTypeName) {
this.elementTypeName = elementTypeName;
}
public Integer getElementSqlTypeCode() {
return elementSqlTypeCode;
}
public void setElementSqlTypeCode(Integer elementSqlTypeCode) {
this.elementSqlTypeCode = elementSqlTypeCode;
}
public Integer getArrayLength() {
return arrayLength;
}
public void setArrayLength(Integer arrayLength) {
this.arrayLength = arrayLength;
}
}

View File

@ -0,0 +1,118 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.mapping;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.hibernate.Incubating;
import org.hibernate.Internal;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
/**
* A mapping model object representing a relational database {@linkplain org.hibernate.annotations.Struct UDT}.
*/
@Incubating
public class UserDefinedObjectType extends AbstractUserDefinedType {
private final Map<String, Column> columns = new LinkedHashMap<>();
private int[] orderMapping;
private String comment;
public UserDefinedObjectType(String contributor, Namespace namespace, Identifier physicalTypeName) {
super( contributor, namespace, physicalTypeName );
}
/**
* Return the column identified by the column provided as argument.
*
* @param column a column with at least a name.
* @return the underlying column, or null if it is not part of this type.
* Note: the instance *can* be different from the input parameter,
* but the name will be the same.
*/
public Column getColumn(Column column) {
if ( column == null ) {
return null;
}
else {
final Column existing = columns.get( column.getCanonicalName() );
return column.equals( existing ) ? existing : null;
}
}
public Column getColumn(Identifier name) {
if ( name == null ) {
return null;
}
return columns.get( name.getCanonicalName() );
}
public Column getColumn(int n) {
final Iterator<Column> iter = columns.values().iterator();
for ( int i = 0; i < n - 1; i++ ) {
iter.next();
}
return iter.next();
}
public void addColumn(Column column) {
final Column old = getColumn( column );
if ( old == null ) {
columns.put( column.getCanonicalName(), column );
column.uniqueInteger = columns.size();
}
else {
column.uniqueInteger = old.uniqueInteger;
}
}
public int getColumnSpan() {
return columns.size();
}
public Collection<Column> getColumns() {
return columns.values();
}
public boolean containsColumn(Column column) {
return columns.containsValue( column );
}
public String getComment() {
return comment;
}
public void setComment(String comment) {
this.comment = comment;
}
@Internal
public void reorderColumns(List<Column> columns) {
if ( orderMapping != null ) {
return;
}
orderMapping = new int[columns.size()];
int i = 0;
for ( Column column : this.columns.values() ) {
orderMapping[columns.indexOf( column )] = i++;
}
this.columns.clear();
for ( Column column : columns ) {
this.columns.put( column.getCanonicalName(), column );
}
}
@Internal
public int[] getOrderMapping() {
return orderMapping;
}
}

View File

@ -7,268 +7,48 @@
package org.hibernate.mapping;
import java.io.Serializable;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.hibernate.Internal;
import org.hibernate.Incubating;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.ContributableDatabaseObject;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.boot.model.relational.QualifiedTypeName;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.Dialect;
/**
* A mapping model object representing a relational database {@linkplain org.hibernate.annotations.Struct UDT}.
* A mapping model object representing a user-defined type.
*
* @see UserDefinedObjectType
* @see UserDefinedArrayType
*/
public class UserDefinedType implements Serializable, ContributableDatabaseObject {
@Incubating
public interface UserDefinedType extends Serializable, ContributableDatabaseObject {
private final String contributor;
String getQualifiedName(SqlStringGenerationContext context);
private Identifier catalog;
private Identifier schema;
private Identifier name;
String getName();
private final Map<String, Column> columns = new LinkedHashMap<>();
private int[] orderMapping;
private String comment;
Identifier getNameIdentifier();
public UserDefinedType(
String contributor,
Namespace namespace,
Identifier physicalTypeName) {
this.contributor = contributor;
this.catalog = namespace.getPhysicalName().getCatalog();
this.schema = namespace.getPhysicalName().getSchema();
this.name = physicalTypeName;
}
String getQuotedName();
@Override
public String getContributor() {
return contributor;
}
String getQuotedName(Dialect dialect);
public String getQualifiedName(SqlStringGenerationContext context) {
return context.format( new QualifiedTypeName( catalog, schema, name ) );
}
QualifiedTableName getQualifiedTableName();
boolean isQuoted();
public void setName(String name) {
this.name = Identifier.toIdentifier( name );
}
String getSchema();
public String getName() {
return name == null ? null : name.getText();
}
String getQuotedSchema();
public Identifier getNameIdentifier() {
return name;
}
String getQuotedSchema(Dialect dialect);
public String getQuotedName() {
return name == null ? null : name.toString();
}
boolean isSchemaQuoted();
public String getQuotedName(Dialect dialect) {
return name == null ? null : name.render( dialect );
}
String getCatalog();
public QualifiedTableName getQualifiedTableName() {
return name == null ? null : new QualifiedTableName( catalog, schema, name );
}
String getQuotedCatalog();
public boolean isQuoted() {
return name.isQuoted();
}
public void setQuoted(boolean quoted) {
if ( quoted == name.isQuoted() ) {
return;
}
this.name = new Identifier( name.getText(), quoted );
}
public void setSchema(String schema) {
this.schema = Identifier.toIdentifier( schema );
}
public String getSchema() {
return schema == null ? null : schema.getText();
}
public String getQuotedSchema() {
return schema == null ? null : schema.toString();
}
public String getQuotedSchema(Dialect dialect) {
return schema == null ? null : schema.render( dialect );
}
public boolean isSchemaQuoted() {
return schema != null && schema.isQuoted();
}
public void setCatalog(String catalog) {
this.catalog = Identifier.toIdentifier( catalog );
}
public String getCatalog() {
return catalog == null ? null : catalog.getText();
}
public String getQuotedCatalog() {
return catalog == null ? null : catalog.render();
}
public String getQuotedCatalog(Dialect dialect) {
return catalog == null ? null : catalog.render( dialect );
}
public boolean isCatalogQuoted() {
return catalog != null && catalog.isQuoted();
}
/**
* Return the column which is identified by column provided as argument.
*
* @param column column with at least a name.
* @return the underlying column or null if not inside this table.
* Note: the instance *can* be different than the input parameter,
* but the name will be the same.
*/
public Column getColumn(Column column) {
if ( column == null ) {
return null;
}
else {
final Column existing = columns.get( column.getCanonicalName() );
return column.equals( existing ) ? existing : null;
}
}
public Column getColumn(Identifier name) {
if ( name == null ) {
return null;
}
return columns.get( name.getCanonicalName() );
}
public Column getColumn(int n) {
final Iterator<Column> iter = columns.values().iterator();
for ( int i = 0; i < n - 1; i++ ) {
iter.next();
}
return iter.next();
}
public void addColumn(Column column) {
final Column old = getColumn( column );
if ( old == null ) {
columns.put( column.getCanonicalName(), column );
column.uniqueInteger = columns.size();
}
else {
column.uniqueInteger = old.uniqueInteger;
}
}
public int getColumnSpan() {
return columns.size();
}
public Collection<Column> getColumns() {
return columns.values();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (catalog == null ? 0 : catalog.hashCode());
result = prime * result + (name == null ? 0 : name.hashCode());
result = prime * result + (schema == null ? 0 : schema.hashCode());
return result;
}
@Override
public boolean equals(Object object) {
return object instanceof UserDefinedType && equals( (UserDefinedType) object);
}
public boolean equals(UserDefinedType table) {
if ( null == table ) {
return false;
}
else if ( this == table ) {
return true;
}
else {
return Identifier.areEqual( name, table.name )
&& Identifier.areEqual( schema, table.schema )
&& Identifier.areEqual( catalog, table.catalog );
}
}
public boolean containsColumn(Column column) {
return columns.containsValue( column );
}
public String toString() {
final StringBuilder buf = new StringBuilder()
.append( getClass().getSimpleName() )
.append( '(' );
if ( getCatalog() != null ) {
buf.append( getCatalog() ).append( "." );
}
if ( getSchema() != null ) {
buf.append( getSchema() ).append( "." );
}
buf.append( getName() ).append( ')' );
return buf.toString();
}
public String getComment() {
return comment;
}
public void setComment(String comment) {
this.comment = comment;
}
@Override
public String getExportIdentifier() {
final StringBuilder qualifiedName = new StringBuilder();
if ( catalog != null ) {
qualifiedName.append( catalog.render() ).append( '.' );
}
if ( schema != null ) {
qualifiedName.append( schema.render() ).append( '.' );
}
return qualifiedName.append( name.render() ).toString();
}
@Internal
public void reorderColumns(List<Column> columns) {
if ( orderMapping != null ) {
return;
}
orderMapping = new int[columns.size()];
int i = 0;
for ( Column column : this.columns.values() ) {
orderMapping[columns.indexOf( column )] = i++;
}
this.columns.clear();
for ( Column column : columns ) {
this.columns.put( column.getCanonicalName(), column );
}
}
@Internal
public int[] getOrderMapping() {
return orderMapping;
}
boolean isCatalogQuoted();
}

View File

@ -16,6 +16,7 @@ import org.hibernate.PropertyNotFoundException;
import org.hibernate.boot.model.convert.spi.ConverterDescriptor;
import org.hibernate.internal.EntityManagerMessageLogger;
import org.hibernate.internal.HEMLogging;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Any;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.Component;
@ -56,8 +57,10 @@ import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.property.access.internal.PropertyAccessMapImpl;
import org.hibernate.property.access.spi.Getter;
import org.hibernate.type.AnyType;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.EntityType;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.spi.EmbeddableAggregateJavaType;
import org.hibernate.type.spi.CompositeTypeImplementor;
import jakarta.persistence.ManyToMany;
@ -330,7 +333,16 @@ public class AttributeFactory {
return context.resolveBasicType( type );
}
else {
return (DomainType<Y>) hibernateValue.getType();
final org.hibernate.type.Type type = hibernateValue.getType();
if ( type instanceof BasicPluralType<?, ?> ) {
final JavaType<?> javaTypeDescriptor = ( (BasicPluralType<?, ?>) type ).getElementType()
.getJavaTypeDescriptor();
if ( javaTypeDescriptor instanceof EmbeddableAggregateJavaType<?> ) {
final AggregateColumn aggregateColumn = (AggregateColumn) hibernateValue.getColumns().get( 0 );
classEmbeddableType( context, aggregateColumn.getComponent() );
}
}
return (DomainType<Y>) type;
}
}

View File

@ -297,6 +297,7 @@ public abstract class AbstractEmbeddableMapping implements EmbeddableMappingType
nullable = column.isNullable();
isLob = column.isSqlTypeLob( creationProcess.getCreationContext().getMetadata() );
selectablePath = basicValue.createSelectablePath( column.getQuotedName( dialect ) );
MappingModelCreationHelper.resolveAggregateColumnBasicType( creationProcess, role, column );
}
else {
columnDefinition = null;

View File

@ -60,12 +60,24 @@ import org.hibernate.type.CompositeType;
import org.hibernate.type.EntityType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.hibernate.type.descriptor.java.ImmutableMutabilityPlan;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.MutabilityPlan;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.spi.CompositeTypeImplementor;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.JSON_ARRAY;
import static org.hibernate.type.SqlTypes.SQLXML;
import static org.hibernate.type.SqlTypes.STRUCT;
import static org.hibernate.type.SqlTypes.STRUCT_ARRAY;
import static org.hibernate.type.SqlTypes.STRUCT_TABLE;
import static org.hibernate.type.SqlTypes.XML_ARRAY;
/**
* Describes a "normal" embeddable.
*
@ -225,23 +237,73 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
final TypeConfiguration typeConfiguration = creationContext.getTypeConfiguration();
final BasicTypeRegistry basicTypeRegistry = typeConfiguration.getBasicTypeRegistry();
final Column aggregateColumn = bootDescriptor.getAggregateColumn();
Integer aggregateSqlTypeCode = aggregateColumn.getSqlTypeCode();
boolean isArray = false;
String structTypeName = null;
switch ( aggregateSqlTypeCode ) {
case STRUCT:
structTypeName = aggregateColumn.getSqlType( creationContext.getMetadata() );
break;
case STRUCT_ARRAY:
case STRUCT_TABLE:
isArray = true;
aggregateSqlTypeCode = STRUCT;
structTypeName = bootDescriptor.getStructName();
if ( structTypeName == null ) {
final String arrayTypeName = aggregateColumn.getSqlType( creationContext.getMetadata() );
if ( arrayTypeName.endsWith( " array" ) ) {
structTypeName = arrayTypeName.substring( 0, arrayTypeName.length() - " array".length() );
}
}
break;
case JSON_ARRAY:
isArray = true;
aggregateSqlTypeCode = JSON;
break;
case XML_ARRAY:
isArray = true;
aggregateSqlTypeCode = SQLXML;
break;
}
final JdbcTypeRegistry jdbcTypeRegistry = typeConfiguration.getJdbcTypeRegistry();
final AggregateJdbcType aggregateJdbcType = jdbcTypeRegistry.resolveAggregateDescriptor(
aggregateSqlTypeCode,
structTypeName,
this,
creationContext
);
final BasicType<?> basicType = basicTypeRegistry.resolve(
getMappedJavaType(),
typeConfiguration.getJdbcTypeRegistry().resolveAggregateDescriptor(
aggregateColumn.getSqlTypeCode(),
aggregateColumn.getSqlTypeCode() == SqlTypes.STRUCT
? aggregateColumn.getSqlType( creationContext.getMetadata() )
: null,
this,
creationContext
)
aggregateJdbcType
);
// Register the resolved type under its struct name and java class name
if ( bootDescriptor.getStructName() != null ) {
basicTypeRegistry.register( basicType, bootDescriptor.getStructName() );
basicTypeRegistry.register( basicType, getMappedJavaType().getJavaTypeClass().getName() );
}
return basicType;
final BasicValue basicValue = (BasicValue) aggregateColumn.getValue();
final BasicType<?> resolvedJdbcMapping;
if ( isArray ) {
final JdbcTypeConstructor arrayConstructor = jdbcTypeRegistry.getConstructor( SqlTypes.ARRAY );
if ( arrayConstructor == null ) {
throw new IllegalArgumentException( "No JdbcTypeConstructor registered for SqlTypes.ARRAY" );
}
//noinspection rawtypes,unchecked
final BasicType<?> arrayType = ( (BasicPluralJavaType) basicValue.getResolution().getDomainJavaType() ).resolveType(
typeConfiguration,
creationContext.getDialect(),
basicType,
aggregateColumn,
typeConfiguration.getCurrentBaseSqlTypeIndicators()
);
basicTypeRegistry.register( arrayType );
resolvedJdbcMapping = arrayType;
}
else {
resolvedJdbcMapping = basicType;
}
basicValue.getResolution().updateResolution( resolvedJdbcMapping );
return resolvedJdbcMapping;
}
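For orientation, a hedged sketch of the kind of mapping this resolution path serves; the class, attribute and type names are invented for the example and are not taken from this change:
    import jakarta.persistence.Embeddable;
    import jakarta.persistence.Embedded;
    import jakarta.persistence.Entity;
    import jakarta.persistence.Id;
    import org.hibernate.annotations.Struct;
    // Hypothetical mapping: an aggregate embeddable containing a basic array value.
    @Embeddable
    @Struct(name = "score_struct")
    class Scores {
        String label;
        int[] values;   // the basic array value inside the aggregate
    }
    @Entity
    class Player {
        @Id
        Long id;
        @Embedded
        Scores scores;  // persisted as a STRUCT aggregate column; a JSON mapping works analogously
    }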
public EmbeddableMappingTypeImpl(
@ -376,6 +438,7 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
containingTableExpression = rootTableExpression;
columnExpression = rootTableKeyColumnNames[columnPosition];
}
final NavigableRole role = valueMapping.getNavigableRole().append( bootPropertyDescriptor.getName() );
final SelectablePath selectablePath;
final String columnDefinition;
final Long length;
@ -394,6 +457,7 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
isLob = column.isSqlTypeLob( creationProcess.getCreationContext().getMetadata() );
nullable = bootPropertyDescriptor.isOptional() && column.isNullable() ;
selectablePath = basicValue.createSelectablePath( column.getQuotedName( dialect ) );
MappingModelCreationHelper.resolveAggregateColumnBasicType( creationProcess, role, column );
}
else {
columnDefinition = null;
@ -407,18 +471,18 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
}
attributeMapping = MappingModelCreationHelper.buildBasicAttributeMapping(
bootPropertyDescriptor.getName(),
valueMapping.getNavigableRole().append( bootPropertyDescriptor.getName() ),
role,
attributeIndex,
attributeIndex,
bootPropertyDescriptor,
this,
(BasicType<?>) subtype,
basicValue.getResolution().getLegacyResolvedBasicType(),
containingTableExpression,
columnExpression,
selectablePath,
selectable.isFormula(),
selectable.getCustomReadExpression(),
selectable.getWriteExpr( ( (BasicType<?>) subtype ).getJdbcMapping(), dialect ),
selectable.getWriteExpr( basicValue.getResolution().getJdbcMapping(), dialect ),
columnDefinition,
length,
precision,

View File

@ -12,6 +12,8 @@ import java.util.Iterator;
import java.util.List;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import org.hibernate.FetchMode;
@ -28,9 +30,11 @@ import org.hibernate.engine.FetchTiming;
import org.hibernate.engine.spi.CascadeStyle;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Any;
import org.hibernate.mapping.BasicValue;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.DependantValue;
import org.hibernate.mapping.IndexedCollection;
@ -63,6 +67,7 @@ import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.ForeignKeyDescriptor;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.ManagedMappingType;
import org.hibernate.metamodel.mapping.MappingType;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.metamodel.mapping.ModelPartContainer;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
@ -81,13 +86,27 @@ import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.Joinable;
import org.hibernate.property.access.internal.ChainedPropertyAccessImpl;
import org.hibernate.property.access.spi.PropertyAccess;
import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstJoinType;
import org.hibernate.sql.ast.spi.SqlAliasBase;
import org.hibernate.sql.ast.spi.SqlAliasStemHelper;
import org.hibernate.sql.ast.spi.SqlAstCreationState;
import org.hibernate.sql.ast.spi.SqlExpressionResolver;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableGroupJoin;
import org.hibernate.sql.ast.tree.from.TableGroupProducer;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.results.graph.DomainResult;
import org.hibernate.sql.results.graph.DomainResultCreationState;
import org.hibernate.sql.results.graph.Fetch;
import org.hibernate.sql.results.graph.FetchOptions;
import org.hibernate.sql.results.graph.FetchParent;
import org.hibernate.type.AssociationType;
import org.hibernate.type.BasicType;
import org.hibernate.type.ComponentType;
@ -1617,6 +1636,159 @@ public class MappingModelCreationHelper {
}
}
public static BasicType<?> resolveAggregateColumnBasicType(
MappingModelCreationProcess creationProcess,
NavigableRole navigableRole,
Column column) {
if ( column instanceof AggregateColumn ) {
final Component component = ( (AggregateColumn) column ).getComponent();
final CompositeType compositeType = component.getType();
final NavigableRole embeddableRole = navigableRole.append( CollectionPart.Nature.ELEMENT.getName() );
final EmbeddableMappingTypeImpl mappingType = EmbeddableMappingTypeImpl.from(
component,
compositeType,
component.getColumnInsertability(),
component.getColumnUpdateability(),
inflightDescriptor -> new EmbeddableValuedModelPart() {
@Override
public EmbeddableMappingType getEmbeddableTypeDescriptor() {
return inflightDescriptor;
}
@Override
public SqlTuple toSqlExpression(
TableGroup tableGroup,
Clause clause,
SqmToSqlAstConverter walker,
SqlAstCreationState sqlAstCreationState) {
return null;
}
@Override
public String getContainingTableExpression() {
return "";
}
@Override
public SqlAstJoinType getDefaultSqlAstJoinType(TableGroup parentTableGroup) {
return null;
}
@Override
public boolean isSimpleJoinPredicate(Predicate predicate) {
return predicate == null;
}
@Override
public TableGroupJoin createTableGroupJoin(
NavigablePath navigablePath,
TableGroup lhs,
String explicitSourceAlias,
SqlAliasBase explicitSqlAliasBase,
SqlAstJoinType sqlAstJoinType,
boolean fetched,
boolean addsPredicate,
SqlAstCreationState creationState) {
return null;
}
@Override
public TableGroup createRootTableGroupJoin(
NavigablePath navigablePath,
TableGroup lhs,
String explicitSourceAlias,
SqlAliasBase explicitSqlAliasBase,
SqlAstJoinType sqlAstJoinType,
boolean fetched,
Consumer<Predicate> predicateConsumer,
SqlAstCreationState creationState) {
return null;
}
@Override
public String getSqlAliasStem() {
return "";
}
@Override
public String getFetchableName() {
return CollectionPart.Nature.ELEMENT.getName();
}
@Override
public int getFetchableKey() {
return 0;
}
@Override
public FetchOptions getMappedFetchOptions() {
return null;
}
@Override
public Fetch generateFetch(
FetchParent fetchParent,
NavigablePath fetchablePath,
FetchTiming fetchTiming,
boolean selected,
String resultVariable,
DomainResultCreationState creationState) {
return null;
}
@Override
public NavigableRole getNavigableRole() {
return embeddableRole;
}
@Override
public String getPartName() {
return CollectionPart.Nature.ELEMENT.getName();
}
@Override
public MappingType getPartMappingType() {
return inflightDescriptor;
}
@Override
public <T> DomainResult<T> createDomainResult(
NavigablePath navigablePath,
TableGroup tableGroup,
String resultVariable,
DomainResultCreationState creationState) {
return null;
}
@Override
public void applySqlSelections(
NavigablePath navigablePath,
TableGroup tableGroup,
DomainResultCreationState creationState) {
}
@Override
public void applySqlSelections(
NavigablePath navigablePath,
TableGroup tableGroup,
DomainResultCreationState creationState,
BiConsumer<SqlSelection, JdbcMapping> selectionConsumer) {
}
@Override
public EntityMappingType findContainingEntityMapping() {
return null;
}
},
creationProcess
);
return (BasicType<?>) mappingType.getAggregateMapping().getJdbcMapping();
}
return null;
}
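For reference, the persister and embeddable-mapping hunks in this change call the helper above during mapping-model creation in the form
    MappingModelCreationHelper.resolveAggregateColumnBasicType( creationProcess, role, column );
which resolves the aggregate's JDBC mapping when the column is an AggregateColumn and returns null otherwise.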
private static class CollectionMappingTypeImpl implements CollectionMappingType {
private final JavaType<?> collectionJtd;
private final CollectionSemantics<?,?> semantics;

View File

@ -5705,6 +5705,7 @@ public abstract class AbstractEntityPersister
scale = column.getScale();
nullable = column.isNullable();
isLob = column.isSqlTypeLob( creationContext.getMetadata() );
MappingModelCreationHelper.resolveAggregateColumnBasicType( creationProcess, role, column );
}
else {
final String[] attrColumnFormulaTemplate = propertyColumnFormulaTemplates[ propertyIndex ];

View File

@ -158,7 +158,7 @@ public class PostgreSQLCallableStatementSupport extends AbstractStandardCallable
final String castType;
if ( type != null && type.getJdbcType() instanceof AbstractPostgreSQLStructJdbcType ) {
// We have to cast struct type parameters so that PostgreSQL understands nulls
castType = ( (AbstractPostgreSQLStructJdbcType) type.getJdbcType() ).getTypeName();
castType = ( (AbstractPostgreSQLStructJdbcType) type.getJdbcType() ).getStructTypeName();
buffer.append( "cast(" );
}
else {

View File

@ -27,18 +27,20 @@ import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.spi.TypeConfiguration;
import org.checkerframework.checker.nullness.qual.Nullable;
import static java.util.Collections.emptyList;
/**
* @author Steve Ebersole
*/
public class SelfRenderingSqmFunction<T> extends SqmFunction<T> {
private final ReturnableType<T> impliedResultType;
private final ArgumentsValidator argumentsValidator;
private final @Nullable ReturnableType<T> impliedResultType;
private final @Nullable ArgumentsValidator argumentsValidator;
private final FunctionReturnTypeResolver returnTypeResolver;
private final FunctionRenderingSupport renderingSupport;
private final FunctionRenderer renderer;
private ReturnableType<?> resultType;
private @Nullable ReturnableType<?> resultType;
/**
* @deprecated Use {@link #SelfRenderingSqmFunction(SqmFunctionDescriptor, FunctionRenderer, List, ReturnableType, ArgumentsValidator, FunctionReturnTypeResolver, NodeBuilder, String)} instead
@ -48,8 +50,8 @@ public class SelfRenderingSqmFunction<T> extends SqmFunction<T> {
SqmFunctionDescriptor descriptor,
FunctionRenderingSupport renderingSupport,
List<? extends SqmTypedNode<?>> arguments,
ReturnableType<T> impliedResultType,
ArgumentsValidator argumentsValidator,
@Nullable ReturnableType<T> impliedResultType,
@Nullable ArgumentsValidator argumentsValidator,
FunctionReturnTypeResolver returnTypeResolver,
NodeBuilder nodeBuilder,
String name) {
@ -65,8 +67,8 @@ public class SelfRenderingSqmFunction<T> extends SqmFunction<T> {
SqmFunctionDescriptor descriptor,
FunctionRenderer renderer,
List<? extends SqmTypedNode<?>> arguments,
ReturnableType<T> impliedResultType,
ArgumentsValidator argumentsValidator,
@Nullable ReturnableType<T> impliedResultType,
@Nullable ArgumentsValidator argumentsValidator,
FunctionReturnTypeResolver returnTypeResolver,
NodeBuilder nodeBuilder,
String name) {
@ -117,11 +119,11 @@ public class SelfRenderingSqmFunction<T> extends SqmFunction<T> {
return renderer;
}
protected ReturnableType<T> getImpliedResultType() {
protected @Nullable ReturnableType<T> getImpliedResultType() {
return impliedResultType;
}
protected ArgumentsValidator getArgumentsValidator() {
protected @Nullable ArgumentsValidator getArgumentsValidator() {
return argumentsValidator;
}
@ -130,7 +132,7 @@ public class SelfRenderingSqmFunction<T> extends SqmFunction<T> {
}
protected List<SqlAstNode> resolveSqlAstArguments(List<? extends SqmTypedNode<?>> sqmArguments, SqmToSqlAstConverter walker) {
if ( sqmArguments == null || sqmArguments.isEmpty() ) {
if ( sqmArguments.isEmpty() ) {
return emptyList();
}
@ -167,11 +169,11 @@ public class SelfRenderingSqmFunction<T> extends SqmFunction<T> {
@Override
public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
final ReturnableType<?> resultType = resolveResultType( walker );
List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
if ( argumentsValidator != null ) {
argumentsValidator.validateSqlTypes( arguments, getFunctionName() );
final @Nullable ReturnableType<?> resultType = resolveResultType( walker );
final List<SqlAstNode> arguments = resolveSqlAstArguments( getArguments(), walker );
final ArgumentsValidator validator = argumentsValidator;
if ( validator != null ) {
validator.validateSqlTypes( arguments, getFunctionName() );
}
return new SelfRenderingFunctionSqlAstExpression(
getFunctionName(),
@ -182,30 +184,46 @@ public class SelfRenderingSqmFunction<T> extends SqmFunction<T> {
);
}
public SqmExpressible<T> getNodeType() {
public @Nullable SqmExpressible<T> getNodeType() {
SqmExpressible<T> nodeType = super.getNodeType();
if ( nodeType == null ) {
nodeType = (SqmExpressible<T>) resolveResultType( nodeBuilder().getTypeConfiguration() );
//noinspection unchecked
nodeType = (SqmExpressible<T>) determineResultType( null, nodeBuilder().getTypeConfiguration() );
setExpressibleType( nodeType );
}
return nodeType;
}
protected ReturnableType<?> resolveResultType(TypeConfiguration typeConfiguration) {
@Deprecated(forRemoval = true)
protected @Nullable ReturnableType<?> resolveResultType(TypeConfiguration typeConfiguration) {
return resolveResultType( () -> null, typeConfiguration );
}
protected ReturnableType<?> resolveResultType(SqmToSqlAstConverter walker) {
public @Nullable ReturnableType<?> resolveResultType(SqmToSqlAstConverter walker) {
if ( resultType == null ) {
return resolveResultType(
walker::resolveFunctionImpliedReturnType,
resultType = determineResultType(
walker,
walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()
);
setExpressibleType( resultType );
}
return resultType;
}
protected ReturnableType<?> resolveResultType(
protected @Nullable ReturnableType<?> determineResultType(
SqmToSqlAstConverter converter,
TypeConfiguration typeConfiguration) {
return returnTypeResolver.resolveFunctionReturnType(
impliedResultType,
converter,
getArguments(),
typeConfiguration
);
}
@Deprecated(forRemoval = true)
protected @Nullable ReturnableType<?> resolveResultType(
Supplier<MappingModelExpressible<?>> inferredTypeSupplier,
TypeConfiguration typeConfiguration) {
if ( resultType == null ) {
@ -224,7 +242,7 @@ public class SelfRenderingSqmFunction<T> extends SqmFunction<T> {
SqmToSqlAstConverter walker,
ReturnableType<?> resultType,
List<SqlAstNode> arguments) {
MappingModelExpressible<?> mapping;
final MappingModelExpressible<?> mapping;
if ( resultType instanceof MappingModelExpressible ) {
// here we have a BasicType, which can be cast
// directly to BasicValuedMapping

View File

@ -5894,6 +5894,13 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
}
}
if ( sqmExpression instanceof SelfRenderingSqmFunction<?> ) {
return domainModel.resolveMappingExpressible(
( (SelfRenderingSqmFunction<?>) sqmExpression ).resolveResultType( this ),
this::findTableGroupByPath
);
}
log.debugf( "Determining mapping-model type for generalized SqmExpression : %s", sqmExpression );
final SqmExpressible<?> nodeType = sqmExpression.getNodeType();
if ( nodeType == null ) {

View File

@ -29,6 +29,7 @@ import org.hibernate.sql.ast.tree.expression.Format;
import org.hibernate.sql.ast.tree.expression.JdbcLiteral;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.expression.ModifiedSubQueryExpression;
import org.hibernate.sql.ast.tree.expression.NestedColumnReference;
import org.hibernate.sql.ast.tree.expression.Over;
import org.hibernate.sql.ast.tree.expression.Overflow;
import org.hibernate.sql.ast.tree.expression.QueryLiteral;
@ -128,6 +129,8 @@ public interface SqlAstWalker {
void visitColumnReference(ColumnReference columnReference);
void visitNestedColumnReference(NestedColumnReference nestedColumnReference);
void visitAggregateColumnWriteExpression(AggregateColumnWriteExpression aggregateColumnWriteExpression);
void visitExtractUnit(ExtractUnit extractUnit);

View File

@ -83,6 +83,7 @@ import org.hibernate.query.sqm.sql.internal.SqmParameterInterpretation;
import org.hibernate.query.sqm.sql.internal.SqmPathInterpretation;
import org.hibernate.query.sqm.tree.expression.Conversion;
import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.Template;
import org.hibernate.sql.ast.Clause;
import org.hibernate.sql.ast.SqlAstJoinType;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
@ -124,6 +125,7 @@ import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.LiteralAsParameter;
import org.hibernate.sql.ast.tree.expression.ModifiedSubQueryExpression;
import org.hibernate.sql.ast.tree.expression.NestedColumnReference;
import org.hibernate.sql.ast.tree.expression.OrderedSetAggregateFunctionExpression;
import org.hibernate.sql.ast.tree.expression.Over;
import org.hibernate.sql.ast.tree.expression.Overflow;
@ -6881,6 +6883,19 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
}
}
@Override
public void visitNestedColumnReference(NestedColumnReference nestedColumnReference) {
final String readExpression = nestedColumnReference.getReadExpression();
int start = 0;
int idx;
while ( ( idx = readExpression.indexOf( Template.TEMPLATE, start ) ) != -1 ) {
append( readExpression, start, idx );
nestedColumnReference.getBaseExpression().accept( this );
start = idx + Template.TEMPLATE.length();
}
append( readExpression, start, readExpression.length() );
}
@Override
public void visitAggregateColumnWriteExpression(AggregateColumnWriteExpression aggregateColumnWriteExpression) {
aggregateColumnWriteExpression.appendWriteExpression(

View File

@ -34,6 +34,7 @@ import org.hibernate.sql.ast.tree.expression.FunctionExpression;
import org.hibernate.sql.ast.tree.expression.JdbcLiteral;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.expression.ModifiedSubQueryExpression;
import org.hibernate.sql.ast.tree.expression.NestedColumnReference;
import org.hibernate.sql.ast.tree.expression.OrderedSetAggregateFunctionExpression;
import org.hibernate.sql.ast.tree.expression.Over;
import org.hibernate.sql.ast.tree.expression.Overflow;
@ -464,6 +465,10 @@ public class AbstractSqlAstWalker implements SqlAstWalker {
public void visitColumnReference(ColumnReference columnReference) {
}
@Override
public void visitNestedColumnReference(NestedColumnReference nestedColumnReference) {
}
@Override
public void visitAggregateColumnWriteExpression(AggregateColumnWriteExpression aggregateColumnWriteExpression) {
}

View File

@ -34,6 +34,7 @@ import org.hibernate.sql.ast.tree.expression.Format;
import org.hibernate.sql.ast.tree.expression.JdbcLiteral;
import org.hibernate.sql.ast.tree.expression.JdbcParameter;
import org.hibernate.sql.ast.tree.expression.ModifiedSubQueryExpression;
import org.hibernate.sql.ast.tree.expression.NestedColumnReference;
import org.hibernate.sql.ast.tree.expression.Over;
import org.hibernate.sql.ast.tree.expression.Overflow;
import org.hibernate.sql.ast.tree.expression.QueryLiteral;
@ -119,6 +120,11 @@ public class ExpressionReplacementWalker implements SqlAstWalker {
doReplaceExpression( columnReference );
}
@Override
public void visitNestedColumnReference(NestedColumnReference nestedColumnReference) {
doReplaceExpression( nestedColumnReference );
}
@Override
public void visitAggregateColumnWriteExpression(AggregateColumnWriteExpression aggregateColumnWriteExpression) {
doReplaceExpression( aggregateColumnWriteExpression );

View File

@ -13,6 +13,8 @@ import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.NestedColumnReference;
import org.hibernate.sql.ast.tree.from.EmbeddableFunctionTableReference;
import org.hibernate.sql.ast.tree.from.TableReference;
import org.hibernate.sql.results.graph.FetchParent;
import org.hibernate.type.NullType;
@ -94,10 +96,12 @@ public interface SqlExpressionResolver {
default Expression resolveSqlExpression(TableReference tableReference, SelectableMapping selectableMapping) {
return resolveSqlExpression(
createColumnReferenceKey( tableReference, selectableMapping ),
processingState -> new ColumnReference(
tableReference,
selectableMapping
)
processingState -> tableReference.isEmbeddableFunctionTableReference()
? new NestedColumnReference(
tableReference.asEmbeddableFunctionTableReference(),
selectableMapping
)
: new ColumnReference( tableReference, selectableMapping )
);
}

View File

@ -150,6 +150,10 @@ public class ColumnReference implements Expression, Assignable {
return columnExpression;
}
protected String getReadExpression() {
return readExpression;
}
public String getSelectableName() {
return selectablePath.getSelectableName();
}

View File

@ -0,0 +1,38 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.sql.ast.tree.expression;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.sql.ast.SqlAstWalker;
import org.hibernate.sql.ast.tree.from.EmbeddableFunctionTableReference;
/**
* Models a column that is expressed relative to a base expression, e.g. {@code array[1].columnName}.
* This is needed to model column references within structures such as arrays.
*/
public class NestedColumnReference extends ColumnReference {
private final Expression baseExpression;
public NestedColumnReference(EmbeddableFunctionTableReference tableReference, SelectableMapping selectableMapping) {
super( tableReference, selectableMapping );
this.baseExpression = tableReference.getExpression();
}
public Expression getBaseExpression() {
return baseExpression;
}
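// Overridden only to widen visibility from protected to public, so that the SQL AST translator can read the template when rendering this reference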
@Override
public String getReadExpression() {
return super.getReadExpression();
}
@Override
public void accept(SqlAstWalker sqlTreeWalker) {
sqlTreeWalker.visitNestedColumnReference( this );
}
}
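To make the intent of this new node concrete: its read expression is a template in which the Template.TEMPLATE placeholder stands for the base expression, and the translator change above splices the rendered base expression into that template. The following is a minimal, self-contained sketch of that substitution, using made-up strings (column name, rendered base expression) rather than anything this commit actually produces:

import org.hibernate.sql.Template;

// Illustrative sketch only, not part of the commit: mimics the placeholder
// substitution performed in AbstractSqlAstTranslator#visitNestedColumnReference
// for a hypothetical read expression "<placeholder>.val" and base expression.
public class NestedColumnRenderingSketch {
    public static void main(String[] args) {
        final String readExpression = Template.TEMPLATE + ".val"; // hypothetical column "val"
        final String renderedBase = "t1.labels[1]";               // hypothetical base expression SQL
        final StringBuilder sql = new StringBuilder();
        int start = 0;
        int idx;
        while ( ( idx = readExpression.indexOf( Template.TEMPLATE, start ) ) != -1 ) {
            sql.append( readExpression, start, idx );
            sql.append( renderedBase );
            start = idx + Template.TEMPLATE.length();
        }
        sql.append( readExpression, start, readExpression.length() );
        System.out.println( sql ); // prints "t1.labels[1].val"
    }
}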

View File

@ -40,6 +40,16 @@ public class EmbeddableFunctionTableReference extends AbstractTableReference {
return expression;
}
@Override
public boolean isEmbeddableFunctionTableReference() {
return true;
}
@Override
public EmbeddableFunctionTableReference asEmbeddableFunctionTableReference() {
return this;
}
@Override
public void accept(SqlAstWalker sqlTreeWalker) {
expression.accept( sqlTreeWalker );

View File

@ -15,6 +15,8 @@ import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.ast.SqlAstWalker;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.checkerframework.checker.nullness.qual.Nullable;
import static org.hibernate.internal.util.StringHelper.isEmpty;
/**
@ -72,4 +74,12 @@ public interface TableReference extends SqlAstNode, ColumnReferenceQualifier {
NavigablePath navigablePath,
String tableExpression,
boolean resolve);
default boolean isEmbeddableFunctionTableReference() {
return false;
}
default @Nullable EmbeddableFunctionTableReference asEmbeddableFunctionTableReference() {
return null;
}
}

View File

@ -20,6 +20,7 @@ import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.BasicValue;
import org.hibernate.mapping.CheckConstraint;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
@ -28,6 +29,7 @@ import org.hibernate.mapping.Table;
import org.hibernate.mapping.Value;
import org.hibernate.sql.Template;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.type.SqlTypes;
import static java.util.Collections.addAll;
import static org.hibernate.internal.util.StringHelper.EMPTY_STRINGS;
@ -178,13 +180,28 @@ public class StandardTableExporter implements Exporter<Table> {
for ( Column column : table.getColumns() ) {
if ( column instanceof AggregateColumn ) {
final AggregateColumn aggregateColumn = (AggregateColumn) column;
applyAggregateColumnCheck( buf, aggregateColumn );
if ( !isArray( aggregateColumn ) ) {
applyAggregateColumnCheck( buf, aggregateColumn );
}
}
}
}
}
}
private boolean isArray(AggregateColumn aggregateColumn) {
final BasicValue value = (BasicValue) aggregateColumn.getValue();
switch ( value.getResolution().getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.STRUCT_ARRAY:
case SqlTypes.STRUCT_TABLE:
case SqlTypes.JSON_ARRAY:
case SqlTypes.XML_ARRAY:
case SqlTypes.ARRAY:
return true;
}
return false;
}
private void applyAggregateColumnCheck(StringBuilder buf, AggregateColumn aggregateColumn) {
final AggregateSupport aggregateSupport = dialect.getAggregateSupport();
final int checkStart = buf.length();
@ -219,7 +236,7 @@ public class StandardTableExporter implements Exporter<Table> {
if ( value instanceof Component ) {
final Component component = (Component) value;
final AggregateColumn subAggregateColumn = component.getAggregateColumn();
if ( subAggregateColumn != null ) {
if ( subAggregateColumn != null && !isArray( subAggregateColumn ) ) {
final String subAggregatePath = subAggregateColumn.getAggregateReadExpressionTemplate( dialect )
.replace( Template.TEMPLATE + ".", "" );
final int checkStart = buf.length();

View File

@ -18,6 +18,8 @@ import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.UserDefinedArrayType;
import org.hibernate.mapping.UserDefinedObjectType;
import org.hibernate.mapping.UserDefinedType;
import org.hibernate.tool.schema.spi.Exporter;
@ -36,6 +38,21 @@ public class StandardUserDefinedTypeExporter implements Exporter<UserDefinedType
UserDefinedType userDefinedType,
Metadata metadata,
SqlStringGenerationContext context) {
if ( userDefinedType instanceof UserDefinedObjectType ) {
return getSqlCreateStrings( (UserDefinedObjectType) userDefinedType, metadata, context );
}
else if ( userDefinedType instanceof UserDefinedArrayType ) {
return getSqlCreateStrings( (UserDefinedArrayType) userDefinedType, metadata, context );
}
else {
throw new IllegalArgumentException( "Unsupported user-defined type: " + userDefinedType );
}
}
public String[] getSqlCreateStrings(
UserDefinedObjectType userDefinedType,
Metadata metadata,
SqlStringGenerationContext context) {
final QualifiedName typeName = new QualifiedNameParser.NameParts(
Identifier.toIdentifier( userDefinedType.getCatalog(), userDefinedType.isCatalogQuoted() ),
Identifier.toIdentifier( userDefinedType.getSchema(), userDefinedType.isSchemaQuoted() ),
@ -82,12 +99,19 @@ public class StandardUserDefinedTypeExporter implements Exporter<UserDefinedType
}
}
public String[] getSqlCreateStrings(
UserDefinedArrayType userDefinedType,
Metadata metadata,
SqlStringGenerationContext context) {
throw new IllegalArgumentException( "Exporter does not support name array types. Can't generate create strings for: " + userDefinedType );
}
/**
* @param udt The UDT.
* @param formattedTypeName The formatted UDT name.
* @param sqlStrings The list of SQL strings to add comments to.
*/
protected void applyComments(UserDefinedType udt, String formattedTypeName, List<String> sqlStrings) {
protected void applyComments(UserDefinedObjectType udt, String formattedTypeName, List<String> sqlStrings) {
if ( dialect.supportsCommentOn() ) {
if ( udt.getComment() != null ) {
sqlStrings.add( "comment on type " + formattedTypeName + " is '" + udt.getComment() + "'" );
@ -106,16 +130,28 @@ public class StandardUserDefinedTypeExporter implements Exporter<UserDefinedType
}
@Override
public String[] getSqlDropStrings(UserDefinedType table, Metadata metadata, SqlStringGenerationContext context) {
public String[] getSqlDropStrings(UserDefinedType userDefinedType, Metadata metadata, SqlStringGenerationContext context) {
if ( userDefinedType instanceof UserDefinedObjectType ) {
return getSqlDropStrings( (UserDefinedObjectType) userDefinedType, metadata, context );
}
else if ( userDefinedType instanceof UserDefinedArrayType ) {
return getSqlDropStrings( (UserDefinedArrayType) userDefinedType, metadata, context );
}
else {
throw new IllegalArgumentException( "Unsupported user-defined type: " + userDefinedType );
}
}
public String[] getSqlDropStrings(UserDefinedObjectType userDefinedType, Metadata metadata, SqlStringGenerationContext context) {
StringBuilder buf = new StringBuilder( "drop type " );
if ( dialect.supportsIfExistsBeforeTypeName() ) {
buf.append( "if exists " );
}
final QualifiedName typeName = new QualifiedNameParser.NameParts(
Identifier.toIdentifier( table.getCatalog(), table.isCatalogQuoted() ),
Identifier.toIdentifier( table.getSchema(), table.isSchemaQuoted() ),
table.getNameIdentifier()
Identifier.toIdentifier( userDefinedType.getCatalog(), userDefinedType.isCatalogQuoted() ),
Identifier.toIdentifier( userDefinedType.getSchema(), userDefinedType.isSchemaQuoted() ),
userDefinedType.getNameIdentifier()
);
buf.append( context.format( typeName ) );
@ -125,4 +161,8 @@ public class StandardUserDefinedTypeExporter implements Exporter<UserDefinedType
return new String[] { buf.toString() };
}
public String[] getSqlDropStrings(UserDefinedArrayType userDefinedType, Metadata metadata, SqlStringGenerationContext context) {
throw new IllegalArgumentException( "Exporter does not support name array types. Can't generate drop strings for: " + userDefinedType );
}
}

View File

@ -18,8 +18,10 @@ import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.type.descriptor.converter.spi.BasicValueConverter;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.hibernate.type.descriptor.java.ImmutableMutabilityPlan;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.internal.BasicTypeImpl;
import org.hibernate.type.internal.ConvertedBasicTypeImpl;
@ -156,7 +158,29 @@ public class BasicTypeRegistry implements Serializable {
return resolve(
javaType,
jdbcType,
() -> new BasicTypeImpl<>( javaType, jdbcType )
() -> {
if ( javaType instanceof BasicPluralJavaType<?> && jdbcType instanceof ArrayJdbcType ) {
//noinspection unchecked
final BasicPluralJavaType<Object> pluralJavaType = (BasicPluralJavaType<Object>) javaType;
final BasicType<Object> elementType = resolve(
pluralJavaType.getElementJavaType(),
( (ArrayJdbcType) jdbcType ).getElementJdbcType()
);
final BasicType<?> resolvedType = pluralJavaType.resolveType(
typeConfiguration,
typeConfiguration.getCurrentBaseSqlTypeIndicators().getDialect(),
elementType,
null,
typeConfiguration.getCurrentBaseSqlTypeIndicators()
);
if ( resolvedType instanceof BasicPluralType<?, ?> ) {
register( resolvedType );
}
//noinspection unchecked
return (BasicType<J>) resolvedType;
}
return new BasicTypeImpl<>( javaType, jdbcType );
}
);
}

View File

@ -579,6 +579,28 @@ public class SqlTypes {
*/
public static final int DURATION = 3015;
/**
* A type code for an array of struct objects.
*/
public static final int STRUCT_ARRAY = 3016;
/**
* A type code representing an Oracle-style nested table for a struct.
*
* @see org.hibernate.dialect.OracleNestedTableJdbcType
*/
public static final int STRUCT_TABLE = 3017;
/**
* A type code for an array of JSON objects.
*/
public static final int JSON_ARRAY = 3018;
/**
* A type code for an array of XML objects.
*/
public static final int XML_ARRAY = 3019;
// Interval types
/**

View File

@ -37,4 +37,8 @@ public interface ValueBinder<X> {
* @throws SQLException Indicates a JDBC error occurred.
*/
void bind(CallableStatement st, X value, String name, WrapperOptions options) throws SQLException;
default Object getBindValue(X value, WrapperOptions options) throws SQLException {
return value;
}
}
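The new getBindValue hook defaults to returning the domain value unchanged; BasicBinder, further down in this change set, overrides it to unwrap to the JDBC type's preferred Java class. The binder below is a purely hypothetical sketch that only illustrates where such an override sits; it is not Hibernate API beyond the two imported interfaces:

import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.WrapperOptions;

// Hypothetical sketch, not part of the commit: a String binder whose getBindValue
// returns the JDBC-level representation (upper-cased here purely for illustration)
// instead of the raw domain value.
class UpperCasingStringBinder implements ValueBinder<String> {
    @Override
    public void bind(PreparedStatement st, String value, int index, WrapperOptions options) throws SQLException {
        st.setString( index, value == null ? null : value.toUpperCase() );
    }

    @Override
    public void bind(CallableStatement st, String value, String name, WrapperOptions options) throws SQLException {
        st.setString( name, value == null ? null : value.toUpperCase() );
    }

    @Override
    public Object getBindValue(String value, WrapperOptions options) {
        return value == null ? null : value.toUpperCase();
    }
}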

View File

@ -9,6 +9,7 @@ package org.hibernate.type.descriptor.java;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.sql.SQLException;
import java.util.Collection;
import org.hibernate.HibernateException;
import org.hibernate.SharedSessionContract;
@ -68,7 +69,9 @@ public class ArrayJavaType<T> extends AbstractArrayJavaType<T[], T> {
}
final Class<?> elementJavaTypeClass = elementType.getJavaTypeDescriptor().getJavaTypeClass();
if ( elementType instanceof BasicPluralType<?, ?>
|| elementJavaTypeClass != null && elementJavaTypeClass.isArray() ) {
|| elementJavaTypeClass != null && elementJavaTypeClass.isArray()
&& elementJavaTypeClass != byte[].class ) {
// No support for nested arrays, except for byte[][]
return null;
}
final ArrayJavaType<T> arrayJavaType;
@ -328,6 +331,16 @@ public class ArrayJavaType<T> extends AbstractArrayJavaType<T[], T> {
wrapped[0] = (T) value;
return wrapped;
}
else if ( value instanceof Collection<?> ) {
final Collection<?> collection = (Collection<?>) value;
//noinspection unchecked
final T[] wrapped = (T[]) java.lang.reflect.Array.newInstance( getElementJavaType().getJavaTypeClass(), collection.size() );
int i = 0;
for ( Object e : collection ) {
wrapped[i++] = getElementJavaType().wrap( e, options );
}
return wrapped;
}
throw unknownWrap( value.getClass() );
}

View File

@ -11,6 +11,7 @@ import java.lang.reflect.Array;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.hibernate.HibernateException;
@ -172,6 +173,15 @@ public class BooleanPrimitiveArrayJavaType extends AbstractArrayJavaType<boolean
// Support binding a single element as parameter value
return new boolean[]{ (boolean) value };
}
else if ( value instanceof Collection<?> ) {
final Collection<?> collection = (Collection<?>) value;
final boolean[] wrapped = new boolean[collection.size()];
int i = 0;
for ( Object e : collection ) {
wrapped[i++] = getElementJavaType().wrap( e, options );
}
return wrapped;
}
throw unknownWrap( value.getClass() );
}

View File

@ -11,6 +11,7 @@ import java.lang.reflect.Array;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.hibernate.HibernateException;
@ -172,6 +173,15 @@ public class DoublePrimitiveArrayJavaType extends AbstractArrayJavaType<double[]
// Support binding a single element as parameter value
return new double[]{ (double) value };
}
else if ( value instanceof Collection<?> ) {
final Collection<?> collection = (Collection<?>) value;
final double[] wrapped = new double[collection.size()];
int i = 0;
for ( Object e : collection ) {
wrapped[i++] = getElementJavaType().wrap( e, options );
}
return wrapped;
}
throw unknownWrap( value.getClass() );
}

View File

@ -11,6 +11,7 @@ import java.lang.reflect.Array;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.hibernate.HibernateException;
@ -172,6 +173,15 @@ public class FloatPrimitiveArrayJavaType extends AbstractArrayJavaType<float[],
// Support binding a single element as parameter value
return new float[]{ (float) value };
}
else if ( value instanceof Collection<?> ) {
final Collection<?> collection = (Collection<?>) value;
final float[] wrapped = new float[collection.size()];
int i = 0;
for ( Object e : collection ) {
wrapped[i++] = getElementJavaType().wrap( e, options );
}
return wrapped;
}
throw unknownWrap( value.getClass() );
}

View File

@ -11,6 +11,7 @@ import java.lang.reflect.Array;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.hibernate.HibernateException;
@ -172,6 +173,15 @@ public class IntegerPrimitiveArrayJavaType extends AbstractArrayJavaType<int[],
// Support binding a single element as parameter value
return new int[]{ (int) value };
}
else if ( value instanceof Collection<?> ) {
final Collection<?> collection = (Collection<?>) value;
final int[] wrapped = new int[collection.size()];
int i = 0;
for ( Object e : collection ) {
wrapped[i++] = getElementJavaType().wrap( e, options );
}
return wrapped;
}
throw unknownWrap( value.getClass() );
}

View File

@ -11,6 +11,7 @@ import java.lang.reflect.Array;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.hibernate.HibernateException;
@ -172,6 +173,15 @@ public class LongPrimitiveArrayJavaType extends AbstractArrayJavaType<long[], Lo
// Support binding a single element as parameter value
return new long[]{ (long) value };
}
else if ( value instanceof Collection<?> ) {
final Collection<?> collection = (Collection<?>) value;
final long[] wrapped = new long[collection.size()];
int i = 0;
for ( Object e : collection ) {
wrapped[i++] = getElementJavaType().wrap( e, options );
}
return wrapped;
}
throw unknownWrap( value.getClass() );
}

View File

@ -11,6 +11,7 @@ import java.lang.reflect.Array;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.hibernate.HibernateException;
@ -172,6 +173,15 @@ public class ShortPrimitiveArrayJavaType extends AbstractArrayJavaType<short[],
// Support binding a single element as parameter value
return new short[]{ (short) value };
}
else if ( value instanceof Collection<?> ) {
final Collection<?> collection = (Collection<?>) value;
final short[] wrapped = new short[collection.size()];
int i = 0;
for ( Object e : collection ) {
wrapped[i++] = getElementJavaType().wrap( e, options );
}
return wrapped;
}
throw unknownWrap( value.getClass() );
}

View File

@ -10,8 +10,11 @@ import org.hibernate.type.BasicType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.AbstractClassJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeIndicators;
import org.hibernate.type.descriptor.jdbc.internal.DelayedStructJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
/**
* Java type for embeddable aggregates, which allows resolving a recommended {@link JdbcType}.
@ -27,6 +30,10 @@ public class EmbeddableAggregateJavaType<T> extends AbstractClassJavaType<T> {
this.structName = structName;
}
public String getStructName() {
return structName;
}
@Override
public JdbcType getRecommendedJdbcType(JdbcTypeIndicators context) {
final BasicType<T> basicType = context.getTypeConfiguration().getBasicTypeForJavaType( getJavaType() );
@ -34,7 +41,14 @@ public class EmbeddableAggregateJavaType<T> extends AbstractClassJavaType<T> {
return basicType.getJdbcType();
}
if ( structName != null ) {
return context.getJdbcType( SqlTypes.STRUCT );
final JdbcTypeRegistry jdbcTypeRegistry = context.getTypeConfiguration().getJdbcTypeRegistry();
final AggregateJdbcType aggregateDescriptor = jdbcTypeRegistry.findAggregateDescriptor( structName );
if ( aggregateDescriptor != null ) {
return aggregateDescriptor;
}
if ( jdbcTypeRegistry.findDescriptor( SqlTypes.STRUCT ) != null ) {
return new DelayedStructJdbcType( this, structName );
}
}
// prefer json by default for now
final JdbcType descriptor = context.getJdbcType( SqlTypes.JSON );

View File

@ -7,6 +7,7 @@
package org.hibernate.type.descriptor.jdbc;
import java.lang.reflect.Array;
import java.lang.reflect.Type;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@ -14,8 +15,12 @@ import java.sql.SQLException;
import java.sql.Types;
import org.hibernate.HibernateException;
import org.hibernate.dialect.StructHelper;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.EmbeddableInstantiator;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
@ -26,6 +31,7 @@ import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.internal.JdbcLiteralFormatterArray;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.internal.BasicTypeImpl;
import org.hibernate.type.internal.ParameterizedTypeImpl;
import org.hibernate.type.spi.TypeConfiguration;
/**
@ -61,9 +67,18 @@ public class ArrayJdbcType implements JdbcType {
scale,
typeConfiguration
);
return typeConfiguration.getJavaTypeRegistry().resolveDescriptor(
final JavaType<Object> javaType = typeConfiguration.getJavaTypeRegistry().resolveDescriptor(
Array.newInstance( elementJavaType.getJavaTypeClass(), 0 ).getClass()
);
if ( javaType instanceof BasicPluralType<?, ?> ) {
//noinspection unchecked
return (JavaType<T>) javaType;
}
//noinspection unchecked
return (JavaType<T>) javaType.createJavaType(
new ParameterizedTypeImpl( javaType.getJavaTypeClass(), new Type[0], null ),
typeConfiguration
);
}
@Override
@ -92,6 +107,70 @@ public class ArrayJdbcType implements JdbcType {
return java.sql.Array.class;
}
protected Object[] getArray(BasicBinder<?> binder, Object value, WrapperOptions options) throws SQLException {
final JdbcType elementJdbcType = ( (ArrayJdbcType) binder.getJdbcType() ).getElementJdbcType();
//noinspection unchecked
final JavaType<Object> javaType = (JavaType<Object>) binder.getJavaType();
if ( elementJdbcType instanceof AggregateJdbcType ) {
final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) elementJdbcType;
final Object[] domainObjects = ( javaType ).unwrap(
value,
Object[].class,
options
);
final Object[] objects = new Object[domainObjects.length];
for ( int i = 0; i < domainObjects.length; i++ ) {
objects[i] = aggregateJdbcType.createJdbcValue( domainObjects[i], options );
}
return objects;
}
else {
final TypeConfiguration typeConfiguration = options.getSessionFactory().getTypeConfiguration();
final JdbcType underlyingJdbcType = typeConfiguration.getJdbcTypeRegistry()
.getDescriptor( elementJdbcType.getDefaultSqlTypeCode() );
final Class<?> preferredJavaTypeClass = elementJdbcType.getPreferredJavaTypeClass( options );
final Class<?> elementJdbcJavaTypeClass;
if ( preferredJavaTypeClass == null ) {
elementJdbcJavaTypeClass = underlyingJdbcType.getJdbcRecommendedJavaTypeMapping(
null,
null,
typeConfiguration
).getJavaTypeClass();
}
else {
elementJdbcJavaTypeClass = preferredJavaTypeClass;
}
//noinspection unchecked
final Class<Object[]> arrayClass = (Class<Object[]>)
Array.newInstance( elementJdbcJavaTypeClass, 0 ).getClass();
return javaType.unwrap( value, arrayClass, options );
}
}
protected <X> X getArray(BasicExtractor<X> extractor, java.sql.Array array, WrapperOptions options) throws SQLException {
if ( array != null && getElementJdbcType() instanceof AggregateJdbcType ) {
final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) getElementJdbcType();
final EmbeddableMappingType embeddableMappingType = aggregateJdbcType.getEmbeddableMappingType();
final EmbeddableInstantiator instantiator = embeddableMappingType.getRepresentationStrategy()
.getInstantiator();
final Object rawArray = array.getArray();
final Object[] domainObjects = new Object[Array.getLength( rawArray )];
for ( int i = 0; i < domainObjects.length; i++ ) {
final Object[] aggregateRawValues = aggregateJdbcType.extractJdbcValues( Array.get( rawArray, i ), options );
final Object[] attributeValues = StructHelper.getAttributeValues(
embeddableMappingType,
aggregateRawValues,
options
);
domainObjects[i] = instantiator.instantiate( () -> attributeValues, options.getSessionFactory() );
}
return extractor.getJavaType().wrap( domainObjects, options );
}
else {
return extractor.getJavaType().wrap( array, options );
}
}
@Override
public <X> ValueBinder<X> getBinder(final JavaType<X> javaTypeDescriptor) {
return new BasicBinder<>( javaTypeDescriptor, this ) {
@ -114,26 +193,8 @@ public class ArrayJdbcType implements JdbcType {
}
private java.sql.Array getArray(X value, WrapperOptions options) throws SQLException {
final TypeConfiguration typeConfiguration = options.getSessionFactory().getTypeConfiguration();
final JdbcType elementJdbcType = ( (ArrayJdbcType) getJdbcType() ).getElementJdbcType();
final JdbcType underlyingJdbcType = typeConfiguration.getJdbcTypeRegistry()
.getDescriptor( elementJdbcType.getDefaultSqlTypeCode() );
final Class<?> preferredJavaTypeClass = elementJdbcType.getPreferredJavaTypeClass( options );
final Class<?> elementJdbcJavaTypeClass;
if ( preferredJavaTypeClass == null ) {
elementJdbcJavaTypeClass = underlyingJdbcType.getJdbcRecommendedJavaTypeMapping(
null,
null,
typeConfiguration
).getJavaTypeClass();
}
else {
elementJdbcJavaTypeClass = preferredJavaTypeClass;
}
//noinspection unchecked
final Class<Object[]> arrayClass = (Class<Object[]>)
Array.newInstance( elementJdbcJavaTypeClass, 0 ).getClass();
final Object[] objects = getJavaType().unwrap( value, arrayClass, options );
final Object[] objects = ArrayJdbcType.this.getArray( this, value, options );
final SharedSessionContractImplementor session = options.getSession();
final String typeName = getElementTypeName( elementJdbcType, session );
@ -144,6 +205,9 @@ public class ArrayJdbcType implements JdbcType {
private String getElementTypeName(JdbcType elementJdbcType, SharedSessionContractImplementor session) {
// TODO: ideally, we would have the actual size or the actual type/column accessible
// this is something that we would need for supporting composite types anyway
if ( elementJdbcType instanceof StructJdbcType ) {
return ( (StructJdbcType) elementJdbcType ).getStructTypeName();
}
final JavaType<X> elementJavaType;
if ( getJavaType() instanceof ByteArrayJavaType ) {
// Special handling needed for Byte[], because that would conflict with the VARBINARY mapping
@ -179,17 +243,17 @@ public class ArrayJdbcType implements JdbcType {
return new BasicExtractor<>( javaTypeDescriptor, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return javaTypeDescriptor.wrap( rs.getArray( paramIndex ), options );
return getArray( this, rs.getArray( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return javaTypeDescriptor.wrap( statement.getArray( index ), options );
return getArray( this, statement.getArray( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException {
return javaTypeDescriptor.wrap( statement.getArray( name ), options );
return getArray( this, statement.getArray( name ), options );
}
};
}

View File

@ -85,6 +85,12 @@ public abstract class BasicBinder<J> implements ValueBinder<J>, Serializable {
}
}
@Override
public Object getBindValue(J value, WrapperOptions options) throws SQLException {
final Class<?> preferredJavaTypeClass = jdbcType.getPreferredJavaTypeClass( options );
return preferredJavaTypeClass == null ? value : getJavaType().unwrap( value, preferredJavaTypeClass, options );
}
/**
* Perform the null binding.
*

View File

@ -0,0 +1,19 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.type.descriptor.jdbc;
/**
* A {@link JdbcType} with a fixed SQL type name.
*
* @see StructJdbcType
* @see org.hibernate.dialect.OracleArrayJdbcType
* @see org.hibernate.dialect.OracleNestedTableJdbcType
*/
public interface SqlTypedJdbcType extends JdbcType {
String getSqlTypeName();
}
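For orientation, a hedged usage sketch of the new contract together with the name-based lookup added to JdbcTypeRegistry later in this change set; lookups are case-insensitive because the registry lower-cases its keys. The type name used here is borrowed from the test at the end of this commit and is otherwise hypothetical:

import org.hibernate.type.descriptor.jdbc.SqlTypedJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;

// Illustrative sketch only: a SqlTypedJdbcType registered under its SQL type name
// can later be resolved again by that name, e.g. when a column reports a UDT name.
final class SqlTypedLookupSketch {
    static SqlTypedJdbcType findByTypeName(JdbcTypeRegistry registry, String typeName) {
        // returns null when nothing was registered under that name
        return registry.findSqlTypedDescriptor( typeName );
    }

    static SqlTypedJdbcType example(JdbcTypeRegistry registry) {
        return findByTypeName( registry, "publisher_type" );
    }
}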

View File

@ -0,0 +1,22 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.type.descriptor.jdbc;
import org.hibernate.type.SqlTypes;
/**
* Descriptor for aggregate handling like {@link SqlTypes#STRUCT STRUCT}, {@link SqlTypes#JSON JSON} and {@link SqlTypes#SQLXML SQLXML}.
*/
public interface StructJdbcType extends AggregateJdbcType, SqlTypedJdbcType {
String getStructTypeName();
@Override
default String getSqlTypeName() {
return getStructTypeName();
}
}

View File

@ -0,0 +1,128 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.type.descriptor.jdbc.internal;
import java.sql.SQLException;
import java.sql.Types;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.spi.EmbeddableAggregateJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.descriptor.jdbc.StructJdbcType;
import org.hibernate.type.spi.TypeConfiguration;
/**
* A temporary placeholder descriptor for {@link Types#STRUCT STRUCT} handling.
* During bootstrap, {@link EmbeddableAggregateJavaType} reports {@link DelayedStructJdbcType} as the recommended
* {@link org.hibernate.type.descriptor.jdbc.JdbcType}, because the real {@link StructJdbcType} can only be built later,
* since it requires runtime model information in the form of the {@link EmbeddableMappingType}.
* The real {@link StructJdbcType} is built right after the {@link EmbeddableMappingType} is created,
* which then triggers a rebuild of the respective {@link org.hibernate.type.BasicType} as well as an update of
* the {@link org.hibernate.mapping.BasicValue.Resolution} of the owning attribute.
*
* @see EmbeddableAggregateJavaType
*/
public class DelayedStructJdbcType implements StructJdbcType {
private final EmbeddableAggregateJavaType<?> embeddableAggregateJavaType;
private final String structName;
public DelayedStructJdbcType(EmbeddableAggregateJavaType<?> embeddableAggregateJavaType, String structName) {
this.embeddableAggregateJavaType = embeddableAggregateJavaType;
this.structName = structName;
}
@Override
public int getJdbcTypeCode() {
return Types.STRUCT;
}
@Override
public String getStructTypeName() {
return structName;
}
@Override
public <T> JavaType<T> getJdbcRecommendedJavaTypeMapping(
Integer precision,
Integer scale,
TypeConfiguration typeConfiguration) {
//noinspection unchecked
return (JavaType<T>) embeddableAggregateJavaType;
}
@Override
public Class<?> getPreferredJavaTypeClass(WrapperOptions options) {
return embeddableAggregateJavaType.getJavaTypeClass();
}
@Override
public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaTypeDescriptor) {
return null;
}
@Override
public <X> ValueBinder<X> getBinder(final JavaType<X> javaTypeDescriptor) {
return null;
}
@Override
public <X> ValueExtractor<X> getExtractor(final JavaType<X> javaTypeDescriptor) {
return null;
}
@Override
public EmbeddableMappingType getEmbeddableMappingType() {
return null;
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(
EmbeddableMappingType mappingType,
String sqlType,
RuntimeModelCreationContext creationContext) {
throw new UnsupportedOperationException();
}
@Override
public Object createJdbcValue(Object domainValue, WrapperOptions options) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public Object[] extractJdbcValues(Object rawJdbcValue, WrapperOptions options) throws SQLException {
throw new UnsupportedOperationException();
}
@Override
public String getFriendlyName() {
return "STRUCT";
}
@Override
public String toString() {
return "UnresolvedStructTypeDescriptor";
}
@Override
public boolean equals(Object o) {
return o != null &&
getClass() == o.getClass() &&
structName.equals( ( (DelayedStructJdbcType) o ).structName );
}
@Override
public int hashCode() {
return structName.hashCode();
}
}
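A small, hedged illustration of the equality contract above: placeholders are keyed on the struct name alone, so any two placeholders for the same named struct are interchangeable, which is what lets the real StructJdbcType replace the placeholder consistently later on. The null Java type argument below is used only because equality ignores it; the struct name is hypothetical:

import org.hibernate.type.descriptor.jdbc.internal.DelayedStructJdbcType;

// Illustrative sketch only, not part of the commit.
public class DelayedStructEqualitySketch {
    public static void main(String[] args) {
        final DelayedStructJdbcType a = new DelayedStructJdbcType( null, "publisher_type" );
        final DelayedStructJdbcType b = new DelayedStructJdbcType( null, "publisher_type" );
        System.out.println( a.equals( b ) && a.hashCode() == b.hashCode() ); // prints "true"
    }
}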

View File

@ -20,10 +20,10 @@ import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.BasicType;
import org.hibernate.type.descriptor.JdbcTypeNameMapper;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeConstructor;
import org.hibernate.type.descriptor.jdbc.JdbcTypeFamilyInformation;
import org.hibernate.type.descriptor.jdbc.SqlTypedJdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectJdbcType;
import org.hibernate.type.descriptor.jdbc.internal.JdbcTypeBaseline;
import org.hibernate.type.spi.TypeConfiguration;
@ -55,6 +55,7 @@ public class JdbcTypeRegistry implements JdbcTypeBaseline.BaselineTarget, Serial
* map.
*/
private final ConcurrentHashMap<TypeConstructedJdbcTypeKey, JdbcType> typeConstructorDescriptorMap = new ConcurrentHashMap<>();
private final ConcurrentHashMap<String, SqlTypedJdbcType> sqlTypedDescriptorMap = new ConcurrentHashMap<>();
public JdbcTypeRegistry(TypeConfiguration typeConfiguration) {
this.typeConfiguration = typeConfiguration;
@ -182,6 +183,10 @@ public class JdbcTypeRegistry implements JdbcTypeBaseline.BaselineTarget, Serial
);
if ( registrationKey != null ) {
aggregateDescriptorMap.put( registrationKey, resolvedJdbcType );
if ( resolvedJdbcType instanceof SqlTypedJdbcType ) {
final SqlTypedJdbcType sqlTypedJdbcType = (SqlTypedJdbcType) resolvedJdbcType;
sqlTypedDescriptorMap.put( sqlTypedJdbcType.getSqlTypeName().toLowerCase( Locale.ROOT ), sqlTypedJdbcType );
}
}
return resolvedJdbcType;
}
@ -190,6 +195,10 @@ public class JdbcTypeRegistry implements JdbcTypeBaseline.BaselineTarget, Serial
return aggregateDescriptorMap.get( typeName.toLowerCase( Locale.ROOT ) );
}
public SqlTypedJdbcType findSqlTypedDescriptor(String sqlTypeName) {
return sqlTypedDescriptorMap.get( sqlTypeName.toLowerCase( Locale.ROOT ) );
}
/**
* Construct a {@link JdbcType} via {@link JdbcTypeConstructor#resolveType(TypeConfiguration, Dialect, BasicType, ColumnTypeInformation)}
* or return a compatible one from this registry.
@ -245,7 +254,14 @@ public class JdbcTypeRegistry implements JdbcTypeBaseline.BaselineTarget, Serial
);
}
final JdbcType existingType = typeConstructorDescriptorMap.putIfAbsent( key, jdbcType );
return existingType != null ? existingType : jdbcType;
if ( existingType != null ) {
return existingType;
}
if ( jdbcType instanceof SqlTypedJdbcType ) {
final SqlTypedJdbcType sqlTypedJdbcType = (SqlTypedJdbcType) jdbcType;
sqlTypedDescriptorMap.put( sqlTypedJdbcType.getSqlTypeName().toLowerCase( Locale.ROOT ), sqlTypedJdbcType );
}
return jdbcType;
}
else {
return getDescriptor( jdbcTypeConstructorCode );

View File

@ -12,7 +12,10 @@ import org.hibernate.metamodel.mapping.SqlExpressible;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.java.BasicPluralJavaType;
import org.hibernate.type.descriptor.converter.spi.BasicValueConverter;
import org.hibernate.type.descriptor.converter.spi.JpaAttributeConverter;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.spi.EmbeddableAggregateJavaType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import static java.sql.Types.ARRAY;
@ -32,20 +35,25 @@ public class ArrayDdlTypeImpl extends DdlTypeImpl {
@Override
public String getCastTypeName(Size columnSize, SqlExpressible type, DdlTypeRegistry ddlTypeRegistry) {
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) type;
final BasicPluralJavaType<?> javaTypeDescriptor = (BasicPluralJavaType<?>) pluralType.getJavaTypeDescriptor();
final BasicType<?> elementType = pluralType.getElementType();
String arrayElementTypeName = ddlTypeRegistry.getDescriptor( elementType.getJdbcType().getDdlTypeCode() )
.getCastTypeName(
dialect.getSizeStrategy().resolveSize(
elementType.getJdbcMapping().getJdbcType(),
elementType.getJavaTypeDescriptor(),
columnSize.getPrecision(),
columnSize.getScale(),
columnSize.getLength()
),
elementType,
ddlTypeRegistry
);
String arrayElementTypeName;
if ( elementType.getJavaTypeDescriptor() instanceof EmbeddableAggregateJavaType<?> ) {
arrayElementTypeName = ( (EmbeddableAggregateJavaType<?>) elementType.getJavaTypeDescriptor() ).getStructName();
}
else {
arrayElementTypeName = ddlTypeRegistry.getDescriptor( elementType.getJdbcType().getDdlTypeCode() )
.getCastTypeName(
dialect.getSizeStrategy().resolveSize(
elementType.getJdbcMapping().getJdbcType(),
elementType.getJavaTypeDescriptor(),
columnSize.getPrecision(),
columnSize.getScale(),
columnSize.getLength()
),
elementType,
ddlTypeRegistry
);
}
if ( castRawElementType ) {
final int paren = arrayElementTypeName.indexOf( '(' );
if ( paren > 0 ) {
@ -56,7 +64,7 @@ public class ArrayDdlTypeImpl extends DdlTypeImpl {
}
}
return dialect.getArrayTypeName(
javaTypeDescriptor.getElementJavaType().getJavaTypeClass().getSimpleName(),
getElementTypeSimpleName( pluralType.getElementType(), dialect ),
arrayElementTypeName,
columnSize.getArrayLength()
);
@ -65,7 +73,6 @@ public class ArrayDdlTypeImpl extends DdlTypeImpl {
@Override
public String getTypeName(Size columnSize, Type type, DdlTypeRegistry ddlTypeRegistry) {
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) type;
final BasicPluralJavaType<?> javaTypeDescriptor = (BasicPluralJavaType<?>) pluralType.getJavaTypeDescriptor();
final BasicType<?> elementType = pluralType.getElementType();
final String arrayElementTypeName = ddlTypeRegistry.getTypeName(
elementType.getJdbcType().getDdlTypeCode(),
@ -79,10 +86,50 @@ public class ArrayDdlTypeImpl extends DdlTypeImpl {
elementType
);
return dialect.getArrayTypeName(
javaTypeDescriptor.getElementJavaType().getJavaTypeClass().getSimpleName(),
getElementTypeSimpleName( pluralType.getElementType(), dialect ),
arrayElementTypeName,
columnSize.getArrayLength()
);
}
private static String getElementTypeSimpleName(BasicType<?> elementType, Dialect dialect) {
final BasicValueConverter<?, ?> converter = elementType.getValueConverter();
if ( converter != null ) {
if ( converter instanceof JpaAttributeConverter<?, ?> ) {
return ( (JpaAttributeConverter<?, ?>) converter ).getConverterJavaType()
.getJavaTypeClass()
.getSimpleName();
}
else {
return converter.getClass().getSimpleName();
}
}
final JavaType<?> elementJavaType = elementType.getJavaTypeDescriptor();
if ( elementJavaType.getJavaTypeClass().isArray() ) {
return dialect.getArrayTypeName(
elementJavaType.getJavaTypeClass().getComponentType().getSimpleName(),
null,
null
);
}
else {
final Class<?> preferredJavaTypeClass = elementType.getJdbcType().getPreferredJavaTypeClass( null );
if ( preferredJavaTypeClass == null || preferredJavaTypeClass == elementJavaType.getJavaTypeClass() ) {
return elementJavaType.getJavaTypeClass().getSimpleName();
}
else {
if ( preferredJavaTypeClass.isArray() ) {
return elementJavaType.getJavaTypeClass().getSimpleName() + dialect.getArrayTypeName(
preferredJavaTypeClass.getComponentType().getSimpleName(),
null,
null
);
}
else {
return elementJavaType.getJavaTypeClass().getSimpleName() + preferredJavaTypeClass.getSimpleName();
}
}
}
}
}

View File

@ -0,0 +1,135 @@
package org.hibernate.orm.test.component;
import java.util.List;
import org.hibernate.annotations.Struct;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.Id;
@DomainModel(
annotatedClasses = {
StructComponentArrayTest.Book.class,
StructComponentArrayTest.Label.class
}
)
@SessionFactory
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsStructAggregate.class)
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsStructuralArrays.class)
public class StructComponentArrayTest {
@BeforeEach
public void setUp(SessionFactoryScope scope){
scope.inTransaction(
session -> {
Publisher ebookPublisher = new Publisher();
ebookPublisher.setName( "eprint" );
Publisher paperPublisher = new Publisher();
paperPublisher.setName( "paperbooks" );
Book book = new Book();
book.title = "Hibernate";
book.author = "Steve";
book.publishers = new Publisher[] { ebookPublisher, paperPublisher };
book.labels = List.of( new Label( "kind", "Technical" ), new Label( "level", "Beginner" ) );
session.save( book );
}
);
}
@AfterEach
public void tearDown(SessionFactoryScope scope){
scope.inTransaction(
session ->
session.createQuery( "delete from Book" ).executeUpdate()
);
}
@Test
public void testGet(SessionFactoryScope scope){
scope.inTransaction(
session -> {
session.createQuery( "from Book" ).list();
}
);
}
@Entity(name = "Book")
public static class Book {
@Id
@GeneratedValue
private Long id;
private String title;
private String author;
private Publisher[] publishers;
private List<Label> labels;
}
@Embeddable
@Struct( name = "publisher_type")
public static class Publisher {
private String name;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Embeddable
@Struct( name = "label_type")
public static class Label {
private String name;
@Column(name = "val")
private String value;
public Label() {
}
public Label(String name, String value) {
this.name = name;
this.value = value;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
}

View File

@ -1,68 +0,0 @@
package org.hibernate.orm.test.component;
import java.util.List;
import org.hibernate.MappingException;
import org.hibernate.annotations.Struct;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.JiraKey;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.util.ServiceRegistryUtil;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Column;
import jakarta.persistence.ElementCollection;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.Id;
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsStructAggregate.class)
public class StructComponentCollectionError2Test {
@Test
@JiraKey( "HHH-15831" )
public void testError() {
final StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistryBuilder()
.applySetting( AvailableSettings.HBM2DDL_AUTO, "create-drop" ).build();
try {
new MetadataSources( ssr )
.addAnnotatedClass( Book.class )
.getMetadataBuilder()
.build()
.buildSessionFactory();
Assertions.fail( "Expected a failure" );
}
catch (MappingException ex) {
Assertions.assertTrue( ex.getMessage().contains( "participants" ) );
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );
}
}
@Entity(name = "Book")
public static class Book {
@Id
@GeneratedValue
private Long id;
private String title;
@Column(columnDefinition = "participants_type")
private List<Person> participants;
}
@Embeddable
@Struct(name = "person_type")
public static class Person {
private String name;
}
}

View File

@ -1,64 +0,0 @@
package org.hibernate.orm.test.component;
import org.hibernate.MappingException;
import org.hibernate.annotations.Struct;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.JiraKey;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.util.ServiceRegistryUtil;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.Id;
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsStructAggregate.class)
public class StructComponentCollectionError3Test {
@Test
@JiraKey( "HHH-15862" )
public void testError() {
final StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistryBuilder()
.applySetting( AvailableSettings.HBM2DDL_AUTO, "create-drop" ).build();
try {
new MetadataSources( ssr )
.addAnnotatedClass( Book.class )
.getMetadataBuilder()
.build()
.buildSessionFactory();
Assertions.fail( "Expected a failure" );
}
catch (MappingException ex) {
Assertions.assertTrue( ex.getMessage().contains( "tags" ) );
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );
}
}
@Entity(name = "Book")
public static class Book {
@Id
@GeneratedValue
private Long id;
private String title;
private Person author;
}
@Embeddable
@Struct(name = "person_type")
public static class Person {
private String name;
private String[] tags;
}
}

View File

@ -15,28 +15,33 @@ import org.hibernate.testing.util.ServiceRegistryUtil;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import jakarta.persistence.ElementCollection;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.Id;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.OneToMany;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsStructAggregate.class)
public class StructComponentCollectionErrorTest {
@Test
@JiraKey( "HHH-15830" )
public void testError() {
@JiraKey( "HHH-15831" )
public void testError1() {
final StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistry();
try {
new MetadataSources( ssr )
.addAnnotatedClass( Book.class )
.addAnnotatedClass( Book1.class )
.getMetadataBuilder()
.build();
Assertions.fail( "Expected a failure" );
}
catch (MappingException ex) {
Assertions.assertTrue( ex.getMessage().contains( "author.tags" ) );
assertThat( ex.getMessage(), containsString( "author.favoriteBook" ) );
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );
@ -45,21 +50,58 @@ public class StructComponentCollectionErrorTest {
@Entity(name = "Book")
public static class Book {
public static class Book1 {
@Id
@GeneratedValue
private Long id;
private String title;
private Person author;
private Person1 author;
}
@Embeddable
@Struct(name = "person_type")
public static class Person {
public static class Person1 {
private String name;
@ElementCollection
private List<String> tags;
@ManyToOne(fetch = FetchType.LAZY)
private Book1 favoriteBook;
}
@Test
@JiraKey( "HHH-15831" )
public void testError2() {
final StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistry();
try {
new MetadataSources( ssr )
.addAnnotatedClass( Book2.class )
.getMetadataBuilder()
.build();
Assertions.fail( "Expected a failure" );
}
catch (MappingException ex) {
assertThat( ex.getMessage(), containsString( "author.bookCollection" ) );
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );
}
}
@Entity(name = "Book")
public static class Book2 {
@Id
@GeneratedValue
private Long id;
private String title;
private Person2 author;
}
@Embeddable
@Struct(name = "person_type")
public static class Person2 {
private String name;
@OneToMany
private List<Book2> bookCollection;
}
}

View File

@ -11,15 +11,15 @@ import java.util.Objects;
import org.hibernate.annotations.Instantiator;
import org.hibernate.annotations.Struct;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.engine.jdbc.connections.internal.DriverManagerConnectionProviderImpl;
import org.hibernate.testing.orm.junit.BaseSessionFactoryFunctionalTest;
import org.hibernate.testing.jdbc.SharedDriverManagerTypeCacheClearingIntegrator;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.RequiresDialect;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@ -33,198 +33,178 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertInstanceOf;
import static org.junit.jupiter.api.Assertions.assertNull;
@RequiresDialect( PostgreSQLDialect.class )
@RequiresDialect( OracleDialect.class )
public class StructComponentInstantiatorTest extends BaseSessionFactoryFunctionalTest {
@Override
protected Class<?>[] getAnnotatedClasses() {
return new Class<?>[] {
RecordStructHolder.class
};
}
@Override
public StandardServiceRegistry produceServiceRegistry(StandardServiceRegistryBuilder ssrBuilder) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
ssrBuilder.applySetting( AvailableSettings.CONNECTION_PROVIDER, DriverManagerConnectionProviderImpl.class.getName() );
return super.produceServiceRegistry( ssrBuilder );
}
@BootstrapServiceRegistry(
// Clear the type cache, otherwise we might run into ORA-21700: object does not exist or is marked for delete
integrators = SharedDriverManagerTypeCacheClearingIntegrator.class
)
@DomainModel(annotatedClasses = StructComponentInstantiatorTest.RecordStructHolder.class)
@SessionFactory
@RequiresDialect(PostgreSQLDialect.class)
@RequiresDialect(OracleDialect.class)
public class StructComponentInstantiatorTest {
@BeforeEach
public void setUp() {
inTransaction(
session -> {
session.persist( new RecordStructHolder( 1L, Point.createAggregate1() ) );
session.persist( new RecordStructHolder( 2L, Point.createAggregate2() ) );
}
);
public void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
session.persist( new RecordStructHolder( 1L, Point.createAggregate1() ) );
session.persist( new RecordStructHolder( 2L, Point.createAggregate2() ) );
} );
}
@AfterEach
protected void cleanupTest() {
inTransaction(
session -> {
session.createQuery( "delete from RecordStructHolder h" ).executeUpdate();
}
);
protected void cleanupTest(SessionFactoryScope scope) {
scope.inTransaction( session -> {
session.createQuery( "delete from RecordStructHolder h" ).executeUpdate();
} );
}
@Test
public void testUpdate() {
sessionFactoryScope().inTransaction(
entityManager -> {
RecordStructHolder RecordStructHolder = entityManager.find( RecordStructHolder.class, 1L );
RecordStructHolder.setThePoint( Point.createAggregate2() );
entityManager.flush();
entityManager.clear();
assertStructEquals( Point.createAggregate2(), entityManager.find( RecordStructHolder.class, 1L ).getThePoint() );
}
);
public void testUpdate(SessionFactoryScope scope) {
scope.inTransaction( entityManager -> {
RecordStructHolder RecordStructHolder = entityManager.find( RecordStructHolder.class, 1L );
RecordStructHolder.setThePoint( Point.createAggregate2() );
entityManager.flush();
entityManager.clear();
assertStructEquals(
Point.createAggregate2(),
entityManager.find( RecordStructHolder.class, 1L ).getThePoint()
);
} );
}
@Test
public void testFetch() {
sessionFactoryScope().inSession(
entityManager -> {
List<RecordStructHolder> RecordStructHolders = entityManager.createQuery( "from RecordStructHolder b where b.id = 1", RecordStructHolder.class ).getResultList();
assertEquals( 1, RecordStructHolders.size() );
assertEquals( 1L, RecordStructHolders.get( 0 ).getId() );
assertStructEquals( Point.createAggregate1(), RecordStructHolders.get( 0 ).getThePoint() );
}
);
public void testFetch(SessionFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<RecordStructHolder> RecordStructHolders = entityManager.createQuery(
"from RecordStructHolder b where b.id = 1",
RecordStructHolder.class
).getResultList();
assertEquals( 1, RecordStructHolders.size() );
assertEquals( 1L, RecordStructHolders.get( 0 ).getId() );
assertStructEquals( Point.createAggregate1(), RecordStructHolders.get( 0 ).getThePoint() );
} );
}
@Test
public void testFetchNull() {
sessionFactoryScope().inSession(
entityManager -> {
List<RecordStructHolder> RecordStructHolders = entityManager.createQuery( "from RecordStructHolder b where b.id = 2", RecordStructHolder.class ).getResultList();
assertEquals( 1, RecordStructHolders.size() );
assertEquals( 2L, RecordStructHolders.get( 0 ).getId() );
assertStructEquals( Point.createAggregate2(), RecordStructHolders.get( 0 ).getThePoint() );
}
);
public void testFetchNull(SessionFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<RecordStructHolder> RecordStructHolders = entityManager.createQuery(
"from RecordStructHolder b where b.id = 2",
RecordStructHolder.class
).getResultList();
assertEquals( 1, RecordStructHolders.size() );
assertEquals( 2L, RecordStructHolders.get( 0 ).getId() );
assertStructEquals( Point.createAggregate2(), RecordStructHolders.get( 0 ).getThePoint() );
} );
}
@Test
public void testDomainResult() {
sessionFactoryScope().inSession(
entityManager -> {
List<Point> structs = entityManager.createQuery( "select b.thePoint from RecordStructHolder b where b.id = 1", Point.class ).getResultList();
assertEquals( 1, structs.size() );
assertStructEquals( Point.createAggregate1(), structs.get( 0 ) );
}
);
public void testDomainResult(SessionFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Point> structs = entityManager.createQuery(
"select b.thePoint from RecordStructHolder b where b.id = 1",
Point.class
).getResultList();
assertEquals( 1, structs.size() );
assertStructEquals( Point.createAggregate1(), structs.get( 0 ) );
} );
}
@Test
public void testSelectionItems() {
sessionFactoryScope().inSession(
entityManager -> {
List<Tuple> tuples = entityManager.createQuery(
"select " +
"b.thePoint.x," +
"b.thePoint.y," +
"b.thePoint.z " +
"from RecordStructHolder b where b.id = 1",
Tuple.class
).getResultList();
assertEquals( 1, tuples.size() );
final Tuple tuple = tuples.get( 0 );
assertStructEquals(
Point.createAggregate1(),
new Point(
tuple.get( 1, String.class ),
tuple.get( 2, long.class ),
tuple.get( 0, int.class )
)
);
}
);
public void testSelectionItems(SessionFactoryScope scope) {
scope.inTransaction( entityManager -> {
List<Tuple> tuples = entityManager.createQuery(
"select " +
"b.thePoint.x," +
"b.thePoint.y," +
"b.thePoint.z " +
"from RecordStructHolder b where b.id = 1",
Tuple.class
).getResultList();
assertEquals( 1, tuples.size() );
final Tuple tuple = tuples.get( 0 );
assertStructEquals(
Point.createAggregate1(),
new Point(
tuple.get( 1, String.class ),
tuple.get( 2, long.class ),
tuple.get( 0, int.class )
)
);
} );
}
@Test
public void testDeleteWhere() {
sessionFactoryScope().inTransaction(
entityManager -> {
entityManager.createQuery( "delete RecordStructHolder b where b.thePoint is not null" ).executeUpdate();
assertNull( entityManager.find( RecordStructHolder.class, 1L ) );
}
);
public void testDeleteWhere(SessionFactoryScope scope) {
scope.inTransaction( entityManager -> {
entityManager.createQuery( "delete RecordStructHolder b where b.thePoint is not null" ).executeUpdate();
assertNull( entityManager.find( RecordStructHolder.class, 1L ) );
} );
}
@Test
public void testUpdateAggregate() {
sessionFactoryScope().inTransaction(
entityManager -> {
entityManager.createQuery( "update RecordStructHolder b set b.thePoint = null" ).executeUpdate();
assertNull( entityManager.find( RecordStructHolder.class, 1L ).getThePoint() );
}
);
public void testUpdateAggregate(SessionFactoryScope scope) {
scope.inTransaction( entityManager -> {
entityManager.createQuery( "update RecordStructHolder b set b.thePoint = null" ).executeUpdate();
assertNull( entityManager.find( RecordStructHolder.class, 1L ).getThePoint() );
} );
}
@Test
public void testUpdateAggregateMember() {
sessionFactoryScope().inTransaction(
entityManager -> {
entityManager.createQuery( "update RecordStructHolder b set b.thePoint.x = null" ).executeUpdate();
Point struct = Point.createAggregate1().withX( null );
assertStructEquals( struct, entityManager.find( RecordStructHolder.class, 1L ).getThePoint() );
}
);
public void testUpdateAggregateMember(SessionFactoryScope scope) {
scope.inTransaction( entityManager -> {
entityManager.createQuery( "update RecordStructHolder b set b.thePoint.x = null" ).executeUpdate();
Point struct = Point.createAggregate1().withX( null );
assertStructEquals( struct, entityManager.find( RecordStructHolder.class, 1L ).getThePoint() );
} );
}
@Test
public void testUpdateMultipleAggregateMembers() {
sessionFactoryScope().inTransaction(
entityManager -> {
entityManager.createQuery( "update RecordStructHolder b set b.thePoint.y = null, b.thePoint.z = 0" ).executeUpdate();
Point struct = Point.createAggregate1().withY( null ).withZ( 0 );
assertStructEquals( struct, entityManager.find( RecordStructHolder.class, 1L ).getThePoint() );
}
);
public void testUpdateMultipleAggregateMembers(SessionFactoryScope scope) {
scope.inTransaction( entityManager -> {
entityManager.createQuery( "update RecordStructHolder b set b.thePoint.y = null, b.thePoint.z = 0" )
.executeUpdate();
Point struct = Point.createAggregate1().withY( null ).withZ( 0 );
assertStructEquals( struct, entityManager.find( RecordStructHolder.class, 1L ).getThePoint() );
} );
}
@Test
public void testUpdateAllAggregateMembers() {
sessionFactoryScope().inTransaction(
entityManager -> {
Point struct = Point.createAggregate1();
entityManager.createQuery(
public void testUpdateAllAggregateMembers(SessionFactoryScope scope) {
scope.inTransaction( entityManager -> {
Point struct = Point.createAggregate1();
entityManager.createQuery(
"update RecordStructHolder b set " +
"b.thePoint.x = :x," +
"b.thePoint.y = :y," +
"b.thePoint.z = :z " +
"where b.id = 2"
)
.setParameter( "x", struct.getX() )
.setParameter( "y", struct.getY() )
.setParameter( "z", struct.getZ() )
.executeUpdate();
assertStructEquals( Point.createAggregate1(), entityManager.find( RecordStructHolder.class, 2L ).getThePoint() );
}
);
.setParameter( "x", struct.getX() )
.setParameter( "y", struct.getY() )
.setParameter( "z", struct.getZ() )
.executeUpdate();
assertStructEquals(
Point.createAggregate1(),
entityManager.find( RecordStructHolder.class, 2L ).getThePoint()
);
} );
}
@Test
public void testNativeQuery() {
sessionFactoryScope().inTransaction(
entityManager -> {
//noinspection unchecked
List<Object> resultList = entityManager.createNativeQuery(
"select b.thePoint from RecordStructHolder b where b.id = 1",
Object.class
)
.getResultList();
assertEquals( 1, resultList.size() );
assertInstanceOf( Point.class, resultList.get( 0 ) );
Point struct = (Point) resultList.get( 0 );
assertStructEquals( Point.createAggregate1(), struct );
}
);
public void testNativeQuery(SessionFactoryScope scope) {
scope.inTransaction( entityManager -> {
//noinspection unchecked
List<Object> resultList = entityManager.createNativeQuery(
"select b.thePoint from RecordStructHolder b where b.id = 1",
Object.class
)
.getResultList();
assertEquals( 1, resultList.size() );
assertInstanceOf( Point.class, resultList.get( 0 ) );
Point struct = (Point) resultList.get( 0 );
assertStructEquals( Point.createAggregate1(), struct );
} );
}
private void assertStructEquals(Point point1, Point point2) {
@ -274,7 +254,7 @@ public class StructComponentInstantiatorTest extends BaseSessionFactoryFunctiona
private final long z;
private final Integer x;
@Instantiator({"y","z","x"})
@Instantiator({ "y", "z", "x" })
public Point(String y, long z, Integer x) {
this.y = y;
this.x = x;
@ -296,15 +276,19 @@ public class StructComponentInstantiatorTest extends BaseSessionFactoryFunctiona
public Point withX(Integer x) {
return new Point( y, z, x );
}
public Point withY(String y) {
return new Point( y, z, x );
}
public Point withZ(long z) {
return new Point( y, z, x );
}
public static Point createAggregate1() {
return new Point( "1", -100, 10 );
}
public static Point createAggregate2() {
return new Point( "20", -200, 2 );
}

View File

@ -14,6 +14,7 @@ import org.hibernate.boot.spi.AdditionalMappingContributor;
import org.hibernate.boot.spi.InFlightMetadataCollector;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.dialect.OracleArrayJdbcType;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.SpannerDialect;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.query.criteria.JpaCriteriaQuery;
@ -67,18 +68,20 @@ public class ArrayAggregateTest {
InFlightMetadataCollector metadata,
ResourceStreamLocator resourceStreamLocator,
MetadataBuildingContext buildingContext) {
final TypeConfiguration typeConfiguration = metadata.getTypeConfiguration();
final JavaTypeRegistry javaTypeRegistry = typeConfiguration.getJavaTypeRegistry();
final JdbcTypeRegistry jdbcTypeRegistry = typeConfiguration.getJdbcTypeRegistry();
new OracleArrayJdbcType(
jdbcTypeRegistry.getDescriptor( SqlTypes.VARCHAR ),
"StringArray"
).addAuxiliaryDatabaseObjects(
new ArrayJavaType<>( javaTypeRegistry.getDescriptor( String.class ) ),
Size.nil(),
metadata.getDatabase(),
typeConfiguration.getCurrentBaseSqlTypeIndicators()
);
if ( metadata.getDatabase().getDialect() instanceof OracleDialect ) {
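// Only Oracle needs this step: the named "StringArray" array type is exported as an
// auxiliary database object so the user-defined type exists before the tables that use it.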
final TypeConfiguration typeConfiguration = metadata.getTypeConfiguration();
final JavaTypeRegistry javaTypeRegistry = typeConfiguration.getJavaTypeRegistry();
final JdbcTypeRegistry jdbcTypeRegistry = typeConfiguration.getJdbcTypeRegistry();
new OracleArrayJdbcType(
jdbcTypeRegistry.getDescriptor( SqlTypes.VARCHAR ),
"StringArray"
).addAuxiliaryDatabaseObjects(
new ArrayJavaType<>( javaTypeRegistry.getDescriptor( String.class ) ),
Size.nil(),
metadata.getDatabase(),
typeConfiguration.getCurrentBaseSqlTypeIndicators()
);
}
}
}

View File

@ -6,7 +6,18 @@
*/
package org.hibernate.orm.test.jpa.criteria;
import jakarta.persistence.EntityManager;
import java.util.Arrays;
import java.util.List;
import org.hibernate.testing.jdbc.SharedDriverManagerTypeCacheClearingIntegrator;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.JiraKey;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Parameter;
import jakarta.persistence.TypedQuery;
import jakarta.persistence.criteria.CriteriaBuilder;
@ -15,18 +26,6 @@ import jakarta.persistence.criteria.ParameterExpression;
import jakarta.persistence.criteria.Path;
import jakarta.persistence.criteria.Root;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.junit.Test;
import org.hibernate.orm.test.jpa.BaseEntityManagerFunctionalTestCase;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.transaction.TransactionUtil;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
@ -34,106 +33,94 @@ import static org.junit.Assert.assertThat;
/**
* @author Steve Ebersole
*/
public class ParameterTest extends BaseEntityManagerFunctionalTestCase {
@Override
protected void addConfigOptions(Map options) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
options.put( AvailableSettings.CONNECTION_PROVIDER, "" );
@BootstrapServiceRegistry(
// Clear the type cache, otherwise we might run into ORA-21700: object does not exist or is marked for delete
integrators = SharedDriverManagerTypeCacheClearingIntegrator.class
)
@DomainModel(annotatedClasses = MultiTypedBasicAttributesEntity.class)
@SessionFactory
public class ParameterTest {
@Test
public void testPrimitiveArrayParameterBinding(SessionFactoryScope scope) {
scope.inTransaction( em -> {
CriteriaQuery<MultiTypedBasicAttributesEntity> criteria = em.getCriteriaBuilder()
.createQuery( MultiTypedBasicAttributesEntity.class );
Root<MultiTypedBasicAttributesEntity> rootEntity = criteria.from( MultiTypedBasicAttributesEntity.class );
Path<int[]> someIntsPath = rootEntity.get( MultiTypedBasicAttributesEntity_.someInts );
ParameterExpression<int[]> param = em.getCriteriaBuilder().parameter( int[].class, "theInts" );
criteria.where( em.getCriteriaBuilder().equal( someIntsPath, param ) );
TypedQuery<MultiTypedBasicAttributesEntity> query = em.createQuery( criteria );
query.setParameter( param, new int[] { 1,1,1 } );
assertThat( query.getParameterValue( param.getName() ), instanceOf( int[].class) );
query.getResultList();
} );
}
@Test
public void testPrimitiveArrayParameterBinding() {
EntityManager em = getOrCreateEntityManager();
em.getTransaction().begin();
CriteriaQuery<MultiTypedBasicAttributesEntity> criteria = em.getCriteriaBuilder()
.createQuery( MultiTypedBasicAttributesEntity.class );
Root<MultiTypedBasicAttributesEntity> rootEntity = criteria.from( MultiTypedBasicAttributesEntity.class );
Path<int[]> someIntsPath = rootEntity.get( MultiTypedBasicAttributesEntity_.someInts );
ParameterExpression<int[]> param = em.getCriteriaBuilder().parameter( int[].class, "theInts" );
criteria.where( em.getCriteriaBuilder().equal( someIntsPath, param ) );
TypedQuery<MultiTypedBasicAttributesEntity> query = em.createQuery( criteria );
query.setParameter( param, new int[] { 1,1,1 } );
assertThat( query.getParameterValue( param.getName() ), instanceOf( int[].class) );
query.getResultList();
em.getTransaction().commit();
em.close();
public void testNonPrimitiveArrayParameterBinding(SessionFactoryScope scope) {
scope.inTransaction( em -> {
CriteriaQuery<MultiTypedBasicAttributesEntity> criteria = em.getCriteriaBuilder()
.createQuery( MultiTypedBasicAttributesEntity.class );
Root<MultiTypedBasicAttributesEntity> rootEntity = criteria.from( MultiTypedBasicAttributesEntity.class );
Path<Integer[]> thePath = rootEntity.get( MultiTypedBasicAttributesEntity_.someWrappedIntegers );
ParameterExpression<Integer[]> param = em.getCriteriaBuilder().parameter( Integer[].class, "theIntegers" );
criteria.where( em.getCriteriaBuilder().equal( thePath, param ) );
TypedQuery<MultiTypedBasicAttributesEntity> query = em.createQuery( criteria );
query.setParameter( param, new Integer[] { 1, 1, 1 } );
assertThat( query.getParameterValue( param.getName() ), instanceOf( Integer[].class ) );
query.getResultList();
} );
}
@Test
public void testNonPrimitiveArrayParameterBinding() {
EntityManager em = getOrCreateEntityManager();
em.getTransaction().begin();
CriteriaQuery<MultiTypedBasicAttributesEntity> criteria = em.getCriteriaBuilder()
.createQuery( MultiTypedBasicAttributesEntity.class );
Root<MultiTypedBasicAttributesEntity> rootEntity = criteria.from( MultiTypedBasicAttributesEntity.class );
Path<Integer[]> thePath = rootEntity.get( MultiTypedBasicAttributesEntity_.someWrappedIntegers );
ParameterExpression<Integer[]> param = em.getCriteriaBuilder().parameter( Integer[].class, "theIntegers" );
criteria.where( em.getCriteriaBuilder().equal( thePath, param ) );
TypedQuery<MultiTypedBasicAttributesEntity> query = em.createQuery( criteria );
query.setParameter( param, new Integer[] {1, 1, 1} );
assertThat( query.getParameterValue( param.getName() ), instanceOf( Integer[].class ) );
query.getResultList();
em.getTransaction().commit();
em.close();
public void testNamedParameterMetadata(SessionFactoryScope scope) {
scope.inTransaction( em -> {
CriteriaQuery<MultiTypedBasicAttributesEntity> criteria = em.getCriteriaBuilder()
.createQuery( MultiTypedBasicAttributesEntity.class );
Root<MultiTypedBasicAttributesEntity> rootEntity = criteria.from( MultiTypedBasicAttributesEntity.class );
criteria.where(
em.getCriteriaBuilder().equal(
rootEntity.get( MultiTypedBasicAttributesEntity_.id ),
em.getCriteriaBuilder().parameter( Long.class, "id" )
)
);
TypedQuery<MultiTypedBasicAttributesEntity> query = em.createQuery( criteria );
Parameter<?> parameter = query.getParameter( "id" );
assertEquals( "id", parameter.getName() );
} );
}
@Test
public void testNamedParameterMetadata() {
EntityManager em = getOrCreateEntityManager();
em.getTransaction().begin();
CriteriaQuery<MultiTypedBasicAttributesEntity> criteria = em.getCriteriaBuilder()
.createQuery( MultiTypedBasicAttributesEntity.class );
Root<MultiTypedBasicAttributesEntity> rootEntity = criteria.from( MultiTypedBasicAttributesEntity.class );
criteria.where(
em.getCriteriaBuilder().equal(
rootEntity.get( MultiTypedBasicAttributesEntity_.id ),
em.getCriteriaBuilder().parameter( Long.class, "id" )
)
);
TypedQuery<MultiTypedBasicAttributesEntity> query = em.createQuery( criteria );
Parameter<?> parameter = query.getParameter( "id" );
assertEquals( "id", parameter.getName() );
em.getTransaction().commit();
em.close();
}
@Test
public void testParameterInParameterList() {
public void testParameterInParameterList(SessionFactoryScope scope) {
// Yes, this test makes no semantic sense. But the JPA TCK does it...
// it causes a problem on Derby, which does not like the form "... where ? in (?,?)"
// Derby wants one side or the other to be CAST (I assume so it can check typing).
scope.inTransaction( em -> {
CriteriaQuery<MultiTypedBasicAttributesEntity> criteria = em.getCriteriaBuilder()
.createQuery( MultiTypedBasicAttributesEntity.class );
criteria.from( MultiTypedBasicAttributesEntity.class );
EntityManager em = getOrCreateEntityManager();
em.getTransaction().begin();
CriteriaQuery<MultiTypedBasicAttributesEntity> criteria = em.getCriteriaBuilder()
.createQuery( MultiTypedBasicAttributesEntity.class );
criteria.from( MultiTypedBasicAttributesEntity.class );
criteria.where(
em.getCriteriaBuilder().in( em.getCriteriaBuilder().parameter( Long.class, "p1" ) )
.value( em.getCriteriaBuilder().parameter( Long.class, "p2" ) )
.value( em.getCriteriaBuilder().parameter( Long.class, "p3" ) )
);
criteria.where(
em.getCriteriaBuilder().in( em.getCriteriaBuilder().parameter( Long.class, "p1" ) )
.value( em.getCriteriaBuilder().parameter( Long.class, "p2" ) )
.value( em.getCriteriaBuilder().parameter( Long.class, "p3" ) )
);
TypedQuery<MultiTypedBasicAttributesEntity> query = em.createQuery( criteria );
query.setParameter( "p1", 1L );
query.setParameter( "p2", 2L );
query.setParameter( "p3", 3L );
query.getResultList();
em.getTransaction().commit();
em.close();
TypedQuery<MultiTypedBasicAttributesEntity> query = em.createQuery( criteria );
query.setParameter( "p1", 1L );
query.setParameter( "p2", 2L );
query.setParameter( "p3", 3L );
query.getResultList();
} );
}
@Test
@TestForIssue(jiraKey = "HHH-10870")
public void testParameterInParameterList2() {
TransactionUtil.doInJPA( this::entityManagerFactory, em -> {
@JiraKey("HHH-10870")
public void testParameterInParameterList2(SessionFactoryScope scope) {
scope.inTransaction( em -> {
final CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
final CriteriaQuery<MultiTypedBasicAttributesEntity> criteria = criteriaBuilder
.createQuery( MultiTypedBasicAttributesEntity.class );
@ -150,9 +137,9 @@ public class ParameterTest extends BaseEntityManagerFunctionalTestCase {
}
@Test
@TestForIssue(jiraKey = "HHH-17912")
public void testAttributeEqualListParameter() {
TransactionUtil.doInJPA( this::entityManagerFactory, em -> {
@JiraKey("HHH-17912")
public void testAttributeEqualListParameter(SessionFactoryScope scope) {
scope.inTransaction( em -> {
final CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
final CriteriaQuery<MultiTypedBasicAttributesEntity> criteria = criteriaBuilder
.createQuery( MultiTypedBasicAttributesEntity.class );
@ -167,9 +154,4 @@ public class ParameterTest extends BaseEntityManagerFunctionalTestCase {
query1.getResultList();
} );
}
@Override
public Class<?>[] getAnnotatedClasses() {
return new Class[] { MultiTypedBasicAttributesEntity.class };
}
}

View File

@ -8,41 +8,31 @@ package org.hibernate.orm.test.mapping.collections;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.orm.test.jpa.BaseEntityManagerFunctionalTestCase;
import org.junit.Test;
import org.hibernate.testing.jdbc.SharedDriverManagerTypeCacheClearingIntegrator;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import static org.hibernate.testing.transaction.TransactionUtil.doInJPA;
/**
* @author Christian Beikov
*/
public class CollectionTest extends BaseEntityManagerFunctionalTestCase {
@Override
protected Class<?>[] getAnnotatedClasses() {
return new Class<?>[] {
Person.class
};
}
@Override
protected void addConfigOptions(Map options) {
// Make sure this stuff runs on a dedicated connection pool,
// otherwise we might run into ORA-21700: object does not exist or is marked for delete
// because the JDBC connection or database session caches something that should have been invalidated
options.put( AvailableSettings.CONNECTION_PROVIDER, "" );
}
@BootstrapServiceRegistry(
// Clear the type cache, otherwise we might run into ORA-21700: object does not exist or is marked for delete
integrators = SharedDriverManagerTypeCacheClearingIntegrator.class
)
@DomainModel(annotatedClasses = CollectionTest.Person.class)
@SessionFactory
public class CollectionTest {
@Test
public void testLifecycle() {
doInJPA(this::entityManagerFactory, entityManager -> {
public void testLifecycle(SessionFactoryScope scope) {
scope.inTransaction( entityManager -> {
Person person = new Person(1L);
List<String> phones = new ArrayList<>();
phones.add( "028-234-9876" );
@ -50,7 +40,7 @@ public class CollectionTest extends BaseEntityManagerFunctionalTestCase {
person.setPhones(phones);
entityManager.persist(person);
});
doInJPA(this::entityManagerFactory, entityManager -> {
scope.inTransaction( entityManager -> {
Person person = entityManager.find(Person.class, 1L);
List<String> phones = new ArrayList<>();
phones.add( "072-122-9876" );

View File

@ -25,7 +25,9 @@ import java.util.Date;
import java.util.Objects;
import java.util.UUID;
import org.hibernate.Length;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.engine.jdbc.ClobProxy;
import org.hibernate.testing.orm.domain.gambit.EntityOfBasics;
import org.hibernate.testing.orm.domain.gambit.MutableValue;
@ -58,7 +60,7 @@ public class EmbeddableAggregate {
private int theInt;
private double theDouble;
private URL theUrl;
private Clob theClob;
private String theClob;
private byte[] theBinary;
private Date theDate;
private Date theTime;
@ -121,11 +123,12 @@ public class EmbeddableAggregate {
this.theUrl = theUrl;
}
public Clob getTheClob() {
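// Length.LONG32 (Integer.MAX_VALUE) keeps the column large enough for CLOB-sized content
// now that the property is a plain String instead of a java.sql.Clob.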
@Column(length = Length.LONG32)
public String getTheClob() {
return theClob;
}
public void setTheClob(Clob theClob) {
public void setTheClob(String theClob) {
this.theClob = theClob;
}
@ -301,6 +304,7 @@ public class EmbeddableAggregate {
Assertions.assertEquals( a1.theStringBoolean, a2.theStringBoolean );
Assertions.assertEquals( a1.theString, a2.theString );
Assertions.assertEquals( a1.theInteger, a2.theInteger );
Assertions.assertEquals( a1.theUrl, a2.theUrl );
Assertions.assertEquals( a1.theClob, a2.theClob );
assertArrayEquals( a1.theBinary, a2.theBinary );
Assertions.assertEquals( a1.theDate, a2.theDate );
@ -353,6 +357,7 @@ public class EmbeddableAggregate {
catch (MalformedURLException e) {
throw new RuntimeException( e );
}
aggregate.theClob = "Abc";
aggregate.theBinary = new byte[] { 1 };
aggregate.theDate = new java.sql.Date( 2000 - 1900, 0, 1 );
aggregate.theTime = new Time( 1, 0, 0 );
@ -387,6 +392,11 @@ public class EmbeddableAggregate {
return aggregate;
}
@Override
public int hashCode() {
return 1;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {

View File

@ -0,0 +1,537 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.orm.test.mapping.embeddable;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Date;
import java.util.UUID;
import org.hibernate.Length;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.engine.jdbc.BlobProxy;
import org.hibernate.engine.jdbc.ClobProxy;
import org.hibernate.type.NumericBooleanConverter;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.YesNoConverter;
import org.hibernate.testing.orm.domain.gambit.EntityOfBasics;
import org.hibernate.testing.orm.domain.gambit.MutableValue;
import org.junit.jupiter.api.Assertions;
import jakarta.persistence.Access;
import jakarta.persistence.AccessType;
import jakarta.persistence.Column;
import jakarta.persistence.Convert;
import jakarta.persistence.Embeddable;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.Temporal;
import jakarta.persistence.TemporalType;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
@Embeddable
@Access( AccessType.PROPERTY )
public class EmbeddableWithArrayAggregate {
private Boolean[] theBoolean;
private Boolean[] theNumericBoolean;
private Boolean[] theStringBoolean;
private String[] theString;
private Integer[] theInteger;
private int[] theInt;
private double[] theDouble;
private URL[] theUrl;
private String[] theClob;
private byte[][] theBinary;
private Date[] theDate;
private Date[] theTime;
private Date[] theTimestamp;
private Instant[] theInstant;
private UUID[] theUuid;
private EntityOfBasics.Gender[] gender;
private EntityOfBasics.Gender[] convertedGender;
private EntityOfBasics.Gender[] ordinalGender;
private Duration[] theDuration;
private LocalDateTime[] theLocalDateTime;
private LocalDate[] theLocalDate;
private LocalTime[] theLocalTime;
private ZonedDateTime[] theZonedDateTime;
private OffsetDateTime[] theOffsetDateTime;
private MutableValue[] mutableValue;
public EmbeddableWithArrayAggregate() {
}
@JdbcTypeCode(SqlTypes.ARRAY)
public String[] getTheString() {
return theString;
}
public void setTheString(String[] theString) {
this.theString = theString;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public Integer[] getTheInteger() {
return theInteger;
}
public void setTheInteger(Integer[] theInteger) {
this.theInteger = theInteger;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public int[] getTheInt() {
return theInt;
}
public void setTheInt(int[] theInt) {
this.theInt = theInt;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public double[] getTheDouble() {
return theDouble;
}
public void setTheDouble(double[] theDouble) {
this.theDouble = theDouble;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public URL[] getTheUrl() {
return theUrl;
}
public void setTheUrl(URL[] theUrl) {
this.theUrl = theUrl;
}
@Column(length = Length.LONG32)
@JdbcTypeCode(SqlTypes.ARRAY)
public String[] getTheClob() {
return theClob;
}
public void setTheClob(String[] theClob) {
this.theClob = theClob;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public byte[][] getTheBinary() {
return theBinary;
}
public void setTheBinary(byte[][] theBinary) {
this.theBinary = theBinary;
}
@Enumerated( EnumType.STRING )
@JdbcTypeCode(SqlTypes.ARRAY)
public EntityOfBasics.Gender[] getGender() {
return gender;
}
public void setGender(EntityOfBasics.Gender[] gender) {
this.gender = gender;
}
@Convert( converter = EntityOfBasics.GenderConverter.class )
@Column(name = "converted_gender", length = 1)
@JdbcTypeCode(SqlTypes.ARRAY)
public EntityOfBasics.Gender[] getConvertedGender() {
return convertedGender;
}
public void setConvertedGender(EntityOfBasics.Gender[] convertedGender) {
this.convertedGender = convertedGender;
}
@Column(name = "ordinal_gender")
@JdbcTypeCode(SqlTypes.ARRAY)
public EntityOfBasics.Gender[] getOrdinalGender() {
return ordinalGender;
}
public void setOrdinalGender(EntityOfBasics.Gender[] ordinalGender) {
this.ordinalGender = ordinalGender;
}
@Temporal( TemporalType.DATE )
@JdbcTypeCode(SqlTypes.ARRAY)
public Date[] getTheDate() {
return theDate;
}
public void setTheDate(Date[] theDate) {
this.theDate = theDate;
}
@Temporal( TemporalType.TIME )
@JdbcTypeCode(SqlTypes.ARRAY)
public Date[] getTheTime() {
return theTime;
}
public void setTheTime(Date[] theTime) {
this.theTime = theTime;
}
@Temporal( TemporalType.TIMESTAMP )
@JdbcTypeCode(SqlTypes.ARRAY)
public Date[] getTheTimestamp() {
return theTimestamp;
}
public void setTheTimestamp(Date[] theTimestamp) {
this.theTimestamp = theTimestamp;
}
@Temporal( TemporalType.TIMESTAMP )
@JdbcTypeCode(SqlTypes.ARRAY)
public Instant[] getTheInstant() {
return theInstant;
}
public void setTheInstant(Instant[] theInstant) {
this.theInstant = theInstant;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public UUID[] getTheUuid() {
return theUuid;
}
public void setTheUuid(UUID[] theUuid) {
this.theUuid = theUuid;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public LocalDateTime[] getTheLocalDateTime() {
return theLocalDateTime;
}
public void setTheLocalDateTime(LocalDateTime[] theLocalDateTime) {
this.theLocalDateTime = theLocalDateTime;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public LocalDate[] getTheLocalDate() {
return theLocalDate;
}
public void setTheLocalDate(LocalDate[] theLocalDate) {
this.theLocalDate = theLocalDate;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public LocalTime[] getTheLocalTime() {
return theLocalTime;
}
public void setTheLocalTime(LocalTime[] theLocalTime) {
this.theLocalTime = theLocalTime;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public OffsetDateTime[] getTheOffsetDateTime() {
return theOffsetDateTime;
}
public void setTheOffsetDateTime(OffsetDateTime[] theOffsetDateTime) {
this.theOffsetDateTime = theOffsetDateTime;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public ZonedDateTime[] getTheZonedDateTime() {
return theZonedDateTime;
}
public void setTheZonedDateTime(ZonedDateTime[] theZonedDateTime) {
this.theZonedDateTime = theZonedDateTime;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public Duration[] getTheDuration() {
return theDuration;
}
public void setTheDuration(Duration[] theDuration) {
this.theDuration = theDuration;
}
@JdbcTypeCode(SqlTypes.ARRAY)
public Boolean[] isTheBoolean() {
return theBoolean;
}
public void setTheBoolean(Boolean[] theBoolean) {
this.theBoolean = theBoolean;
}
@Convert(converter = NumericBooleanConverter.class)
@JdbcTypeCode(SqlTypes.ARRAY)
public Boolean[] isTheNumericBoolean() {
return theNumericBoolean;
}
public void setTheNumericBoolean(Boolean[] theNumericBoolean) {
this.theNumericBoolean = theNumericBoolean;
}
@Convert(converter = YesNoConverter.class)
@JdbcTypeCode(SqlTypes.ARRAY)
public Boolean[] isTheStringBoolean() {
return theStringBoolean;
}
public void setTheStringBoolean(Boolean[] theStringBoolean) {
this.theStringBoolean = theStringBoolean;
}
@Convert( converter = EntityOfBasics.MutableValueConverter.class )
@JdbcTypeCode(SqlTypes.ARRAY)
public MutableValue[] getMutableValue() {
return mutableValue;
}
public void setMutableValue(MutableValue[] mutableValue) {
this.mutableValue = mutableValue;
}
static void assertEquals(EmbeddableWithArrayAggregate a1, EmbeddableWithArrayAggregate a2) {
Assertions.assertArrayEquals( a1.theInt, a2.theInt );
Assertions.assertArrayEquals( a1.theDouble, a2.theDouble );
Assertions.assertArrayEquals( a1.theBoolean, a2.theBoolean );
Assertions.assertArrayEquals( a1.theNumericBoolean, a2.theNumericBoolean );
Assertions.assertArrayEquals( a1.theStringBoolean, a2.theStringBoolean );
Assertions.assertArrayEquals( a1.theString, a2.theString );
Assertions.assertArrayEquals( a1.theInteger, a2.theInteger );
Assertions.assertArrayEquals( a1.theUrl, a2.theUrl );
Assertions.assertArrayEquals( a1.theClob, a2.theClob );
Assertions.assertArrayEquals( a1.theBinary, a2.theBinary );
Assertions.assertArrayEquals( a1.theDate, a2.theDate );
Assertions.assertArrayEquals( a1.theTime, a2.theTime );
Assertions.assertArrayEquals( a1.theTimestamp, a2.theTimestamp );
Assertions.assertArrayEquals( a1.theInstant, a2.theInstant );
Assertions.assertArrayEquals( a1.theUuid, a2.theUuid );
Assertions.assertArrayEquals( a1.gender, a2.gender );
Assertions.assertArrayEquals( a1.convertedGender, a2.convertedGender );
Assertions.assertArrayEquals( a1.ordinalGender, a2.ordinalGender );
Assertions.assertArrayEquals( a1.theDuration, a2.theDuration );
Assertions.assertArrayEquals( a1.theLocalDateTime, a2.theLocalDateTime );
Assertions.assertArrayEquals( a1.theLocalDate, a2.theLocalDate );
Assertions.assertArrayEquals( a1.theLocalTime, a2.theLocalTime );
if ( a1.theZonedDateTime == null ) {
assertNull( a2.theZonedDateTime );
}
else {
assertNotNull( a2.theZonedDateTime );
Assertions.assertEquals( a1.theZonedDateTime.length, a2.theZonedDateTime.length );
for ( int i = 0; i < a1.theZonedDateTime.length; i++ ) {
if ( a1.theZonedDateTime[i] == null ) {
assertNull( a2.theZonedDateTime[i] );
}
else {
assertNotNull( a2.theZonedDateTime[i] );
Assertions.assertEquals( a1.theZonedDateTime[i].toInstant(), a2.theZonedDateTime[i].toInstant() );
}
}
}
if ( a1.theOffsetDateTime == null ) {
assertNull( a2.theOffsetDateTime );
}
else {
assertNotNull( a2.theOffsetDateTime );
Assertions.assertEquals( a1.theOffsetDateTime.length, a2.theOffsetDateTime.length );
for ( int i = 0; i < a1.theOffsetDateTime.length; i++ ) {
if ( a1.theOffsetDateTime[i] == null ) {
assertNull( a2.theOffsetDateTime[i] );
}
else {
assertNotNull( a2.theOffsetDateTime[i] );
Assertions.assertEquals( a1.theOffsetDateTime[i].toInstant(), a2.theOffsetDateTime[i].toInstant() );
}
}
}
if ( a1.mutableValue == null ) {
assertNull( a2.mutableValue );
}
else {
assertNotNull( a2.mutableValue );
Assertions.assertEquals( a1.mutableValue.length, a2.mutableValue.length );
for ( int i = 0; i < a1.mutableValue.length; i++ ) {
if ( a1.mutableValue[i] == null ) {
assertNull( a2.mutableValue[i] );
}
else {
assertNotNull( a2.mutableValue[i] );
Assertions.assertEquals( a1.mutableValue[i].getState(), a2.mutableValue[i].getState() );
}
}
}
}
public static EmbeddableWithArrayAggregate createAggregate1() {
final EmbeddableWithArrayAggregate aggregate = new EmbeddableWithArrayAggregate();
aggregate.theBoolean = new Boolean[]{ true, false, true };
aggregate.theNumericBoolean = new Boolean[]{ true, false, true };
aggregate.theStringBoolean = new Boolean[]{ true, false, true };
aggregate.theString = new String[]{ "String \"<abc>A&B</abc>\"", "{\"text\":\"<abc>A&B</abc>\"}" };
aggregate.theInteger = new Integer[]{ -1, };
aggregate.theInt = new int[]{ Integer.MAX_VALUE };
aggregate.theDouble = new double[]{ 1.3e20 };
try {
aggregate.theUrl = new URL[]{ new URL( "https://hibernate.org" ) };
}
catch (MalformedURLException e) {
throw new RuntimeException( e );
}
aggregate.theClob = new String[]{ "Abc" };
aggregate.theBinary = new byte[][] { new byte[]{ 1 } };
aggregate.theDate = new java.sql.Date[]{ new java.sql.Date( 2000 - 1900, 0, 1 ) };
aggregate.theTime = new Time[]{ new Time( 1, 0, 0 ) };
aggregate.theTimestamp = new Timestamp[]{ new Timestamp( 2000 - 1900, 0, 1, 1, 0, 0, 1000 ) };
aggregate.theInstant = new Instant[]{LocalDateTime.of( 2000, 1, 1, 0, 0, 0 ).toInstant( ZoneOffset.UTC ) };
aggregate.theUuid = new UUID[]{ UUID.fromString( "53886a8a-7082-4879-b430-25cb94415be8" ) };
aggregate.gender = new EntityOfBasics.Gender[]{ EntityOfBasics.Gender.FEMALE };
aggregate.convertedGender = new EntityOfBasics.Gender[]{ EntityOfBasics.Gender.MALE };
aggregate.ordinalGender = new EntityOfBasics.Gender[]{ EntityOfBasics.Gender.OTHER };
aggregate.theDuration = new Duration[]{ Duration.ofHours( 1 ) };
aggregate.theLocalDateTime = new LocalDateTime[]{ LocalDateTime.of( 2000, 1, 1, 0, 0, 0 ) };
aggregate.theLocalDate = new LocalDate[]{ LocalDate.of( 2000, 1, 1 ) };
aggregate.theLocalTime = new LocalTime[]{ LocalTime.of( 1, 0, 0 ) };
aggregate.theZonedDateTime = new ZonedDateTime[]{ LocalDateTime.of( 2000, 1, 1, 0, 0, 0 ).atZone( ZoneOffset.UTC ) };
aggregate.theOffsetDateTime = new OffsetDateTime[]{ LocalDateTime.of( 2000, 1, 1, 0, 0, 0 ).atOffset( ZoneOffset.UTC ) };
aggregate.mutableValue = new MutableValue[]{ new MutableValue( "some state" ) };
return aggregate;
}
public static EmbeddableWithArrayAggregate createAggregate2() {
final EmbeddableWithArrayAggregate aggregate = new EmbeddableWithArrayAggregate();
aggregate.theString = new String[]{ "String 'abc'" };
return aggregate;
}
public static EmbeddableWithArrayAggregate createAggregate3() {
final EmbeddableWithArrayAggregate aggregate = new EmbeddableWithArrayAggregate();
aggregate.theString = new String[]{ "ABC" };
aggregate.theBinary = new byte[][] { new byte[]{ 1 } };
aggregate.theUuid = new UUID[]{ UUID.fromString( "53886a8a-7082-4879-b430-25cb94415be8" ) };
aggregate.theLocalDateTime = new LocalDateTime[]{ LocalDateTime.of( 2022, 12, 1, 1, 0, 0 ) };
return aggregate;
}
@Override
public int hashCode() {
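// Constant hash code: still consistent with equals(), which is sufficient for this test fixture.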
return 1;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
EmbeddableWithArrayAggregate that = (EmbeddableWithArrayAggregate) o;
if ( !Arrays.equals( theBoolean, that.theBoolean ) ) {
return false;
}
if ( !Arrays.equals( theNumericBoolean, that.theNumericBoolean ) ) {
return false;
}
if ( !Arrays.equals( theStringBoolean, that.theStringBoolean ) ) {
return false;
}
if ( !Arrays.equals( theString, that.theString ) ) {
return false;
}
if ( !Arrays.equals( theInteger, that.theInteger ) ) {
return false;
}
if ( !Arrays.equals( theInt, that.theInt ) ) {
return false;
}
if ( !Arrays.equals( theDouble, that.theDouble ) ) {
return false;
}
if ( !Arrays.equals( theUrl, that.theUrl ) ) {
return false;
}
if ( !Arrays.equals( theClob, that.theClob ) ) {
return false;
}
if ( !Arrays.deepEquals( theBinary, that.theBinary ) ) {
return false;
}
if ( !Arrays.equals( theDate, that.theDate ) ) {
return false;
}
if ( !Arrays.equals( theTime, that.theTime ) ) {
return false;
}
if ( !Arrays.equals( theTimestamp, that.theTimestamp ) ) {
return false;
}
if ( !Arrays.equals( theInstant, that.theInstant ) ) {
return false;
}
if ( !Arrays.equals( theUuid, that.theUuid ) ) {
return false;
}
if ( !Arrays.equals( gender, that.gender ) ) {
return false;
}
if ( !Arrays.equals( convertedGender, that.convertedGender ) ) {
return false;
}
if ( !Arrays.equals( ordinalGender, that.ordinalGender ) ) {
return false;
}
if ( !Arrays.equals( theDuration, that.theDuration ) ) {
return false;
}
if ( !Arrays.equals( theLocalDateTime, that.theLocalDateTime ) ) {
return false;
}
if ( !Arrays.equals( theLocalDate, that.theLocalDate ) ) {
return false;
}
if ( !Arrays.equals( theLocalTime, that.theLocalTime ) ) {
return false;
}
if ( !Arrays.equals( theZonedDateTime, that.theZonedDateTime ) ) {
return false;
}
if ( !Arrays.equals( theOffsetDateTime, that.theOffsetDateTime ) ) {
return false;
}
return Arrays.equals( mutableValue, that.mutableValue );
}
}
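The JSON-based test that follows persists this embeddable through a JsonHolder entity whose source sits outside this excerpt. Below is a minimal sketch of what that holder presumably looks like, assuming the aggregate is mapped to a single JSON column via @JdbcTypeCode(SqlTypes.JSON); the constructor and accessors are inferred from how the test uses the class, not copied from the commit.
package org.hibernate.orm.test.mapping.embeddable;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
// Sketch only: a holder entity storing the array aggregate as one JSON column.
@Entity(name = "JsonHolder")
public class JsonHolder {
	@Id
	private Long id;
	// Assumption: SqlTypes.JSON makes the embeddable a JSON aggregate column.
	@JdbcTypeCode(SqlTypes.JSON)
	private EmbeddableWithArrayAggregate aggregate;
	public JsonHolder() {
	}
	public JsonHolder(Long id, EmbeddableWithArrayAggregate aggregate) {
		this.id = id;
		this.aggregate = aggregate;
	}
	public Long getId() {
		return id;
	}
	public EmbeddableWithArrayAggregate getAggregate() {
		return aggregate;
	}
	public void setAggregate(EmbeddableWithArrayAggregate aggregate) {
		this.aggregate = aggregate;
	}
}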

View File

@ -6,7 +6,7 @@
*/
package org.hibernate.orm.test.mapping.embeddable;
import java.sql.Clob;
import java.net.URL;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
@ -144,6 +144,7 @@ public class JsonEmbeddableTest extends BaseSessionFactoryFunctionalTest {
"b.aggregate.theStringBoolean," +
"b.aggregate.theString," +
"b.aggregate.theInteger," +
"b.aggregate.theUrl," +
"b.aggregate.theClob," +
"b.aggregate.theBinary," +
"b.aggregate.theDate," +
@ -174,23 +175,24 @@ public class JsonEmbeddableTest extends BaseSessionFactoryFunctionalTest {
struct.setTheStringBoolean( tuple.get( 4, Boolean.class ) );
struct.setTheString( tuple.get( 5, String.class ) );
struct.setTheInteger( tuple.get( 6, Integer.class ) );
struct.setTheClob( tuple.get( 7, Clob.class ) );
struct.setTheBinary( tuple.get( 8, byte[].class ) );
struct.setTheDate( tuple.get( 9, Date.class ) );
struct.setTheTime( tuple.get( 10, Time.class ) );
struct.setTheTimestamp( tuple.get( 11, Timestamp.class ) );
struct.setTheInstant( tuple.get( 12, Instant.class ) );
struct.setTheUuid( tuple.get( 13, UUID.class ) );
struct.setGender( tuple.get( 14, EntityOfBasics.Gender.class ) );
struct.setConvertedGender( tuple.get( 15, EntityOfBasics.Gender.class ) );
struct.setOrdinalGender( tuple.get( 16, EntityOfBasics.Gender.class ) );
struct.setTheDuration( tuple.get( 17, Duration.class ) );
struct.setTheLocalDateTime( tuple.get( 18, LocalDateTime.class ) );
struct.setTheLocalDate( tuple.get( 19, LocalDate.class ) );
struct.setTheLocalTime( tuple.get( 20, LocalTime.class ) );
struct.setTheZonedDateTime( tuple.get( 21, ZonedDateTime.class ) );
struct.setTheOffsetDateTime( tuple.get( 22, OffsetDateTime.class ) );
struct.setMutableValue( tuple.get( 23, MutableValue.class ) );
struct.setTheUrl( tuple.get( 7, URL.class ) );
struct.setTheClob( tuple.get( 8, String.class ) );
struct.setTheBinary( tuple.get( 9, byte[].class ) );
struct.setTheDate( tuple.get( 10, Date.class ) );
struct.setTheTime( tuple.get( 11, Time.class ) );
struct.setTheTimestamp( tuple.get( 12, Timestamp.class ) );
struct.setTheInstant( tuple.get( 13, Instant.class ) );
struct.setTheUuid( tuple.get( 14, UUID.class ) );
struct.setGender( tuple.get( 15, EntityOfBasics.Gender.class ) );
struct.setConvertedGender( tuple.get( 16, EntityOfBasics.Gender.class ) );
struct.setOrdinalGender( tuple.get( 17, EntityOfBasics.Gender.class ) );
struct.setTheDuration( tuple.get( 18, Duration.class ) );
struct.setTheLocalDateTime( tuple.get( 19, LocalDateTime.class ) );
struct.setTheLocalDate( tuple.get( 20, LocalDate.class ) );
struct.setTheLocalTime( tuple.get( 21, LocalTime.class ) );
struct.setTheZonedDateTime( tuple.get( 22, ZonedDateTime.class ) );
struct.setTheOffsetDateTime( tuple.get( 23, OffsetDateTime.class ) );
struct.setMutableValue( tuple.get( 24, MutableValue.class ) );
EmbeddableAggregate.assertEquals( EmbeddableAggregate.createAggregate1(), struct );
}
);
@ -259,6 +261,7 @@ public class JsonEmbeddableTest extends BaseSessionFactoryFunctionalTest {
"b.aggregate.theStringBoolean = :theStringBoolean," +
"b.aggregate.theString = :theString," +
"b.aggregate.theInteger = :theInteger," +
"b.aggregate.theUrl = :theUrl," +
"b.aggregate.theClob = :theClob," +
"b.aggregate.theBinary = :theBinary," +
"b.aggregate.theDate = :theDate," +
@ -285,6 +288,7 @@ public class JsonEmbeddableTest extends BaseSessionFactoryFunctionalTest {
.setParameter( "theStringBoolean", struct.isTheStringBoolean() )
.setParameter( "theString", struct.getTheString() )
.setParameter( "theInteger", struct.getTheInteger() )
.setParameter( "theUrl", struct.getTheUrl() )
.setParameter( "theClob", struct.getTheClob() )
.setParameter( "theBinary", struct.getTheBinary() )
.setParameter( "theDate", struct.getTheDate() )

View File

@ -0,0 +1,354 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.orm.test.mapping.embeddable;
import java.net.URL;
import java.sql.Clob;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.annotations.Struct;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.procedure.ProcedureCall;
import org.hibernate.query.procedure.ProcedureParameter;
import org.hibernate.type.SqlTypes;
import org.hibernate.testing.jdbc.SharedDriverManagerTypeCacheClearingIntegrator;
import org.hibernate.testing.orm.domain.gambit.EntityOfBasics;
import org.hibernate.testing.orm.domain.gambit.MutableValue;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.JiraKey;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.ParameterMode;
import jakarta.persistence.Tuple;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertInstanceOf;
import static org.junit.jupiter.api.Assertions.assertNull;
@JiraKey("HHH-15862")
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJsonAggregate.class)
@BootstrapServiceRegistry(
// Clear the type cache, otherwise we might run into ORA-21700: object does not exist or is marked for delete
integrators = SharedDriverManagerTypeCacheClearingIntegrator.class
)
// Don't reorder columns in the types here to avoid the need to rewrite the test
@ServiceRegistry(settings = @Setting(name = AvailableSettings.COLUMN_ORDERING_STRATEGY, value = "legacy"))
@DomainModel(annotatedClasses = JsonWithArrayEmbeddableTest.JsonHolder.class)
@SessionFactory
public class JsonWithArrayEmbeddableTest {
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
session.persist( new JsonHolder( 1L, EmbeddableWithArrayAggregate.createAggregate1() ) );
session.persist( new JsonHolder( 2L, EmbeddableWithArrayAggregate.createAggregate2() ) );
}
);
}
@AfterEach
protected void cleanupTest(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
session.createMutationQuery( "delete from JsonHolder h" ).executeUpdate();
}
);
}
@Test
public void testUpdate(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
JsonHolder jsonHolder = entityManager.find( JsonHolder.class, 1L );
jsonHolder.setAggregate( EmbeddableWithArrayAggregate.createAggregate2() );
entityManager.flush();
entityManager.clear();
EmbeddableWithArrayAggregate.assertEquals( EmbeddableWithArrayAggregate.createAggregate2(), entityManager.find( JsonHolder.class, 1L ).getAggregate() );
}
);
}
@Test
public void testFetch(SessionFactoryScope scope) {
scope.inSession(
entityManager -> {
List<JsonHolder> jsonHolders = entityManager.createQuery( "from JsonHolder b where b.id = 1", JsonHolder.class ).getResultList();
assertEquals( 1, jsonHolders.size() );
assertEquals( 1L, jsonHolders.get( 0 ).getId() );
EmbeddableWithArrayAggregate.assertEquals( EmbeddableWithArrayAggregate.createAggregate1(), jsonHolders.get( 0 ).getAggregate() );
}
);
}
@Test
public void testFetchNull(SessionFactoryScope scope) {
scope.inSession(
entityManager -> {
List<JsonHolder> jsonHolders = entityManager.createQuery( "from JsonHolder b where b.id = 2", JsonHolder.class ).getResultList();
assertEquals( 1, jsonHolders.size() );
assertEquals( 2L, jsonHolders.get( 0 ).getId() );
EmbeddableWithArrayAggregate.assertEquals( EmbeddableWithArrayAggregate.createAggregate2(), jsonHolders.get( 0 ).getAggregate() );
}
);
}
@Test
public void testDomainResult(SessionFactoryScope scope) {
scope.inSession(
entityManager -> {
List<EmbeddableWithArrayAggregate> structs = entityManager.createQuery( "select b.aggregate from JsonHolder b where b.id = 1", EmbeddableWithArrayAggregate.class ).getResultList();
assertEquals( 1, structs.size() );
EmbeddableWithArrayAggregate.assertEquals( EmbeddableWithArrayAggregate.createAggregate1(), structs.get( 0 ) );
}
);
}
@Test
public void testSelectionItems(SessionFactoryScope scope) {
scope.inSession(
entityManager -> {
List<Tuple> tuples = entityManager.createQuery(
"select " +
"b.aggregate.theInt," +
"b.aggregate.theDouble," +
"b.aggregate.theBoolean," +
"b.aggregate.theNumericBoolean," +
"b.aggregate.theStringBoolean," +
"b.aggregate.theString," +
"b.aggregate.theInteger," +
"b.aggregate.theUrl," +
"b.aggregate.theClob," +
"b.aggregate.theBinary," +
"b.aggregate.theDate," +
"b.aggregate.theTime," +
"b.aggregate.theTimestamp," +
"b.aggregate.theInstant," +
"b.aggregate.theUuid," +
"b.aggregate.gender," +
"b.aggregate.convertedGender," +
"b.aggregate.ordinalGender," +
"b.aggregate.theDuration," +
"b.aggregate.theLocalDateTime," +
"b.aggregate.theLocalDate," +
"b.aggregate.theLocalTime," +
"b.aggregate.theZonedDateTime," +
"b.aggregate.theOffsetDateTime," +
"b.aggregate.mutableValue " +
"from JsonHolder b where b.id = 1",
Tuple.class
).getResultList();
assertEquals( 1, tuples.size() );
final Tuple tuple = tuples.get( 0 );
final EmbeddableWithArrayAggregate struct = new EmbeddableWithArrayAggregate();
struct.setTheInt( tuple.get( 0, int[].class ) );
struct.setTheDouble( tuple.get( 1, double[].class ) );
struct.setTheBoolean( tuple.get( 2, Boolean[].class ) );
struct.setTheNumericBoolean( tuple.get( 3, Boolean[].class ) );
struct.setTheStringBoolean( tuple.get( 4, Boolean[].class ) );
struct.setTheString( tuple.get( 5, String[].class ) );
struct.setTheInteger( tuple.get( 6, Integer[].class ) );
struct.setTheUrl( tuple.get( 7, URL[].class ) );
struct.setTheClob( tuple.get( 8, String[].class ) );
struct.setTheBinary( tuple.get( 9, byte[][].class ) );
struct.setTheDate( tuple.get( 10, Date[].class ) );
struct.setTheTime( tuple.get( 11, Time[].class ) );
struct.setTheTimestamp( tuple.get( 12, Timestamp[].class ) );
struct.setTheInstant( tuple.get( 13, Instant[].class ) );
struct.setTheUuid( tuple.get( 14, UUID[].class ) );
struct.setGender( tuple.get( 15, EntityOfBasics.Gender[].class ) );
struct.setConvertedGender( tuple.get( 16, EntityOfBasics.Gender[].class ) );
struct.setOrdinalGender( tuple.get( 17, EntityOfBasics.Gender[].class ) );
struct.setTheDuration( tuple.get( 18, Duration[].class ) );
struct.setTheLocalDateTime( tuple.get( 19, LocalDateTime[].class ) );
struct.setTheLocalDate( tuple.get( 20, LocalDate[].class ) );
struct.setTheLocalTime( tuple.get( 21, LocalTime[].class ) );
struct.setTheZonedDateTime( tuple.get( 22, ZonedDateTime[].class ) );
struct.setTheOffsetDateTime( tuple.get( 23, OffsetDateTime[].class ) );
struct.setMutableValue( tuple.get( 24, MutableValue[].class ) );
EmbeddableWithArrayAggregate.assertEquals( EmbeddableWithArrayAggregate.createAggregate1(), struct );
}
);
}
@Test
public void testDeleteWhere(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.createMutationQuery( "delete JsonHolder b where b.aggregate is not null" ).executeUpdate();
assertNull( entityManager.find( JsonHolder.class, 1L ) );
}
);
}
@Test
public void testUpdateAggregate(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.createMutationQuery( "update JsonHolder b set b.aggregate = null" ).executeUpdate();
assertNull( entityManager.find( JsonHolder.class, 1L ).getAggregate() );
}
);
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJsonComponentUpdate.class)
public void testUpdateAggregateMember(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.createMutationQuery( "update JsonHolder b set b.aggregate.theString = null" ).executeUpdate();
EmbeddableWithArrayAggregate struct = EmbeddableWithArrayAggregate.createAggregate1();
struct.setTheString( null );
EmbeddableWithArrayAggregate.assertEquals( struct, entityManager.find( JsonHolder.class, 1L ).getAggregate() );
}
);
}
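// Updates two members of the JSON aggregate in a single bulk update and verifies both changes.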
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJsonComponentUpdate.class)
public void testUpdateMultipleAggregateMembers(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.createMutationQuery( "update JsonHolder b set b.aggregate.theString = null, b.aggregate.theUuid = null" ).executeUpdate();
EmbeddableWithArrayAggregate struct = EmbeddableWithArrayAggregate.createAggregate1();
struct.setTheString( null );
struct.setTheUuid( null );
EmbeddableWithArrayAggregate.assertEquals( struct, entityManager.find( JsonHolder.class, 1L ).getAggregate() );
}
);
}
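// Assigns every member of the JSON aggregate from named parameters on the row with id 2 and verifies the stored value matches createAggregate1().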
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJsonComponentUpdate.class)
public void testUpdateAllAggregateMembers(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
EmbeddableWithArrayAggregate struct = EmbeddableWithArrayAggregate.createAggregate1();
entityManager.createMutationQuery(
"update JsonHolder b set " +
"b.aggregate.theInt = :theInt," +
"b.aggregate.theDouble = :theDouble," +
"b.aggregate.theBoolean = :theBoolean," +
"b.aggregate.theNumericBoolean = :theNumericBoolean," +
"b.aggregate.theStringBoolean = :theStringBoolean," +
"b.aggregate.theString = :theString," +
"b.aggregate.theInteger = :theInteger," +
"b.aggregate.theUrl = :theUrl," +
"b.aggregate.theClob = :theClob," +
"b.aggregate.theBinary = :theBinary," +
"b.aggregate.theDate = :theDate," +
"b.aggregate.theTime = :theTime," +
"b.aggregate.theTimestamp = :theTimestamp," +
"b.aggregate.theInstant = :theInstant," +
"b.aggregate.theUuid = :theUuid," +
"b.aggregate.gender = :gender," +
"b.aggregate.convertedGender = :convertedGender," +
"b.aggregate.ordinalGender = :ordinalGender," +
"b.aggregate.theDuration = :theDuration," +
"b.aggregate.theLocalDateTime = :theLocalDateTime," +
"b.aggregate.theLocalDate = :theLocalDate," +
"b.aggregate.theLocalTime = :theLocalTime," +
"b.aggregate.theZonedDateTime = :theZonedDateTime," +
"b.aggregate.theOffsetDateTime = :theOffsetDateTime," +
"b.aggregate.mutableValue = :mutableValue " +
"where b.id = 2"
)
.setParameter( "theInt", struct.getTheInt() )
.setParameter( "theDouble", struct.getTheDouble() )
.setParameter( "theBoolean", struct.isTheBoolean() )
.setParameter( "theNumericBoolean", struct.isTheNumericBoolean() )
.setParameter( "theStringBoolean", struct.isTheStringBoolean() )
.setParameter( "theString", struct.getTheString() )
.setParameter( "theInteger", struct.getTheInteger() )
.setParameter( "theUrl", struct.getTheUrl() )
.setParameter( "theClob", struct.getTheClob() )
.setParameter( "theBinary", struct.getTheBinary() )
.setParameter( "theDate", struct.getTheDate() )
.setParameter( "theTime", struct.getTheTime() )
.setParameter( "theTimestamp", struct.getTheTimestamp() )
.setParameter( "theInstant", struct.getTheInstant() )
.setParameter( "theUuid", struct.getTheUuid() )
.setParameter( "gender", struct.getGender() )
.setParameter( "convertedGender", struct.getConvertedGender() )
.setParameter( "ordinalGender", struct.getOrdinalGender() )
.setParameter( "theDuration", struct.getTheDuration() )
.setParameter( "theLocalDateTime", struct.getTheLocalDateTime() )
.setParameter( "theLocalDate", struct.getTheLocalDate() )
.setParameter( "theLocalTime", struct.getTheLocalTime() )
.setParameter( "theZonedDateTime", struct.getTheZonedDateTime() )
.setParameter( "theOffsetDateTime", struct.getTheOffsetDateTime() )
.setParameter( "mutableValue", struct.getMutableValue() )
.executeUpdate();
EmbeddableWithArrayAggregate.assertEquals( EmbeddableWithArrayAggregate.createAggregate1(), entityManager.find( JsonHolder.class, 2L ).getAggregate() );
}
);
}
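// Holder entity mapping the array-valued embeddable as a JSON aggregate via @JdbcTypeCode(SqlTypes.JSON).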
@Entity(name = "JsonHolder")
public static class JsonHolder {
@Id
private Long id;
@JdbcTypeCode(SqlTypes.JSON)
private EmbeddableWithArrayAggregate aggregate;
public JsonHolder() {
}
public JsonHolder(Long id, EmbeddableWithArrayAggregate aggregate) {
this.id = id;
this.aggregate = aggregate;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public EmbeddableWithArrayAggregate getAggregate() {
return aggregate;
}
public void setAggregate(EmbeddableWithArrayAggregate aggregate) {
this.aggregate = aggregate;
}
}
}

View File

@@ -6,6 +6,7 @@
*/
package org.hibernate.orm.test.mapping.embeddable;
import java.net.URL;
import java.sql.Clob;
import java.sql.Time;
import java.sql.Timestamp;
@@ -147,6 +148,7 @@ public class NestedJsonEmbeddableTest extends BaseSessionFactoryFunctionalTest {
"b.theJson.nested.theStringBoolean," +
"b.theJson.nested.theString," +
"b.theJson.nested.theInteger," +
"b.theJson.nested.theUrl," +
"b.theJson.nested.theClob," +
"b.theJson.nested.theBinary," +
"b.theJson.nested.theDate," +
@@ -181,29 +183,30 @@ public class NestedJsonEmbeddableTest extends BaseSessionFactoryFunctionalTest {
struct.setTheStringBoolean( tuple.get( 4, Boolean.class ) );
struct.setTheString( tuple.get( 5, String.class ) );
struct.setTheInteger( tuple.get( 6, Integer.class ) );
struct.setTheClob( tuple.get( 7, Clob.class ) );
struct.setTheBinary( tuple.get( 8, byte[].class ) );
struct.setTheDate( tuple.get( 9, Date.class ) );
struct.setTheTime( tuple.get( 10, Time.class ) );
struct.setTheTimestamp( tuple.get( 11, Timestamp.class ) );
struct.setTheInstant( tuple.get( 12, Instant.class ) );
struct.setTheUuid( tuple.get( 13, UUID.class ) );
struct.setGender( tuple.get( 14, EntityOfBasics.Gender.class ) );
struct.setConvertedGender( tuple.get( 15, EntityOfBasics.Gender.class ) );
struct.setOrdinalGender( tuple.get( 16, EntityOfBasics.Gender.class ) );
struct.setTheDuration( tuple.get( 17, Duration.class ) );
struct.setTheLocalDateTime( tuple.get( 18, LocalDateTime.class ) );
struct.setTheLocalDate( tuple.get( 19, LocalDate.class ) );
struct.setTheLocalTime( tuple.get( 20, LocalTime.class ) );
struct.setTheZonedDateTime( tuple.get( 21, ZonedDateTime.class ) );
struct.setTheOffsetDateTime( tuple.get( 22, OffsetDateTime.class ) );
struct.setMutableValue( tuple.get( 23, MutableValue.class ) );
struct.setTheUrl( tuple.get( 7, URL.class ) );
struct.setTheClob( tuple.get( 8, String.class ) );
struct.setTheBinary( tuple.get( 9, byte[].class ) );
struct.setTheDate( tuple.get( 10, Date.class ) );
struct.setTheTime( tuple.get( 11, Time.class ) );
struct.setTheTimestamp( tuple.get( 12, Timestamp.class ) );
struct.setTheInstant( tuple.get( 13, Instant.class ) );
struct.setTheUuid( tuple.get( 14, UUID.class ) );
struct.setGender( tuple.get( 15, EntityOfBasics.Gender.class ) );
struct.setConvertedGender( tuple.get( 16, EntityOfBasics.Gender.class ) );
struct.setOrdinalGender( tuple.get( 17, EntityOfBasics.Gender.class ) );
struct.setTheDuration( tuple.get( 18, Duration.class ) );
struct.setTheLocalDateTime( tuple.get( 19, LocalDateTime.class ) );
struct.setTheLocalDate( tuple.get( 20, LocalDate.class ) );
struct.setTheLocalTime( tuple.get( 21, LocalTime.class ) );
struct.setTheZonedDateTime( tuple.get( 22, ZonedDateTime.class ) );
struct.setTheOffsetDateTime( tuple.get( 23, OffsetDateTime.class ) );
struct.setMutableValue( tuple.get( 24, MutableValue.class ) );
EmbeddableAggregate.assertEquals( EmbeddableAggregate.createAggregate1(), struct );
SimpleEmbeddable simpleEmbeddable = tuple.get( 24, SimpleEmbeddable.class );
assertEquals( simpleEmbeddable.doubleNested, tuple.get( 25, DoubleNested.class ) );
assertEquals( simpleEmbeddable.doubleNested.theNested, tuple.get( 26, Nested.class ) );
assertEquals( simpleEmbeddable.doubleNested.theNested.theLeaf, tuple.get( 27, Leaf.class ) );
SimpleEmbeddable simpleEmbeddable = tuple.get( 25, SimpleEmbeddable.class );
assertEquals( simpleEmbeddable.doubleNested, tuple.get( 26, DoubleNested.class ) );
assertEquals( simpleEmbeddable.doubleNested.theNested, tuple.get( 27, Nested.class ) );
assertEquals( simpleEmbeddable.doubleNested.theNested.theLeaf, tuple.get( 28, Leaf.class ) );
assertEquals( 10, simpleEmbeddable.integerField );
assertEquals( "String \"<abc>A&B</abc>\"", simpleEmbeddable.doubleNested.theNested.theLeaf.stringField );
}
@@ -335,6 +338,7 @@ public class NestedJsonEmbeddableTest extends BaseSessionFactoryFunctionalTest {
"b.theJson.nested.theStringBoolean = :theStringBoolean," +
"b.theJson.nested.theString = :theString," +
"b.theJson.nested.theInteger = :theInteger," +
"b.theJson.nested.theUrl = :theUrl," +
"b.theJson.nested.theClob = :theClob," +
"b.theJson.nested.theBinary = :theBinary," +
"b.theJson.nested.theDate = :theDate," +
@@ -362,6 +366,7 @@ public class NestedJsonEmbeddableTest extends BaseSessionFactoryFunctionalTest {
.setParameter( "theStringBoolean", struct.isTheStringBoolean() )
.setParameter( "theString", struct.getTheString() )
.setParameter( "theInteger", struct.getTheInteger() )
.setParameter( "theUrl", struct.getTheUrl() )
.setParameter( "theClob", struct.getTheClob() )
.setParameter( "theBinary", struct.getTheBinary() )
.setParameter( "theDate", struct.getTheDate() )

Some files were not shown because too many files have changed in this diff.