HHH-15327 Support mapping aggregate embeddables as struct/json

This commit is contained in:
Christian Beikov 2022-11-28 21:23:01 +01:00
parent 74f3c1715b
commit bc36eb3eeb
227 changed files with 14515 additions and 1191 deletions

View File

@@ -0,0 +1,469 @@
package org.hibernate.userguide.mapping.embeddable;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.Clob;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Date;
import java.util.Objects;
import java.util.UUID;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.testing.orm.domain.gambit.EntityOfBasics;
import org.hibernate.testing.orm.domain.gambit.MutableValue;
import org.junit.jupiter.api.Assertions;
import jakarta.persistence.Access;
import jakarta.persistence.AccessType;
import jakarta.persistence.Column;
import jakarta.persistence.Convert;
import jakarta.persistence.Embeddable;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.Temporal;
import jakarta.persistence.TemporalType;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
/**
 * Embeddable test type covering a broad spread of basic Java/JDBC types
 * (booleans in three JDBC encodings, temporals, LOBs, enums with different
 * mappings, a converted mutable value, ...), used to exercise aggregate
 * (struct/json) embeddable mappings.
 * <p>
 * {@link AccessType#PROPERTY} is used so that the mapping annotations placed
 * on the getters below take effect.
 */
@Embeddable
@Access( AccessType.PROPERTY )
public class EmbeddableAggregate {
	private Boolean theBoolean = false;
	private Boolean theNumericBoolean = false;
	private Boolean theStringBoolean = false;
	private String theString;
	private Integer theInteger;
	private int theInt;
	private double theDouble;
	private URL theUrl;
	private Clob theClob;
	private byte[] theBinary;
	private Date theDate;
	private Date theTime;
	private Date theTimestamp;
	private Instant theInstant;
	private UUID theUuid;
	private EntityOfBasics.Gender gender;
	private EntityOfBasics.Gender convertedGender;
	private EntityOfBasics.Gender ordinalGender;
	private Duration theDuration;
	private LocalDateTime theLocalDateTime;
	private LocalDate theLocalDate;
	private LocalTime theLocalTime;
	private ZonedDateTime theZonedDateTime;
	private OffsetDateTime theOffsetDateTime;
	private MutableValue mutableValue;

	public EmbeddableAggregate() {
	}

	public String getTheString() {
		return theString;
	}

	public void setTheString(String theString) {
		this.theString = theString;
	}

	public Integer getTheInteger() {
		return theInteger;
	}

	public void setTheInteger(Integer theInteger) {
		this.theInteger = theInteger;
	}

	public int getTheInt() {
		return theInt;
	}

	public void setTheInt(int theInt) {
		this.theInt = theInt;
	}

	public double getTheDouble() {
		return theDouble;
	}

	public void setTheDouble(double theDouble) {
		this.theDouble = theDouble;
	}

	public URL getTheUrl() {
		return theUrl;
	}

	public void setTheUrl(URL theUrl) {
		this.theUrl = theUrl;
	}

	public Clob getTheClob() {
		return theClob;
	}

	public void setTheClob(Clob theClob) {
		this.theClob = theClob;
	}

	public byte[] getTheBinary() {
		return theBinary;
	}

	public void setTheBinary(byte[] theBinary) {
		this.theBinary = theBinary;
	}

	// Enum mapped by name (VARCHAR column)
	@Enumerated( EnumType.STRING )
	public EntityOfBasics.Gender getGender() {
		return gender;
	}

	public void setGender(EntityOfBasics.Gender gender) {
		this.gender = gender;
	}

	// Enum mapped through an AttributeConverter to a single CHAR column
	@Convert( converter = EntityOfBasics.GenderConverter.class )
	@Column(name = "converted_gender", length = 1)
	@JdbcTypeCode( Types.CHAR )
	public EntityOfBasics.Gender getConvertedGender() {
		return convertedGender;
	}

	public void setConvertedGender(EntityOfBasics.Gender convertedGender) {
		this.convertedGender = convertedGender;
	}

	// Enum mapped by ordinal (the default, no @Enumerated)
	@Column(name = "ordinal_gender")
	public EntityOfBasics.Gender getOrdinalGender() {
		return ordinalGender;
	}

	public void setOrdinalGender(EntityOfBasics.Gender ordinalGender) {
		this.ordinalGender = ordinalGender;
	}

	@Temporal( TemporalType.DATE )
	public Date getTheDate() {
		return theDate;
	}

	public void setTheDate(Date theDate) {
		this.theDate = theDate;
	}

	@Temporal( TemporalType.TIME )
	public Date getTheTime() {
		return theTime;
	}

	public void setTheTime(Date theTime) {
		this.theTime = theTime;
	}

	@Temporal( TemporalType.TIMESTAMP )
	public Date getTheTimestamp() {
		return theTimestamp;
	}

	public void setTheTimestamp(Date theTimestamp) {
		this.theTimestamp = theTimestamp;
	}

	@Temporal( TemporalType.TIMESTAMP )
	public Instant getTheInstant() {
		return theInstant;
	}

	public void setTheInstant(Instant theInstant) {
		this.theInstant = theInstant;
	}

	public UUID getTheUuid() {
		return theUuid;
	}

	public void setTheUuid(UUID theUuid) {
		this.theUuid = theUuid;
	}

	public LocalDateTime getTheLocalDateTime() {
		return theLocalDateTime;
	}

	public void setTheLocalDateTime(LocalDateTime theLocalDateTime) {
		this.theLocalDateTime = theLocalDateTime;
	}

	public LocalDate getTheLocalDate() {
		return theLocalDate;
	}

	public void setTheLocalDate(LocalDate theLocalDate) {
		this.theLocalDate = theLocalDate;
	}

	public LocalTime getTheLocalTime() {
		return theLocalTime;
	}

	public void setTheLocalTime(LocalTime theLocalTime) {
		this.theLocalTime = theLocalTime;
	}

	public OffsetDateTime getTheOffsetDateTime() {
		return theOffsetDateTime;
	}

	public void setTheOffsetDateTime(OffsetDateTime theOffsetDateTime) {
		this.theOffsetDateTime = theOffsetDateTime;
	}

	public ZonedDateTime getTheZonedDateTime() {
		return theZonedDateTime;
	}

	public void setTheZonedDateTime(ZonedDateTime theZonedDateTime) {
		this.theZonedDateTime = theZonedDateTime;
	}

	public Duration getTheDuration() {
		return theDuration;
	}

	public void setTheDuration(Duration theDuration) {
		this.theDuration = theDuration;
	}

	// Default boolean mapping
	public Boolean isTheBoolean() {
		return theBoolean;
	}

	public void setTheBoolean(Boolean theBoolean) {
		this.theBoolean = theBoolean;
	}

	// Boolean stored as an INTEGER column (0/1)
	@JdbcTypeCode( Types.INTEGER )
	public Boolean isTheNumericBoolean() {
		return theNumericBoolean;
	}

	public void setTheNumericBoolean(Boolean theNumericBoolean) {
		this.theNumericBoolean = theNumericBoolean;
	}

	// Boolean stored as a CHAR column
	@JdbcTypeCode( Types.CHAR )
	public Boolean isTheStringBoolean() {
		return theStringBoolean;
	}

	public void setTheStringBoolean(Boolean theStringBoolean) {
		this.theStringBoolean = theStringBoolean;
	}

	@Convert( converter = EntityOfBasics.MutableValueConverter.class )
	public MutableValue getMutableValue() {
		return mutableValue;
	}

	public void setMutableValue(MutableValue mutableValue) {
		this.mutableValue = mutableValue;
	}

	/**
	 * Field-by-field assertion helper for round-trip tests.
	 * <p>
	 * Zoned/offset date-times are compared by {@code toInstant()} rather than
	 * {@code equals}, since the database may not preserve the original
	 * zone/offset on read-back.
	 */
	static void assertEquals(EmbeddableAggregate a1, EmbeddableAggregate a2) {
		Assertions.assertEquals( a1.theInt, a2.theInt );
		Assertions.assertEquals( a1.theDouble, a2.theDouble );
		Assertions.assertEquals( a1.theBoolean, a2.theBoolean );
		Assertions.assertEquals( a1.theNumericBoolean, a2.theNumericBoolean );
		Assertions.assertEquals( a1.theStringBoolean, a2.theStringBoolean );
		Assertions.assertEquals( a1.theString, a2.theString );
		Assertions.assertEquals( a1.theInteger, a2.theInteger );
		// Fix: theUrl is populated by createAggregate1() and compared in equals(),
		// but was previously missing from this assertion helper
		Assertions.assertEquals( a1.theUrl, a2.theUrl );
		Assertions.assertEquals( a1.theClob, a2.theClob );
		assertArrayEquals( a1.theBinary, a2.theBinary );
		Assertions.assertEquals( a1.theDate, a2.theDate );
		Assertions.assertEquals( a1.theTime, a2.theTime );
		Assertions.assertEquals( a1.theTimestamp, a2.theTimestamp );
		Assertions.assertEquals( a1.theInstant, a2.theInstant );
		Assertions.assertEquals( a1.theUuid, a2.theUuid );
		Assertions.assertEquals( a1.gender, a2.gender );
		Assertions.assertEquals( a1.convertedGender, a2.convertedGender );
		Assertions.assertEquals( a1.ordinalGender, a2.ordinalGender );
		Assertions.assertEquals( a1.theDuration, a2.theDuration );
		Assertions.assertEquals( a1.theLocalDateTime, a2.theLocalDateTime );
		Assertions.assertEquals( a1.theLocalDate, a2.theLocalDate );
		Assertions.assertEquals( a1.theLocalTime, a2.theLocalTime );
		if ( a1.theZonedDateTime == null ) {
			assertNull( a2.theZonedDateTime );
		}
		else {
			assertNotNull( a2.theZonedDateTime );
			Assertions.assertEquals( a1.theZonedDateTime.toInstant(), a2.theZonedDateTime.toInstant() );
		}
		if ( a1.theOffsetDateTime == null ) {
			assertNull( a2.theOffsetDateTime );
		}
		else {
			assertNotNull( a2.theOffsetDateTime );
			Assertions.assertEquals( a1.theOffsetDateTime.toInstant(), a2.theOffsetDateTime.toInstant() );
		}
		if ( a1.mutableValue == null ) {
			assertNull( a2.mutableValue );
		}
		else {
			assertNotNull( a2.mutableValue );
			Assertions.assertEquals( a1.mutableValue.getState(), a2.mutableValue.getState() );
		}
	}

	/**
	 * Creates a fully populated aggregate. The deprecated {@link java.sql.Date},
	 * {@link Time} and {@link Timestamp} constructors are used intentionally to
	 * produce exact, driver-independent legacy temporal values.
	 */
	public static EmbeddableAggregate createAggregate1() {
		final EmbeddableAggregate aggregate = new EmbeddableAggregate();
		aggregate.theBoolean = true;
		aggregate.theNumericBoolean = true;
		aggregate.theStringBoolean = true;
		aggregate.theString = "String \"<abc>A&B</abc>\"";
		aggregate.theInteger = -1;
		aggregate.theInt = Integer.MAX_VALUE;
		aggregate.theDouble = 1.3e20;
		try {
			aggregate.theUrl = new URL( "https://hibernate.org" );
		}
		catch (MalformedURLException e) {
			throw new RuntimeException( e );
		}
		aggregate.theBinary = new byte[] { 1 };
		aggregate.theDate = new java.sql.Date( 2000 - 1900, 0, 1 );
		aggregate.theTime = new Time( 1, 0, 0 );
		aggregate.theTimestamp = new Timestamp( 2000 - 1900, 0, 1, 1, 0, 0, 1000 );
		aggregate.theInstant = LocalDateTime.of( 2000, 1, 1, 0, 0, 0 ).toInstant( ZoneOffset.UTC );
		aggregate.theUuid = UUID.fromString( "53886a8a-7082-4879-b430-25cb94415be8" );
		aggregate.gender = EntityOfBasics.Gender.FEMALE;
		aggregate.convertedGender = EntityOfBasics.Gender.MALE;
		aggregate.ordinalGender = EntityOfBasics.Gender.OTHER;
		aggregate.theDuration = Duration.ofHours( 1 );
		aggregate.theLocalDateTime = LocalDateTime.of( 2000, 1, 1, 0, 0, 0 );
		aggregate.theLocalDate = LocalDate.of( 2000, 1, 1 );
		aggregate.theLocalTime = LocalTime.of( 1, 0, 0 );
		aggregate.theZonedDateTime = LocalDateTime.of( 2000, 1, 1, 0, 0, 0 ).atZone( ZoneOffset.UTC );
		aggregate.theOffsetDateTime = LocalDateTime.of( 2000, 1, 1, 0, 0, 0 ).atOffset( ZoneOffset.UTC );
		aggregate.mutableValue = new MutableValue( "some state" );
		return aggregate;
	}

	/**
	 * Creates a mostly-null aggregate (only {@code theString} set),
	 * for null-handling tests.
	 */
	public static EmbeddableAggregate createAggregate2() {
		final EmbeddableAggregate aggregate = new EmbeddableAggregate();
		aggregate.theString = "String 'abc'";
		return aggregate;
	}

	/**
	 * Creates a sparsely populated aggregate for a third test fixture.
	 */
	public static EmbeddableAggregate createAggregate3() {
		final EmbeddableAggregate aggregate = new EmbeddableAggregate();
		aggregate.theString = "ABC";
		aggregate.theBinary = new byte[] { 1 };
		aggregate.theUuid = UUID.fromString( "53886a8a-7082-4879-b430-25cb94415be8" );
		aggregate.theLocalDateTime = LocalDateTime.of( 2022, 12, 1, 1, 0, 0 );
		return aggregate;
	}

	@Override
	public boolean equals(Object o) {
		if ( this == o ) {
			return true;
		}
		if ( o == null || getClass() != o.getClass() ) {
			return false;
		}
		EmbeddableAggregate that = (EmbeddableAggregate) o;
		if ( theInt != that.theInt ) {
			return false;
		}
		if ( Double.compare( that.theDouble, theDouble ) != 0 ) {
			return false;
		}
		if ( !Objects.equals( theBoolean, that.theBoolean ) ) {
			return false;
		}
		if ( !Objects.equals( theNumericBoolean, that.theNumericBoolean ) ) {
			return false;
		}
		if ( !Objects.equals( theStringBoolean, that.theStringBoolean ) ) {
			return false;
		}
		if ( !Objects.equals( theString, that.theString ) ) {
			return false;
		}
		if ( !Objects.equals( theInteger, that.theInteger ) ) {
			return false;
		}
		if ( !Objects.equals( theUrl, that.theUrl ) ) {
			return false;
		}
		if ( !Objects.equals( theClob, that.theClob ) ) {
			return false;
		}
		if ( !Arrays.equals( theBinary, that.theBinary ) ) {
			return false;
		}
		if ( !Objects.equals( theDate, that.theDate ) ) {
			return false;
		}
		if ( !Objects.equals( theTime, that.theTime ) ) {
			return false;
		}
		if ( !Objects.equals( theTimestamp, that.theTimestamp ) ) {
			return false;
		}
		if ( !Objects.equals( theInstant, that.theInstant ) ) {
			return false;
		}
		if ( !Objects.equals( theUuid, that.theUuid ) ) {
			return false;
		}
		if ( gender != that.gender ) {
			return false;
		}
		if ( convertedGender != that.convertedGender ) {
			return false;
		}
		if ( ordinalGender != that.ordinalGender ) {
			return false;
		}
		if ( !Objects.equals( theDuration, that.theDuration ) ) {
			return false;
		}
		if ( !Objects.equals( theLocalDateTime, that.theLocalDateTime ) ) {
			return false;
		}
		if ( !Objects.equals( theLocalDate, that.theLocalDate ) ) {
			return false;
		}
		if ( !Objects.equals( theLocalTime, that.theLocalTime ) ) {
			return false;
		}
		if ( !Objects.equals( theZonedDateTime, that.theZonedDateTime ) ) {
			return false;
		}
		if ( !Objects.equals( theOffsetDateTime, that.theOffsetDateTime ) ) {
			return false;
		}
		return Objects.equals( mutableValue, that.mutableValue );
	}

	@Override
	public int hashCode() {
		// Fix: equals() was overridden without hashCode(), breaking the
		// equals/hashCode contract. Hash a stable subset of the fields compared
		// in equals(); equal instances necessarily agree on these, so the
		// contract holds while avoiding array/lob hashing pitfalls.
		return Objects.hash( theInt, theString, theInteger, theUuid, theLocalDateTime );
	}
}

View File

@@ -0,0 +1,336 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.userguide.mapping.embeddable;
import java.sql.Clob;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
import org.hibernate.testing.orm.domain.gambit.EntityOfBasics;
import org.hibernate.testing.orm.domain.gambit.MutableValue;
import org.hibernate.testing.orm.junit.BaseSessionFactoryFunctionalTest;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Tuple;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
/**
 * Functional tests for mapping an aggregate embeddable ({@code EmbeddableAggregate})
 * to a JSON column via {@code @JdbcTypeCode(SqlTypes.JSON)}.
 * <p>
 * Only runs on dialects that support JSON aggregates; the component-update tests
 * additionally require JSON component-update support.
 */
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJsonAggregate.class)
public class JsonEmbeddableTest extends BaseSessionFactoryFunctionalTest {
	@Override
	protected Class<?>[] getAnnotatedClasses() {
		return new Class<?>[] {
				JsonHolder.class
		};
	}

	// Seed fixture: id 1 holds a fully-populated aggregate, id 2 a mostly-null one.
	@BeforeEach
	public void setUp() {
		inTransaction(
				session -> {
					session.persist( new JsonHolder( 1L, EmbeddableAggregate.createAggregate1() ) );
					session.persist( new JsonHolder( 2L, EmbeddableAggregate.createAggregate2() ) );
				}
		);
	}

	@AfterEach
	protected void cleanupTest() {
		inTransaction(
				session -> {
					session.createQuery( "delete from JsonHolder h" ).executeUpdate();
				}
		);
	}

	// Replacing the whole aggregate must round-trip after flush/clear.
	@Test
	public void testUpdate() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					JsonHolder jsonHolder = entityManager.find( JsonHolder.class, 1L );
					jsonHolder.setAggregate( EmbeddableAggregate.createAggregate2() );
					entityManager.flush();
					entityManager.clear();
					EmbeddableAggregate.assertEquals( EmbeddableAggregate.createAggregate2(), entityManager.find( JsonHolder.class, 1L ).getAggregate() );
				}
		);
	}

	// Fetching the holder entity materializes the JSON aggregate.
	@Test
	public void testFetch() {
		sessionFactoryScope().inSession(
				entityManager -> {
					List<JsonHolder> jsonHolders = entityManager.createQuery( "from JsonHolder b where b.id = 1", JsonHolder.class ).getResultList();
					assertEquals( 1, jsonHolders.size() );
					assertEquals( 1L, jsonHolders.get( 0 ).getId() );
					EmbeddableAggregate.assertEquals( EmbeddableAggregate.createAggregate1(), jsonHolders.get( 0 ).getAggregate() );
				}
		);
	}

	// A mostly-null aggregate must also round-trip through JSON.
	@Test
	public void testFetchNull() {
		sessionFactoryScope().inSession(
				entityManager -> {
					List<JsonHolder> jsonHolders = entityManager.createQuery( "from JsonHolder b where b.id = 2", JsonHolder.class ).getResultList();
					assertEquals( 1, jsonHolders.size() );
					assertEquals( 2L, jsonHolders.get( 0 ).getId() );
					EmbeddableAggregate.assertEquals( EmbeddableAggregate.createAggregate2(), jsonHolders.get( 0 ).getAggregate() );
				}
		);
	}

	// Selecting the embeddable itself as a query domain result.
	@Test
	public void testDomainResult() {
		sessionFactoryScope().inSession(
				entityManager -> {
					List<EmbeddableAggregate> structs = entityManager.createQuery( "select b.aggregate from JsonHolder b where b.id = 1", EmbeddableAggregate.class ).getResultList();
					assertEquals( 1, structs.size() );
					EmbeddableAggregate.assertEquals( EmbeddableAggregate.createAggregate1(), structs.get( 0 ) );
				}
		);
	}

	// Selecting each aggregate member individually; tuple positions below must
	// match the select-item order in this query exactly.
	@Test
	public void testSelectionItems() {
		sessionFactoryScope().inSession(
				entityManager -> {
					List<Tuple> tuples = entityManager.createQuery(
							"select " +
									"b.aggregate.theInt," +
									"b.aggregate.theDouble," +
									"b.aggregate.theBoolean," +
									"b.aggregate.theNumericBoolean," +
									"b.aggregate.theStringBoolean," +
									"b.aggregate.theString," +
									"b.aggregate.theInteger," +
									"b.aggregate.theClob," +
									"b.aggregate.theBinary," +
									"b.aggregate.theDate," +
									"b.aggregate.theTime," +
									"b.aggregate.theTimestamp," +
									"b.aggregate.theInstant," +
									"b.aggregate.theUuid," +
									"b.aggregate.gender," +
									"b.aggregate.convertedGender," +
									"b.aggregate.ordinalGender," +
									"b.aggregate.theDuration," +
									"b.aggregate.theLocalDateTime," +
									"b.aggregate.theLocalDate," +
									"b.aggregate.theLocalTime," +
									"b.aggregate.theZonedDateTime," +
									"b.aggregate.theOffsetDateTime," +
									"b.aggregate.mutableValue " +
									"from JsonHolder b where b.id = 1",
							Tuple.class
					).getResultList();
					assertEquals( 1, tuples.size() );
					final Tuple tuple = tuples.get( 0 );
					// Rebuild an aggregate from the tuple and compare against the fixture.
					final EmbeddableAggregate struct = new EmbeddableAggregate();
					struct.setTheInt( tuple.get( 0, int.class ) );
					struct.setTheDouble( tuple.get( 1, Double.class ) );
					struct.setTheBoolean( tuple.get( 2, Boolean.class ) );
					struct.setTheNumericBoolean( tuple.get( 3, Boolean.class ) );
					struct.setTheStringBoolean( tuple.get( 4, Boolean.class ) );
					struct.setTheString( tuple.get( 5, String.class ) );
					struct.setTheInteger( tuple.get( 6, Integer.class ) );
					struct.setTheClob( tuple.get( 7, Clob.class ) );
					struct.setTheBinary( tuple.get( 8, byte[].class ) );
					struct.setTheDate( tuple.get( 9, Date.class ) );
					struct.setTheTime( tuple.get( 10, Time.class ) );
					struct.setTheTimestamp( tuple.get( 11, Timestamp.class ) );
					struct.setTheInstant( tuple.get( 12, Instant.class ) );
					struct.setTheUuid( tuple.get( 13, UUID.class ) );
					struct.setGender( tuple.get( 14, EntityOfBasics.Gender.class ) );
					struct.setConvertedGender( tuple.get( 15, EntityOfBasics.Gender.class ) );
					struct.setOrdinalGender( tuple.get( 16, EntityOfBasics.Gender.class ) );
					struct.setTheDuration( tuple.get( 17, Duration.class ) );
					struct.setTheLocalDateTime( tuple.get( 18, LocalDateTime.class ) );
					struct.setTheLocalDate( tuple.get( 19, LocalDate.class ) );
					struct.setTheLocalTime( tuple.get( 20, LocalTime.class ) );
					struct.setTheZonedDateTime( tuple.get( 21, ZonedDateTime.class ) );
					struct.setTheOffsetDateTime( tuple.get( 22, OffsetDateTime.class ) );
					struct.setMutableValue( tuple.get( 23, MutableValue.class ) );
					EmbeddableAggregate.assertEquals( EmbeddableAggregate.createAggregate1(), struct );
				}
		);
	}

	// Bulk delete with a predicate on the JSON column.
	@Test
	public void testDeleteWhere() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					entityManager.createQuery( "delete JsonHolder b where b.aggregate is not null" ).executeUpdate();
					assertNull( entityManager.find( JsonHolder.class, 1L ) );
				}
		);
	}

	// Bulk update assigning null to the whole aggregate.
	@Test
	public void testUpdateAggregate() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					entityManager.createQuery( "update JsonHolder b set b.aggregate = null" ).executeUpdate();
					assertNull( entityManager.find( JsonHolder.class, 1L ).getAggregate() );
				}
		);
	}

	// Bulk update of a single member inside the JSON document.
	@Test
	@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJsonComponentUpdate.class)
	public void testUpdateAggregateMember() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					entityManager.createQuery( "update JsonHolder b set b.aggregate.theString = null" ).executeUpdate();
					EmbeddableAggregate struct = EmbeddableAggregate.createAggregate1();
					struct.setTheString( null );
					EmbeddableAggregate.assertEquals( struct, entityManager.find( JsonHolder.class, 1L ).getAggregate() );
				}
		);
	}

	// Bulk update of two members of the JSON document in one statement.
	@Test
	@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJsonComponentUpdate.class)
	public void testUpdateMultipleAggregateMembers() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					entityManager.createQuery( "update JsonHolder b set b.aggregate.theString = null, b.aggregate.theUuid = null" ).executeUpdate();
					EmbeddableAggregate struct = EmbeddableAggregate.createAggregate1();
					struct.setTheString( null );
					struct.setTheUuid( null );
					EmbeddableAggregate.assertEquals( struct, entityManager.find( JsonHolder.class, 1L ).getAggregate() );
				}
		);
	}

	// Bulk update assigning every aggregate member via parameters; id 2 then
	// contains the full aggregate-1 state.
	@Test
	@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJsonComponentUpdate.class)
	public void testUpdateAllAggregateMembers() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					EmbeddableAggregate struct = EmbeddableAggregate.createAggregate1();
					entityManager.createQuery(
							"update JsonHolder b set " +
									"b.aggregate.theInt = :theInt," +
									"b.aggregate.theDouble = :theDouble," +
									"b.aggregate.theBoolean = :theBoolean," +
									"b.aggregate.theNumericBoolean = :theNumericBoolean," +
									"b.aggregate.theStringBoolean = :theStringBoolean," +
									"b.aggregate.theString = :theString," +
									"b.aggregate.theInteger = :theInteger," +
									"b.aggregate.theClob = :theClob," +
									"b.aggregate.theBinary = :theBinary," +
									"b.aggregate.theDate = :theDate," +
									"b.aggregate.theTime = :theTime," +
									"b.aggregate.theTimestamp = :theTimestamp," +
									"b.aggregate.theInstant = :theInstant," +
									"b.aggregate.theUuid = :theUuid," +
									"b.aggregate.gender = :gender," +
									"b.aggregate.convertedGender = :convertedGender," +
									"b.aggregate.ordinalGender = :ordinalGender," +
									"b.aggregate.theDuration = :theDuration," +
									"b.aggregate.theLocalDateTime = :theLocalDateTime," +
									"b.aggregate.theLocalDate = :theLocalDate," +
									"b.aggregate.theLocalTime = :theLocalTime," +
									"b.aggregate.theZonedDateTime = :theZonedDateTime," +
									"b.aggregate.theOffsetDateTime = :theOffsetDateTime," +
									"b.aggregate.mutableValue = :mutableValue " +
									"where b.id = 2"
					)
							.setParameter( "theInt", struct.getTheInt() )
							.setParameter( "theDouble", struct.getTheDouble() )
							.setParameter( "theBoolean", struct.isTheBoolean() )
							.setParameter( "theNumericBoolean", struct.isTheNumericBoolean() )
							.setParameter( "theStringBoolean", struct.isTheStringBoolean() )
							.setParameter( "theString", struct.getTheString() )
							.setParameter( "theInteger", struct.getTheInteger() )
							.setParameter( "theClob", struct.getTheClob() )
							.setParameter( "theBinary", struct.getTheBinary() )
							.setParameter( "theDate", struct.getTheDate() )
							.setParameter( "theTime", struct.getTheTime() )
							.setParameter( "theTimestamp", struct.getTheTimestamp() )
							.setParameter( "theInstant", struct.getTheInstant() )
							.setParameter( "theUuid", struct.getTheUuid() )
							.setParameter( "gender", struct.getGender() )
							.setParameter( "convertedGender", struct.getConvertedGender() )
							.setParameter( "ordinalGender", struct.getOrdinalGender() )
							.setParameter( "theDuration", struct.getTheDuration() )
							.setParameter( "theLocalDateTime", struct.getTheLocalDateTime() )
							.setParameter( "theLocalDate", struct.getTheLocalDate() )
							.setParameter( "theLocalTime", struct.getTheLocalTime() )
							.setParameter( "theZonedDateTime", struct.getTheZonedDateTime() )
							.setParameter( "theOffsetDateTime", struct.getTheOffsetDateTime() )
							.setParameter( "mutableValue", struct.getMutableValue() )
							.executeUpdate();
					EmbeddableAggregate.assertEquals( EmbeddableAggregate.createAggregate1(), entityManager.find( JsonHolder.class, 2L ).getAggregate() );
				}
		);
	}

	// Documentation snippet: the tag comments below are consumed by the user
	// guide build and must be kept intact.
	//tag::embeddable-json-type-mapping-example[]
	@Entity(name = "JsonHolder")
	public static class JsonHolder {
		@Id
		private Long id;
		@JdbcTypeCode(SqlTypes.JSON)
		private EmbeddableAggregate aggregate;

		//Getters and setters are omitted for brevity
		//end::embeddable-json-type-mapping-example[]
		public JsonHolder() {
		}

		public JsonHolder(Long id, EmbeddableAggregate aggregate) {
			this.id = id;
			this.aggregate = aggregate;
		}

		public Long getId() {
			return id;
		}

		public void setId(Long id) {
			this.id = id;
		}

		public EmbeddableAggregate getAggregate() {
			return aggregate;
		}

		public void setAggregate(EmbeddableAggregate aggregate) {
			this.aggregate = aggregate;
		}
	//tag::embeddable-json-type-mapping-example[]
	}
	//end::embeddable-json-type-mapping-example[]
}

View File

@@ -0,0 +1,531 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.userguide.mapping.embeddable;
import java.sql.Clob;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
import org.hibernate.testing.orm.domain.gambit.EntityOfBasics;
import org.hibernate.testing.orm.domain.gambit.MutableValue;
import org.hibernate.testing.orm.junit.BaseSessionFactoryFunctionalTest;
import org.hibernate.testing.orm.junit.DialectFeatureChecks;
import org.hibernate.testing.orm.junit.RequiresDialectFeature;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Tuple;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJsonAggregate.class)
public class NestedJsonEmbeddableTest extends BaseSessionFactoryFunctionalTest {
@Override
protected Class<?>[] getAnnotatedClasses() {
	// Only the holder entity is registered; the embeddables are reached through it.
	final Class<?>[] annotatedClasses = { JsonHolder.class };
	return annotatedClasses;
}
// Seed fixture: id 1 is fully populated, id 2 carries nulls where allowed.
@BeforeEach
public void setUp() {
	inTransaction( session -> {
		final JsonHolder first = new JsonHolder( 1L, "XYZ", 10, "String \"<abc>A&B</abc>\"", EmbeddableAggregate.createAggregate1() );
		final JsonHolder second = new JsonHolder( 2L, null, 20, "String 'abc'", EmbeddableAggregate.createAggregate2() );
		session.persist( first );
		session.persist( second );
	} );
}
// Remove all holders so each test starts from the setUp fixture alone.
@AfterEach
protected void cleanupTest() {
	inTransaction( session -> session.createQuery( "delete from JsonHolder h" ).executeUpdate() );
}
// Replacing the nested aggregate must round-trip, while sibling JSON members
// (stringField, simpleEmbeddable) stay untouched.
@Test
public void testUpdate() {
	sessionFactoryScope().inTransaction( session -> {
		JsonHolder holder = session.find( JsonHolder.class, 1L );
		holder.setAggregate( EmbeddableAggregate.createAggregate2() );
		session.flush();
		session.clear();
		holder = session.find( JsonHolder.class, 1L );
		assertEquals( "XYZ", holder.theJson.stringField );
		assertEquals( 10, holder.theJson.simpleEmbeddable.integerField );
		assertStructEquals( EmbeddableAggregate.createAggregate2(), holder.getAggregate() );
	} );
}
// Fetching the holder materializes the whole nested JSON graph, including the
// doubly-nested leaf embeddable.
@Test
public void testFetch() {
	sessionFactoryScope().inSession( session -> {
		final List<JsonHolder> holders = session.createQuery( "from JsonHolder b where b.id = 1", JsonHolder.class ).getResultList();
		assertEquals( 1, holders.size() );
		final JsonHolder holder = holders.get( 0 );
		assertEquals( 1L, holder.getId() );
		assertEquals( "XYZ", holder.theJson.stringField );
		assertEquals( 10, holder.theJson.simpleEmbeddable.integerField );
		assertEquals( "String \"<abc>A&B</abc>\"", holder.theJson.simpleEmbeddable.doubleNested.theNested.theLeaf.stringField );
		assertStructEquals( EmbeddableAggregate.createAggregate1(), holder.getAggregate() );
	} );
}
// Null members inside the JSON document must read back as null.
@Test
public void testFetchNull() {
	sessionFactoryScope().inSession( session -> {
		final List<JsonHolder> holders = session.createQuery( "from JsonHolder b where b.id = 2", JsonHolder.class ).getResultList();
		assertEquals( 1, holders.size() );
		final JsonHolder holder = holders.get( 0 );
		assertEquals( 2L, holder.getId() );
		assertNull( holder.theJson.stringField );
		assertEquals( 20, holder.theJson.simpleEmbeddable.integerField );
		assertStructEquals( EmbeddableAggregate.createAggregate2(), holder.getAggregate() );
	} );
}
// Selecting the top-level JSON embeddable as a query domain result.
@Test
public void testDomainResult() {
	sessionFactoryScope().inSession( session -> {
		final List<TheJson> results = session.createQuery( "select b.theJson from JsonHolder b where b.id = 1", TheJson.class ).getResultList();
		assertEquals( 1, results.size() );
		final TheJson json = results.get( 0 );
		assertEquals( "XYZ", json.stringField );
		assertEquals( 10, json.simpleEmbeddable.integerField );
		assertEquals( "String \"<abc>A&B</abc>\"", json.simpleEmbeddable.doubleNested.theNested.theLeaf.stringField );
		assertStructEquals( EmbeddableAggregate.createAggregate1(), json.nested );
	} );
}
// Selects every member of the nested aggregate individually, plus the nested
// embeddables at several depths. The tuple positions below must match the
// select-item order in the query exactly.
@Test
public void testSelectionItems() {
	sessionFactoryScope().inSession(
			entityManager -> {
				List<Tuple> tuples = entityManager.createQuery(
						"select " +
								"b.theJson.nested.theInt," +
								"b.theJson.nested.theDouble," +
								"b.theJson.nested.theBoolean," +
								"b.theJson.nested.theNumericBoolean," +
								"b.theJson.nested.theStringBoolean," +
								"b.theJson.nested.theString," +
								"b.theJson.nested.theInteger," +
								"b.theJson.nested.theClob," +
								"b.theJson.nested.theBinary," +
								"b.theJson.nested.theDate," +
								"b.theJson.nested.theTime," +
								"b.theJson.nested.theTimestamp," +
								"b.theJson.nested.theInstant," +
								"b.theJson.nested.theUuid," +
								"b.theJson.nested.gender," +
								"b.theJson.nested.convertedGender," +
								"b.theJson.nested.ordinalGender," +
								"b.theJson.nested.theDuration," +
								"b.theJson.nested.theLocalDateTime," +
								"b.theJson.nested.theLocalDate," +
								"b.theJson.nested.theLocalTime," +
								"b.theJson.nested.theZonedDateTime," +
								"b.theJson.nested.theOffsetDateTime," +
								"b.theJson.nested.mutableValue," +
								"b.theJson.simpleEmbeddable," +
								"b.theJson.simpleEmbeddable.doubleNested," +
								"b.theJson.simpleEmbeddable.doubleNested.theNested," +
								"b.theJson.simpleEmbeddable.doubleNested.theNested.theLeaf " +
								"from JsonHolder b where b.id = 1",
						Tuple.class
				).getResultList();
				assertEquals( 1, tuples.size() );
				final Tuple tuple = tuples.get( 0 );
				// Rebuild an aggregate from tuple positions 0-23 and compare to the fixture.
				final EmbeddableAggregate struct = new EmbeddableAggregate();
				struct.setTheInt( tuple.get( 0, int.class ) );
				struct.setTheDouble( tuple.get( 1, Double.class ) );
				struct.setTheBoolean( tuple.get( 2, Boolean.class ) );
				struct.setTheNumericBoolean( tuple.get( 3, Boolean.class ) );
				struct.setTheStringBoolean( tuple.get( 4, Boolean.class ) );
				struct.setTheString( tuple.get( 5, String.class ) );
				struct.setTheInteger( tuple.get( 6, Integer.class ) );
				struct.setTheClob( tuple.get( 7, Clob.class ) );
				struct.setTheBinary( tuple.get( 8, byte[].class ) );
				struct.setTheDate( tuple.get( 9, Date.class ) );
				struct.setTheTime( tuple.get( 10, Time.class ) );
				struct.setTheTimestamp( tuple.get( 11, Timestamp.class ) );
				struct.setTheInstant( tuple.get( 12, Instant.class ) );
				struct.setTheUuid( tuple.get( 13, UUID.class ) );
				struct.setGender( tuple.get( 14, EntityOfBasics.Gender.class ) );
				struct.setConvertedGender( tuple.get( 15, EntityOfBasics.Gender.class ) );
				struct.setOrdinalGender( tuple.get( 16, EntityOfBasics.Gender.class ) );
				struct.setTheDuration( tuple.get( 17, Duration.class ) );
				struct.setTheLocalDateTime( tuple.get( 18, LocalDateTime.class ) );
				struct.setTheLocalDate( tuple.get( 19, LocalDate.class ) );
				struct.setTheLocalTime( tuple.get( 20, LocalTime.class ) );
				struct.setTheZonedDateTime( tuple.get( 21, ZonedDateTime.class ) );
				struct.setTheOffsetDateTime( tuple.get( 22, OffsetDateTime.class ) );
				struct.setMutableValue( tuple.get( 23, MutableValue.class ) );
				EmbeddableAggregate.assertEquals( EmbeddableAggregate.createAggregate1(), struct );
				// Positions 24-27: nested embeddables selected at increasing depth
				// must all be consistent with one another.
				SimpleEmbeddable simpleEmbeddable = tuple.get( 24, SimpleEmbeddable.class );
				assertEquals( simpleEmbeddable.doubleNested, tuple.get( 25, DoubleNested.class ) );
				assertEquals( simpleEmbeddable.doubleNested.theNested, tuple.get( 26, Nested.class ) );
				assertEquals( simpleEmbeddable.doubleNested.theNested.theLeaf, tuple.get( 27, Leaf.class ) );
				assertEquals( 10, simpleEmbeddable.integerField );
				assertEquals( "String \"<abc>A&B</abc>\"", simpleEmbeddable.doubleNested.theNested.theLeaf.stringField );
			}
	);
}
// Bulk delete with a predicate on the JSON column removes the matching row.
@Test
public void testDeleteWhere() {
	sessionFactoryScope().inTransaction( session -> {
		session.createQuery( "delete JsonHolder b where b.theJson is not null" ).executeUpdate();
		assertNull( session.find( JsonHolder.class, 1L ) );
	} );
}
@Test
public void testUpdateAggregate() {
	// Setting the whole embeddable to null must null out the aggregate column.
	sessionFactoryScope().inTransaction(
			session -> {
				session.createQuery( "update JsonHolder b set b.theJson = null" ).executeUpdate();
				assertNull( session.find( JsonHolder.class, 1L ).getAggregate() );
			}
	);
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJsonComponentUpdate.class)
public void testUpdateAggregateMember() {
	// Updating one member of the nested JSON aggregate must leave the rest intact.
	sessionFactoryScope().inTransaction(
			session -> {
				session.createQuery( "update JsonHolder b set b.theJson.nested.theString = null" ).executeUpdate();
				final EmbeddableAggregate expected = EmbeddableAggregate.createAggregate1();
				expected.setTheString( null );
				assertStructEquals( expected, session.find( JsonHolder.class, 1L ).getAggregate() );
			}
	);
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJsonComponentUpdate.class)
public void testUpdateMultipleAggregateMembers() {
	// Two members of the nested aggregate are cleared in one bulk statement.
	sessionFactoryScope().inTransaction(
			session -> {
				session.createQuery( "update JsonHolder b set b.theJson.nested.theString = null, b.theJson.nested.theUuid = null" ).executeUpdate();
				final EmbeddableAggregate expected = EmbeddableAggregate.createAggregate1();
				expected.setTheString( null );
				expected.setTheUuid( null );
				assertStructEquals( expected, session.find( JsonHolder.class, 1L ).getAggregate() );
			}
	);
}
@Test
@RequiresDialectFeature(feature = DialectFeatureChecks.SupportsJsonComponentUpdate.class)
public void testUpdateAllAggregateMembers() {
	// Updates every member of the nested JSON aggregate (plus one member of the
	// sibling embeddable) in a single bulk statement against row 2, then verifies
	// that row 2 afterwards matches aggregate1. Each parameter below corresponds
	// positionally to one assignment in the query string.
	sessionFactoryScope().inTransaction(
			entityManager -> {
				EmbeddableAggregate struct = EmbeddableAggregate.createAggregate1();
				entityManager.createQuery(
						"update JsonHolder b set " +
								"b.theJson.nested.theInt = :theInt," +
								"b.theJson.nested.theDouble = :theDouble," +
								"b.theJson.nested.theBoolean = :theBoolean," +
								"b.theJson.nested.theNumericBoolean = :theNumericBoolean," +
								"b.theJson.nested.theStringBoolean = :theStringBoolean," +
								"b.theJson.nested.theString = :theString," +
								"b.theJson.nested.theInteger = :theInteger," +
								"b.theJson.nested.theClob = :theClob," +
								"b.theJson.nested.theBinary = :theBinary," +
								"b.theJson.nested.theDate = :theDate," +
								"b.theJson.nested.theTime = :theTime," +
								"b.theJson.nested.theTimestamp = :theTimestamp," +
								"b.theJson.nested.theInstant = :theInstant," +
								"b.theJson.nested.theUuid = :theUuid," +
								"b.theJson.nested.gender = :gender," +
								"b.theJson.nested.convertedGender = :convertedGender," +
								"b.theJson.nested.ordinalGender = :ordinalGender," +
								"b.theJson.nested.theDuration = :theDuration," +
								"b.theJson.nested.theLocalDateTime = :theLocalDateTime," +
								"b.theJson.nested.theLocalDate = :theLocalDate," +
								"b.theJson.nested.theLocalTime = :theLocalTime," +
								"b.theJson.nested.theZonedDateTime = :theZonedDateTime," +
								"b.theJson.nested.theOffsetDateTime = :theOffsetDateTime," +
								"b.theJson.nested.mutableValue = :mutableValue," +
								"b.theJson.simpleEmbeddable.integerField = :integerField " +
								"where b.id = 2"
				)
						.setParameter( "theInt", struct.getTheInt() )
						.setParameter( "theDouble", struct.getTheDouble() )
						.setParameter( "theBoolean", struct.isTheBoolean() )
						.setParameter( "theNumericBoolean", struct.isTheNumericBoolean() )
						.setParameter( "theStringBoolean", struct.isTheStringBoolean() )
						.setParameter( "theString", struct.getTheString() )
						.setParameter( "theInteger", struct.getTheInteger() )
						.setParameter( "theClob", struct.getTheClob() )
						.setParameter( "theBinary", struct.getTheBinary() )
						.setParameter( "theDate", struct.getTheDate() )
						.setParameter( "theTime", struct.getTheTime() )
						.setParameter( "theTimestamp", struct.getTheTimestamp() )
						.setParameter( "theInstant", struct.getTheInstant() )
						.setParameter( "theUuid", struct.getTheUuid() )
						.setParameter( "gender", struct.getGender() )
						.setParameter( "convertedGender", struct.getConvertedGender() )
						.setParameter( "ordinalGender", struct.getOrdinalGender() )
						.setParameter( "theDuration", struct.getTheDuration() )
						.setParameter( "theLocalDateTime", struct.getTheLocalDateTime() )
						.setParameter( "theLocalDate", struct.getTheLocalDate() )
						.setParameter( "theLocalTime", struct.getTheLocalTime() )
						.setParameter( "theZonedDateTime", struct.getTheZonedDateTime() )
						.setParameter( "theOffsetDateTime", struct.getTheOffsetDateTime() )
						.setParameter( "mutableValue", struct.getMutableValue() )
						.setParameter( "integerField", 5 )
						.executeUpdate();
				JsonHolder jsonHolder = entityManager.find( JsonHolder.class, 2L );
				// The sibling embeddable member was updated too
				assertEquals( 5, jsonHolder.theJson.simpleEmbeddable.integerField );
				assertStructEquals( EmbeddableAggregate.createAggregate1(), jsonHolder.getAggregate() );
			}
	);
}
/**
 * Asserts that a representative subset of the aggregate's members match:
 * binary, string, LocalDateTime and UUID. Deliberately not a full field-by-field
 * comparison of the aggregate.
 */
private static void assertStructEquals(EmbeddableAggregate struct, EmbeddableAggregate struct2) {
	assertArrayEquals( struct.getTheBinary(), struct2.getTheBinary() );
	assertEquals( struct.getTheString(), struct2.getTheString() );
	assertEquals( struct.getTheLocalDateTime(), struct2.getTheLocalDateTime() );
	assertEquals( struct.getTheUuid(), struct2.getTheUuid() );
}
/**
 * Entity whose single embeddable attribute is persisted as a JSON column.
 */
@Entity(name = "JsonHolder")
public static class JsonHolder {
	@Id
	private Long id;
	// The whole embeddable graph is serialized into one JSON column
	@JdbcTypeCode(SqlTypes.JSON)
	private TheJson theJson;

	public JsonHolder() {
	}

	public JsonHolder(Long id, String stringField, Integer integerField, String leaf, EmbeddableAggregate aggregate) {
		this.id = id;
		this.theJson = new TheJson( stringField, integerField, leaf, aggregate );
	}

	public Long getId() {
		return id;
	}

	public void setId(Long id) {
		this.id = id;
	}

	public TheJson getTheJson() {
		return theJson;
	}

	public void setTheJson(TheJson struct) {
		this.theJson = struct;
	}

	/**
	 * Convenience accessor for the aggregate nested inside the JSON embeddable.
	 */
	public EmbeddableAggregate getAggregate() {
		if ( theJson == null ) {
			return null;
		}
		return theJson.nested;
	}

	/**
	 * Stores the aggregate, lazily creating the surrounding embeddable if needed.
	 */
	public void setAggregate(EmbeddableAggregate aggregate) {
		if ( theJson != null ) {
			theJson.nested = aggregate;
		}
		else {
			theJson = new TheJson( null, null, null, aggregate );
		}
	}
}
/**
 * Top-level JSON embeddable: holds a basic field, a nested embeddable and a
 * further JSON-mapped aggregate, exercising multi-level JSON nesting.
 */
@Embeddable
public static class TheJson {
	private String stringField;
	private SimpleEmbeddable simpleEmbeddable;
	// Aggregate stored as a nested JSON object within this JSON document
	@JdbcTypeCode(SqlTypes.JSON)
	private EmbeddableAggregate nested;
	public TheJson() {
	}
	public TheJson(String stringField, Integer integerField, String leaf, EmbeddableAggregate nested) {
		this.stringField = stringField;
		this.simpleEmbeddable = new SimpleEmbeddable( integerField, leaf );
		this.nested = nested;
	}
}
/**
 * Mid-level embeddable combining a basic field with a JSON-mapped nested embeddable.
 */
@Embeddable
public static class SimpleEmbeddable {
	private Integer integerField;
	@JdbcTypeCode(SqlTypes.JSON)
	private DoubleNested doubleNested;

	public SimpleEmbeddable() {
	}

	public SimpleEmbeddable(Integer integerField, String leaf) {
		this.integerField = integerField;
		this.doubleNested = new DoubleNested( leaf );
	}

	@Override
	public boolean equals(Object o) {
		if ( this == o ) {
			return true;
		}
		if ( o == null || getClass() != o.getClass() ) {
			return false;
		}
		final SimpleEmbeddable other = (SimpleEmbeddable) o;
		return Objects.equals( integerField, other.integerField )
				&& Objects.equals( doubleNested, other.doubleNested );
	}

	@Override
	public int hashCode() {
		// Same 31-based two-field combination as the conventional expansion
		return 31 * Objects.hashCode( integerField ) + Objects.hashCode( doubleNested );
	}
}
/**
 * Second nesting level, again serialized as JSON within the parent document.
 */
@Embeddable
public static class DoubleNested {
	@JdbcTypeCode(SqlTypes.JSON)
	private Nested theNested;

	public DoubleNested() {
	}

	public DoubleNested(String leaf) {
		this.theNested = new Nested( leaf );
	}

	@Override
	public boolean equals(Object o) {
		if ( this == o ) {
			return true;
		}
		return o != null && getClass() == o.getClass()
				&& Objects.equals( theNested, ( (DoubleNested) o ).theNested );
	}

	@Override
	public int hashCode() {
		return Objects.hashCode( theNested );
	}
}
/**
 * Third nesting level, wrapping the leaf embeddable as yet another JSON object.
 */
@Embeddable
public static class Nested {
	@JdbcTypeCode(SqlTypes.JSON)
	private Leaf theLeaf;

	public Nested() {
	}

	public Nested(String stringField) {
		this.theLeaf = new Leaf( stringField );
	}

	@Override
	public boolean equals(Object o) {
		if ( this == o ) {
			return true;
		}
		return o != null && getClass() == o.getClass()
				&& Objects.equals( theLeaf, ( (Nested) o ).theLeaf );
	}

	@Override
	public int hashCode() {
		return Objects.hashCode( theLeaf );
	}
}
/**
 * Innermost embeddable: a single string field at the deepest nesting level.
 */
@Embeddable
public static class Leaf {
	private String stringField;

	public Leaf() {
	}

	public Leaf(String stringField) {
		this.stringField = stringField;
	}

	@Override
	public boolean equals(Object o) {
		if ( this == o ) {
			return true;
		}
		return o != null && getClass() == o.getClass()
				&& Objects.equals( stringField, ( (Leaf) o ).stringField );
	}

	@Override
	public int hashCode() {
		return Objects.hashCode( stringField );
	}
}
}

View File

@ -0,0 +1,785 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.userguide.mapping.embeddable;
import java.sql.Clob;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.UUID;
import org.hibernate.annotations.Struct;
import org.hibernate.boot.MetadataBuilder;
import org.hibernate.boot.model.naming.PhysicalNamingStrategyStandardImpl;
import org.hibernate.boot.model.relational.NamedAuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.DB2Dialect;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.dialect.PostgresPlusDialect;
import org.hibernate.engine.jdbc.connections.internal.DriverManagerConnectionProviderImpl;
import org.hibernate.procedure.ProcedureCall;
import org.hibernate.query.procedure.ProcedureParameter;
import org.hibernate.testing.orm.domain.gambit.EntityOfBasics;
import org.hibernate.testing.orm.domain.gambit.MutableValue;
import org.hibernate.testing.orm.junit.BaseSessionFactoryFunctionalTest;
import org.hibernate.testing.orm.junit.RequiresDialect;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.ParameterMode;
import jakarta.persistence.Tuple;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertInstanceOf;
import static org.junit.jupiter.api.Assertions.assertNull;
@RequiresDialect( PostgreSQLDialect.class )
@RequiresDialect( OracleDialect.class )
@RequiresDialect( DB2Dialect.class )
public class NestedStructEmbeddableTest extends BaseSessionFactoryFunctionalTest {
@Override
protected Class<?>[] getAnnotatedClasses() {
	// Only the holder entity is mapped explicitly; its struct types are
	// picked up as nested embeddables.
	return new Class<?>[] {
			StructHolder.class
	};
}
/**
 * Forces a dedicated (non-shared) connection pool for this test class.
 */
@Override
public StandardServiceRegistry produceServiceRegistry(StandardServiceRegistryBuilder ssrBuilder) {
	// Make sure this stuff runs on a dedicated connection pool,
	// otherwise we might run into ORA-21700: object does not exist or is marked for delete
	// because the JDBC connection or database session caches something that should have been invalidated
	ssrBuilder.applySetting( AvailableSettings.CONNECTION_PROVIDER, DriverManagerConnectionProviderImpl.class.getName() );
	return super.produceServiceRegistry( ssrBuilder );
}
/**
 * Registers dialect-specific auxiliary DDL: a {@code structFunction} returning the
 * struct type and (where supported) a {@code structProcedure} with a struct OUT
 * parameter, for PostgreSQL, Postgres Plus, DB2 and Oracle. Each object is scoped
 * to its dialect via the dialect-class name set, so only the matching DDL runs.
 */
@Override
protected void applyMetadataBuilder(MetadataBuilder metadataBuilder) {
	final Namespace namespace = new Namespace(
			PhysicalNamingStrategyStandardImpl.INSTANCE,
			null,
			new Namespace.Name( null, null )
	);
	//---------------------------------------------------------
	// PostgreSQL
	//---------------------------------------------------------
	metadataBuilder.applyAuxiliaryDatabaseObject(
			new NamedAuxiliaryDatabaseObject(
					"PostgreSQL structFunction",
					namespace,
					"create function structFunction() returns theStruct as $$ declare result theStruct; struct structType; begin struct.theBinary = bytea '\\x01'; struct.theString = 'ABC'; struct.theDouble = 0; struct.theInt = 0; struct.theLocalDateTime = timestamp '2022-12-01 01:00:00'; struct.theUuid = '53886a8a-7082-4879-b430-25cb94415be8'::uuid; result.nested = struct; return result; end $$ language plpgsql",
					"drop function structFunction",
					Set.of( PostgreSQLDialect.class.getName() )
			)
	);
	metadataBuilder.applyAuxiliaryDatabaseObject(
			new NamedAuxiliaryDatabaseObject(
					"PostgreSQL structProcedure",
					namespace,
					"create procedure structProcedure(OUT result theStruct) AS $$ begin result.nested.theBinary = bytea '\\x01'; result.nested.theString = 'ABC'; result.nested.theDouble = 0; result.nested.theInt = 0; result.nested.theLocalDateTime = timestamp '2022-12-01 01:00:00'; result.nested.theUuid = '53886a8a-7082-4879-b430-25cb94415be8'::uuid; end $$ language plpgsql",
					"drop procedure structProcedure",
					Set.of( PostgreSQLDialect.class.getName() )
			)
	);
	//---------------------------------------------------------
	// PostgrePlus
	//---------------------------------------------------------
	metadataBuilder.applyAuxiliaryDatabaseObject(
			new NamedAuxiliaryDatabaseObject(
					"PostgrePlus structFunction",
					namespace,
					"create function structFunction() returns theStruct as $$ declare result theStruct; struct structType; begin struct.theBinary = bytea '\\x01'; struct.theString = 'ABC'; struct.theDouble = 0; struct.theInt = 0; struct.theLocalDateTime = timestamp '2022-12-01 01:00:00'; struct.theUuid = '53886a8a-7082-4879-b430-25cb94415be8'::uuid; result.nested = struct; return result; end $$ language plpgsql",
					"drop function structFunction",
					Set.of( PostgresPlusDialect.class.getName() )
			)
	);
	metadataBuilder.applyAuxiliaryDatabaseObject(
			new NamedAuxiliaryDatabaseObject(
					"PostgrePlus structProcedure",
					namespace,
					"create procedure structProcedure(result OUT theStruct) AS $$ begin result.nested.theBinary = bytea '\\x01'; result.nested.theString = 'ABC'; result.nested.theDouble = 0; result.nested.theInt = 0; result.nested.theLocalDateTime = timestamp '2022-12-01 01:00:00'; result.nested.theUuid = '53886a8a-7082-4879-b430-25cb94415be8'::uuid; end $$ language plpgsql",
					"drop procedure structProcedure",
					Set.of( PostgresPlusDialect.class.getName() )
			)
	);
	//---------------------------------------------------------
	// DB2
	//---------------------------------------------------------
	// DB2 < 11 lacks the binary() type and the bx'' literal prefix
	final String binaryType;
	final String binaryLiteralPrefix;
	if ( getDialect().getVersion().isBefore( 11 ) ) {
		binaryType = "char(16) for bit data";
		binaryLiteralPrefix = "x";
	}
	else {
		binaryType = "binary(16)";
		binaryLiteralPrefix = "bx";
	}
	metadataBuilder.applyAuxiliaryDatabaseObject(
			new NamedAuxiliaryDatabaseObject(
					"DB2 structFunction",
					namespace,
					"create function structFunction() returns theStruct language sql RETURN select theStruct()..nested(structType()..theBinary(" + binaryLiteralPrefix + "'01')..theString('ABC')..theDouble(0)..theInt(0)..theLocalDateTime(timestamp '2022-12-01 01:00:00')..theUuid(cast(" + binaryLiteralPrefix + "'" +
							// UUID is already in HEX encoding, but we have to remove the dashes
							"53886a8a-7082-4879-b430-25cb94415be8".replace( "-", "" )
							+ "' as " + binaryType + "))) from (values (1)) t",
					"drop function structFunction",
					Set.of( DB2Dialect.class.getName() )
			)
	);
	//---------------------------------------------------------
	// Oracle
	//---------------------------------------------------------
	// Oracle object-type constructors require every attribute, hence the
	// long list of explicitly-null members
	metadataBuilder.applyAuxiliaryDatabaseObject(
			new NamedAuxiliaryDatabaseObject(
					"Oracle structFunction",
					namespace,
					"create function structFunction return theStruct is result theStruct; begin " +
							"result := theStruct(" +
							"stringField => null," +
							"integerField => null," +
							"doubleNested => null," +
							"nested => structType(" +
							"theBinary => hextoraw('01')," +
							"theString => 'ABC'," +
							"theDouble => 0," +
							"theInt => 0," +
							"theLocalDateTime => timestamp '2022-12-01 01:00:00'," +
							"theUuid => hextoraw('53886a8a70824879b43025cb94415be8')," +
							"converted_gender => null," +
							"gender => null," +
							"mutableValue => null," +
							"ordinal_gender => null," +
							"theBoolean => null," +
							"theClob => null," +
							"theDate => null," +
							"theDuration => null," +
							"theInstant => null," +
							"theInteger => null," +
							"theLocalDate => null," +
							"theLocalTime => null," +
							"theNumericBoolean => null," +
							"theOffsetDateTime => null," +
							"theStringBoolean => null," +
							"theTime => null," +
							"theTimestamp => null," +
							"theUrl => null," +
							"theZonedDateTime => null" +
							")); return result; end;",
					"drop function structFunction",
					Set.of( OracleDialect.class.getName() )
			)
	);
	metadataBuilder.applyAuxiliaryDatabaseObject(
			new NamedAuxiliaryDatabaseObject(
					"Oracle structProcedure",
					namespace,
					"create procedure structProcedure(result OUT theStruct) AS begin " +
							"result := theStruct(" +
							"stringField => null," +
							"integerField => null," +
							"doubleNested => null," +
							"nested => structType(" +
							"theBinary => hextoraw('01')," +
							"theString => 'ABC'," +
							"theDouble => 0," +
							"theInt => 0," +
							"theLocalDateTime => timestamp '2022-12-01 01:00:00'," +
							"theUuid => hextoraw('53886a8a70824879b43025cb94415be8')," +
							"converted_gender => null," +
							"gender => null," +
							"mutableValue => null," +
							"ordinal_gender => null," +
							"theBoolean => null," +
							"theClob => null," +
							"theDate => null," +
							"theDuration => null," +
							"theInstant => null," +
							"theInteger => null," +
							"theLocalDate => null," +
							"theLocalTime => null," +
							"theNumericBoolean => null," +
							"theOffsetDateTime => null," +
							"theStringBoolean => null," +
							"theTime => null," +
							"theTimestamp => null," +
							"theUrl => null," +
							"theZonedDateTime => null" +
							")); end;",
					"drop procedure structProcedure",
					Set.of( OracleDialect.class.getName() )
			)
	);
}
@BeforeEach
public void setUp() {
	// Two fixture rows: id 1 with aggregate1 and populated struct fields,
	// id 2 with aggregate2 and a null top-level string field
	inTransaction(
			session -> {
				session.persist( new StructHolder( 1L, "XYZ", 10, "String \"<abc>A&B</abc>\"", EmbeddableAggregate.createAggregate1() ) );
				session.persist( new StructHolder( 2L, null, 20, "String 'abc'", EmbeddableAggregate.createAggregate2() ) );
			}
	);
}
@AfterEach
protected void cleanupTest() {
	// Remove all fixture rows so each test starts from a clean table
	inTransaction(
			session -> {
				session.createQuery( "delete from StructHolder h" ).executeUpdate();
			}
	);
}
@Test
public void testUpdate() {
	// Replace the whole nested aggregate and verify it round-trips,
	// while the sibling struct members stay untouched.
	sessionFactoryScope().inTransaction(
			session -> {
				final StructHolder holder = session.find( StructHolder.class, 1L );
				holder.setAggregate( EmbeddableAggregate.createAggregate2() );
				session.flush();
				session.clear();
				final StructHolder reloaded = session.find( StructHolder.class, 1L );
				assertEquals( "XYZ", reloaded.struct.stringField );
				assertEquals( 10, reloaded.struct.simpleEmbeddable.integerField );
				assertStructEquals( EmbeddableAggregate.createAggregate2(), reloaded.getAggregate() );
			}
	);
}
@Test
public void testFetch() {
	// Loading the entity must materialize the full struct graph of row 1.
	sessionFactoryScope().inSession(
			session -> {
				final List<StructHolder> holders = session.createQuery( "from StructHolder b where b.id = 1", StructHolder.class ).getResultList();
				assertEquals( 1, holders.size() );
				final StructHolder holder = holders.get( 0 );
				assertEquals( 1L, holder.getId() );
				assertEquals( "XYZ", holder.struct.stringField );
				assertEquals( 10, holder.struct.simpleEmbeddable.integerField );
				assertEquals( "String \"<abc>A&B</abc>\"", holder.struct.simpleEmbeddable.doubleNested.theNested.theLeaf.stringField );
				assertStructEquals( EmbeddableAggregate.createAggregate1(), holder.getAggregate() );
			}
	);
}
@Test
public void testFetchNull() {
	// Row 2 has a null top-level string; the rest of the struct must still load.
	sessionFactoryScope().inSession(
			session -> {
				final List<StructHolder> holders = session.createQuery( "from StructHolder b where b.id = 2", StructHolder.class ).getResultList();
				assertEquals( 1, holders.size() );
				final StructHolder holder = holders.get( 0 );
				assertEquals( 2L, holder.getId() );
				assertNull( holder.struct.stringField );
				assertEquals( 20, holder.struct.simpleEmbeddable.integerField );
				assertStructEquals( EmbeddableAggregate.createAggregate2(), holder.getAggregate() );
			}
	);
}
@Test
public void testDomainResult() {
	// Selecting the embeddable directly (rather than the owning entity)
	// must still hydrate the complete struct graph.
	sessionFactoryScope().inSession(
			session -> {
				final List<TheStruct> results = session.createQuery( "select b.struct from StructHolder b where b.id = 1", TheStruct.class ).getResultList();
				assertEquals( 1, results.size() );
				final TheStruct struct = results.get( 0 );
				assertEquals( "XYZ", struct.stringField );
				assertEquals( 10, struct.simpleEmbeddable.integerField );
				assertEquals( "String \"<abc>A&B</abc>\"", struct.simpleEmbeddable.doubleNested.theNested.theLeaf.stringField );
				assertStructEquals( EmbeddableAggregate.createAggregate1(), struct.nested );
			}
	);
}
@Test
public void testSelectionItems() {
	// Selects every member of the nested aggregate plus the embeddables at each
	// nesting level as individual selection items. Each tuple.get index below
	// corresponds positionally to one selection item in the query string, so the
	// two lists must stay in lock-step.
	sessionFactoryScope().inSession(
			entityManager -> {
				List<Tuple> tuples = entityManager.createQuery(
						"select " +
								"b.struct.nested.theInt," +
								"b.struct.nested.theDouble," +
								"b.struct.nested.theBoolean," +
								"b.struct.nested.theNumericBoolean," +
								"b.struct.nested.theStringBoolean," +
								"b.struct.nested.theString," +
								"b.struct.nested.theInteger," +
								"b.struct.nested.theClob," +
								"b.struct.nested.theBinary," +
								"b.struct.nested.theDate," +
								"b.struct.nested.theTime," +
								"b.struct.nested.theTimestamp," +
								"b.struct.nested.theInstant," +
								"b.struct.nested.theUuid," +
								"b.struct.nested.gender," +
								"b.struct.nested.convertedGender," +
								"b.struct.nested.ordinalGender," +
								"b.struct.nested.theDuration," +
								"b.struct.nested.theLocalDateTime," +
								"b.struct.nested.theLocalDate," +
								"b.struct.nested.theLocalTime," +
								"b.struct.nested.theZonedDateTime," +
								"b.struct.nested.theOffsetDateTime," +
								"b.struct.nested.mutableValue," +
								"b.struct.simpleEmbeddable," +
								"b.struct.simpleEmbeddable.doubleNested," +
								"b.struct.simpleEmbeddable.doubleNested.theNested," +
								"b.struct.simpleEmbeddable.doubleNested.theNested.theLeaf " +
								"from StructHolder b where b.id = 1",
						Tuple.class
				).getResultList();
				assertEquals( 1, tuples.size() );
				final Tuple tuple = tuples.get( 0 );
				// Rebuild the aggregate from the individual selection items ...
				final EmbeddableAggregate struct = new EmbeddableAggregate();
				struct.setTheInt( tuple.get( 0, int.class ) );
				struct.setTheDouble( tuple.get( 1, Double.class ) );
				struct.setTheBoolean( tuple.get( 2, Boolean.class ) );
				struct.setTheNumericBoolean( tuple.get( 3, Boolean.class ) );
				struct.setTheStringBoolean( tuple.get( 4, Boolean.class ) );
				struct.setTheString( tuple.get( 5, String.class ) );
				struct.setTheInteger( tuple.get( 6, Integer.class ) );
				struct.setTheClob( tuple.get( 7, Clob.class ) );
				struct.setTheBinary( tuple.get( 8, byte[].class ) );
				struct.setTheDate( tuple.get( 9, Date.class ) );
				struct.setTheTime( tuple.get( 10, Time.class ) );
				struct.setTheTimestamp( tuple.get( 11, Timestamp.class ) );
				struct.setTheInstant( tuple.get( 12, Instant.class ) );
				struct.setTheUuid( tuple.get( 13, UUID.class ) );
				struct.setGender( tuple.get( 14, EntityOfBasics.Gender.class ) );
				struct.setConvertedGender( tuple.get( 15, EntityOfBasics.Gender.class ) );
				struct.setOrdinalGender( tuple.get( 16, EntityOfBasics.Gender.class ) );
				struct.setTheDuration( tuple.get( 17, Duration.class ) );
				struct.setTheLocalDateTime( tuple.get( 18, LocalDateTime.class ) );
				struct.setTheLocalDate( tuple.get( 19, LocalDate.class ) );
				struct.setTheLocalTime( tuple.get( 20, LocalTime.class ) );
				struct.setTheZonedDateTime( tuple.get( 21, ZonedDateTime.class ) );
				struct.setTheOffsetDateTime( tuple.get( 22, OffsetDateTime.class ) );
				struct.setMutableValue( tuple.get( 23, MutableValue.class ) );
				// ... and compare against the canonical fixture
				EmbeddableAggregate.assertEquals( EmbeddableAggregate.createAggregate1(), struct );
				// Embeddables selected as whole items must be consistent with each other
				SimpleEmbeddable simpleEmbeddable = tuple.get( 24, SimpleEmbeddable.class );
				assertEquals( simpleEmbeddable.doubleNested, tuple.get( 25, DoubleNested.class ) );
				assertEquals( simpleEmbeddable.doubleNested.theNested, tuple.get( 26, Nested.class ) );
				assertEquals( simpleEmbeddable.doubleNested.theNested.theLeaf, tuple.get( 27, Leaf.class ) );
				assertEquals( 10, simpleEmbeddable.integerField );
				assertEquals( "String \"<abc>A&B</abc>\"", simpleEmbeddable.doubleNested.theNested.theLeaf.stringField );
			}
	);
}
@Test
public void testDeleteWhere() {
	// A bulk delete must be able to restrict on the struct column as a whole.
	sessionFactoryScope().inTransaction(
			session -> {
				session.createQuery( "delete StructHolder b where b.struct is not null" ).executeUpdate();
				// Every fixture row has a non-null struct, so row 1 must be gone
				assertNull( session.find( StructHolder.class, 1L ) );
			}
	);
}
@Test
public void testUpdateAggregate() {
	// Setting the whole embeddable to null must null out the struct column.
	sessionFactoryScope().inTransaction(
			session -> {
				session.createQuery( "update StructHolder b set b.struct = null" ).executeUpdate();
				assertNull( session.find( StructHolder.class, 1L ).getAggregate() );
			}
	);
}
@Test
public void testUpdateAggregateMember() {
	// Updating one member of the nested aggregate must leave the rest intact.
	sessionFactoryScope().inTransaction(
			session -> {
				session.createQuery( "update StructHolder b set b.struct.nested.theString = null" ).executeUpdate();
				final EmbeddableAggregate expected = EmbeddableAggregate.createAggregate1();
				expected.setTheString( null );
				assertStructEquals( expected, session.find( StructHolder.class, 1L ).getAggregate() );
			}
	);
}
@Test
public void testUpdateMultipleAggregateMembers() {
	// Two members of the nested aggregate are cleared in one bulk statement.
	sessionFactoryScope().inTransaction(
			session -> {
				session.createQuery( "update StructHolder b set b.struct.nested.theString = null, b.struct.nested.theUuid = null" ).executeUpdate();
				final EmbeddableAggregate expected = EmbeddableAggregate.createAggregate1();
				expected.setTheString( null );
				expected.setTheUuid( null );
				assertStructEquals( expected, session.find( StructHolder.class, 1L ).getAggregate() );
			}
	);
}
@Test
public void testUpdateAllAggregateMembers() {
	// Updates every member of the nested struct aggregate (plus one member of the
	// sibling embeddable) in a single bulk statement against row 2, then verifies
	// that row 2 afterwards matches aggregate1. Each parameter below corresponds
	// positionally to one assignment in the query string.
	sessionFactoryScope().inTransaction(
			entityManager -> {
				EmbeddableAggregate struct = EmbeddableAggregate.createAggregate1();
				entityManager.createQuery(
						"update StructHolder b set " +
								"b.struct.nested.theInt = :theInt," +
								"b.struct.nested.theDouble = :theDouble," +
								"b.struct.nested.theBoolean = :theBoolean," +
								"b.struct.nested.theNumericBoolean = :theNumericBoolean," +
								"b.struct.nested.theStringBoolean = :theStringBoolean," +
								"b.struct.nested.theString = :theString," +
								"b.struct.nested.theInteger = :theInteger," +
								"b.struct.nested.theClob = :theClob," +
								"b.struct.nested.theBinary = :theBinary," +
								"b.struct.nested.theDate = :theDate," +
								"b.struct.nested.theTime = :theTime," +
								"b.struct.nested.theTimestamp = :theTimestamp," +
								"b.struct.nested.theInstant = :theInstant," +
								"b.struct.nested.theUuid = :theUuid," +
								"b.struct.nested.gender = :gender," +
								"b.struct.nested.convertedGender = :convertedGender," +
								"b.struct.nested.ordinalGender = :ordinalGender," +
								"b.struct.nested.theDuration = :theDuration," +
								"b.struct.nested.theLocalDateTime = :theLocalDateTime," +
								"b.struct.nested.theLocalDate = :theLocalDate," +
								"b.struct.nested.theLocalTime = :theLocalTime," +
								"b.struct.nested.theZonedDateTime = :theZonedDateTime," +
								"b.struct.nested.theOffsetDateTime = :theOffsetDateTime," +
								"b.struct.nested.mutableValue = :mutableValue," +
								"b.struct.simpleEmbeddable.integerField = :integerField " +
								"where b.id = 2"
				)
						.setParameter( "theInt", struct.getTheInt() )
						.setParameter( "theDouble", struct.getTheDouble() )
						.setParameter( "theBoolean", struct.isTheBoolean() )
						.setParameter( "theNumericBoolean", struct.isTheNumericBoolean() )
						.setParameter( "theStringBoolean", struct.isTheStringBoolean() )
						.setParameter( "theString", struct.getTheString() )
						.setParameter( "theInteger", struct.getTheInteger() )
						.setParameter( "theClob", struct.getTheClob() )
						.setParameter( "theBinary", struct.getTheBinary() )
						.setParameter( "theDate", struct.getTheDate() )
						.setParameter( "theTime", struct.getTheTime() )
						.setParameter( "theTimestamp", struct.getTheTimestamp() )
						.setParameter( "theInstant", struct.getTheInstant() )
						.setParameter( "theUuid", struct.getTheUuid() )
						.setParameter( "gender", struct.getGender() )
						.setParameter( "convertedGender", struct.getConvertedGender() )
						.setParameter( "ordinalGender", struct.getOrdinalGender() )
						.setParameter( "theDuration", struct.getTheDuration() )
						.setParameter( "theLocalDateTime", struct.getTheLocalDateTime() )
						.setParameter( "theLocalDate", struct.getTheLocalDate() )
						.setParameter( "theLocalTime", struct.getTheLocalTime() )
						.setParameter( "theZonedDateTime", struct.getTheZonedDateTime() )
						.setParameter( "theOffsetDateTime", struct.getTheOffsetDateTime() )
						.setParameter( "mutableValue", struct.getMutableValue() )
						.setParameter( "integerField", 5 )
						.executeUpdate();
				StructHolder structHolder = entityManager.find( StructHolder.class, 2L );
				// The sibling embeddable member was updated too
				assertEquals( 5, structHolder.struct.simpleEmbeddable.integerField );
				assertStructEquals( EmbeddableAggregate.createAggregate1(), structHolder.getAggregate() );
			}
	);
}
/**
 * Verifies that a native query can return the struct column and that the driver-level
 * value is converted back into the mapped embeddable type.
 */
@Test
public void testNativeQuery() {
	sessionFactoryScope().inTransaction(
			entityManager -> {
				//noinspection unchecked
				List<Object> resultList = entityManager.createNativeQuery(
								"select b.struct from StructHolder b where b.id = 1",
								// DB2 does not support structs on the driver level, and we instead do a XML serialization/deserialization
								// So in order to receive the correct value, we have to specify the actual type that we expect
								getDialect() instanceof DB2Dialect
										? (Class<Object>) (Class<?>) TheStruct.class
										// Using Object.class on purpose to verify Dialect#resolveSqlTypeDescriptor works
										: Object.class
						)
						.getResultList();
				assertEquals( 1, resultList.size() );
				assertInstanceOf( TheStruct.class, resultList.get( 0 ) );
				TheStruct theStruct = (TheStruct) resultList.get( 0 );
				assertEquals( "XYZ", theStruct.stringField );
				assertEquals( 10, theStruct.simpleEmbeddable.integerField );
				assertEquals( "String \"<abc>A&B</abc>\"", theStruct.simpleEmbeddable.doubleNested.theNested.theLeaf.stringField );
				assertStructEquals( EmbeddableAggregate.createAggregate1(), theStruct.nested );
			}
	);
}
@Test
public void testFunction() {
	// Calls the dialect-specific structFunction() registered in applyMetadataBuilder
	// and checks that its struct return value maps back to the embeddable.
	sessionFactoryScope().inTransaction(
			session -> {
				final ProcedureCall call = session.createStoredProcedureCall( "structFunction" )
						.markAsFunctionCall( TheStruct.class );
				//noinspection unchecked
				final List<Object> results = call.getResultList();
				assertEquals( 1, results.size() );
				assertInstanceOf( TheStruct.class, results.get( 0 ) );
				final TheStruct result = (TheStruct) results.get( 0 );
				assertStructEquals( EmbeddableAggregate.createAggregate3(), result.nested );
			}
	);
}
/**
 * Calls the dialect-specific structProcedure registered in applyMetadataBuilder
 * and checks that the struct OUT parameter maps back to the embeddable.
 */
@Test
@SkipForDialect(dialectClass = PostgreSQLDialect.class, majorVersion = 10, reason = "Procedures were only introduced in version 11")
@SkipForDialect(dialectClass = DB2Dialect.class, reason = "DB2 does not support struct types in procedures")
public void testProcedure() {
	sessionFactoryScope().inTransaction(
			entityManager -> {
				ProcedureCall structFunction = entityManager.createStoredProcedureCall( "structProcedure" );
				// NOTE(review): the DDL declares the OUT parameter as "result";
				// registering it here as "structType" presumably works via positional
				// binding — confirm against ProcedureCall's parameter resolution.
				ProcedureParameter<TheStruct> resultParameter = structFunction.registerParameter(
						"structType",
						TheStruct.class,
						ParameterMode.OUT
				);
				structFunction.setParameter( resultParameter, null );
				TheStruct result = structFunction.getOutputs().getOutputParameterValue( resultParameter );
				EmbeddableAggregate struct = EmbeddableAggregate.createAggregate3();
				assertStructEquals( struct, result.nested );
			}
	);
}
/**
 * Asserts that a representative subset of the aggregate's members match:
 * binary, string, LocalDateTime and UUID. Deliberately not a full field-by-field
 * comparison of the aggregate.
 */
private static void assertStructEquals(EmbeddableAggregate struct, EmbeddableAggregate struct2) {
	assertArrayEquals( struct.getTheBinary(), struct2.getTheBinary() );
	assertEquals( struct.getTheString(), struct2.getTheString() );
	assertEquals( struct.getTheLocalDateTime(), struct2.getTheLocalDateTime() );
	assertEquals( struct.getTheUuid(), struct2.getTheUuid() );
}
/**
 * Entity whose single embeddable attribute is persisted as a SQL struct column.
 */
@Entity(name = "StructHolder")
public static class StructHolder {
	@Id
	private Long id;
	private TheStruct struct;

	public StructHolder() {
	}

	public StructHolder(Long id, String stringField, Integer integerField, String leaf, EmbeddableAggregate aggregate) {
		this.id = id;
		this.struct = new TheStruct( stringField, integerField, leaf, aggregate );
	}

	public Long getId() {
		return id;
	}

	public void setId(Long id) {
		this.id = id;
	}

	public TheStruct getStruct() {
		return struct;
	}

	public void setStruct(TheStruct struct) {
		this.struct = struct;
	}

	/**
	 * Convenience accessor for the aggregate nested inside the struct embeddable.
	 */
	public EmbeddableAggregate getAggregate() {
		if ( struct == null ) {
			return null;
		}
		return struct.nested;
	}

	/**
	 * Stores the aggregate, lazily creating the surrounding embeddable if needed.
	 */
	public void setAggregate(EmbeddableAggregate aggregate) {
		if ( struct != null ) {
			struct.nested = aggregate;
		}
		else {
			struct = new TheStruct( null, null, null, aggregate );
		}
	}
}
/**
 * Top-level struct-mapped embeddable combining a plain field, a non-struct
 * embeddable and a nested struct-mapped aggregate.
 */
@Embeddable
@Struct( name = "theStruct" )
public static class TheStruct {

	private String stringField;
	private SimpleEmbeddable simpleEmbeddable;
	@Struct(name = "structType")
	private EmbeddableAggregate nested;

	public TheStruct() {
	}

	public TheStruct(String stringField, Integer integerField, String leaf, EmbeddableAggregate nested) {
		this.nested = nested;
		this.stringField = stringField;
		this.simpleEmbeddable = new SimpleEmbeddable( integerField, leaf );
	}
}
/**
 * Plain (non-struct) embeddable holding an integer and a doubly-nested
 * struct-mapped embeddable chain.
 */
@Embeddable
public static class SimpleEmbeddable {

	private Integer integerField;
	private DoubleNested doubleNested;

	public SimpleEmbeddable() {
	}

	public SimpleEmbeddable(Integer integerField, String leaf) {
		this.integerField = integerField;
		this.doubleNested = new DoubleNested( leaf );
	}

	@Override
	public boolean equals(Object o) {
		if ( this == o ) {
			return true;
		}
		if ( o == null || getClass() != o.getClass() ) {
			return false;
		}
		final SimpleEmbeddable other = (SimpleEmbeddable) o;
		return Objects.equals( integerField, other.integerField )
				&& Objects.equals( doubleNested, other.doubleNested );
	}

	@Override
	public int hashCode() {
		// Same 31-based combination as the hand-rolled original (no leading seed of 1)
		return 31 * Objects.hashCode( integerField ) + Objects.hashCode( doubleNested );
	}
}
/**
 * Struct-mapped embeddable one level below SimpleEmbeddable; wraps {@link Nested}.
 */
@Embeddable
@Struct( name = "double_nested")
public static class DoubleNested {

	private Nested theNested;

	public DoubleNested() {
	}

	public DoubleNested(String leaf) {
		this.theNested = new Nested( leaf );
	}

	@Override
	public boolean equals(Object o) {
		if ( this == o ) {
			return true;
		}
		if ( o == null || getClass() != o.getClass() ) {
			return false;
		}
		final DoubleNested other = (DoubleNested) o;
		return Objects.equals( theNested, other.theNested );
	}

	@Override
	public int hashCode() {
		// Null-safe; identical to "theNested != null ? theNested.hashCode() : 0"
		return Objects.hashCode( theNested );
	}
}
/**
 * Struct-mapped embeddable wrapping the terminal {@link Leaf} element.
 */
@Embeddable
@Struct( name = "nested")
public static class Nested {

	private Leaf theLeaf;

	public Nested() {
	}

	public Nested(String stringField) {
		this.theLeaf = new Leaf( stringField );
	}

	@Override
	public boolean equals(Object o) {
		if ( this == o ) {
			return true;
		}
		if ( o == null || getClass() != o.getClass() ) {
			return false;
		}
		final Nested other = (Nested) o;
		return Objects.equals( theLeaf, other.theLeaf );
	}

	@Override
	public int hashCode() {
		// Null-safe; identical to "theLeaf != null ? theLeaf.hashCode() : 0"
		return Objects.hashCode( theLeaf );
	}
}
/**
 * Terminal struct-mapped embeddable in the nesting chain; holds a single string.
 */
@Embeddable
@Struct( name = "leaf")
public static class Leaf {

	private String stringField;

	public Leaf() {
	}

	public Leaf(String stringField) {
		this.stringField = stringField;
	}

	@Override
	public boolean equals(Object o) {
		if ( this == o ) {
			return true;
		}
		if ( o == null || getClass() != o.getClass() ) {
			return false;
		}
		final Leaf other = (Leaf) o;
		return Objects.equals( stringField, other.stringField );
	}

	@Override
	public int hashCode() {
		// Null-safe; identical to "stringField != null ? stringField.hashCode() : 0"
		return Objects.hashCode( stringField );
	}
}
}

View File

@ -0,0 +1,588 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.userguide.mapping.embeddable;
import java.sql.Clob;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.ZonedDateTime;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import org.hibernate.annotations.Struct;
import org.hibernate.boot.MetadataBuilder;
import org.hibernate.boot.model.naming.PhysicalNamingStrategyStandardImpl;
import org.hibernate.boot.model.relational.NamedAuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.DB2Dialect;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.dialect.PostgresPlusDialect;
import org.hibernate.engine.jdbc.connections.internal.DriverManagerConnectionProviderImpl;
import org.hibernate.procedure.ProcedureCall;
import org.hibernate.query.procedure.ProcedureParameter;
import org.hibernate.testing.orm.domain.gambit.EntityOfBasics;
import org.hibernate.testing.orm.domain.gambit.MutableValue;
import org.hibernate.testing.orm.junit.BaseSessionFactoryFunctionalTest;
import org.hibernate.testing.orm.junit.RequiresDialect;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.ParameterMode;
import jakarta.persistence.Tuple;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertInstanceOf;
import static org.junit.jupiter.api.Assertions.assertNull;
/**
 * Tests mapping an {@link EmbeddableAggregate} as a named SQL struct type via {@link Struct}:
 * fetching, projections, bulk DML on aggregate members, native queries, and struct-returning
 * database functions/procedures. Restricted to dialects with native struct support
 * (PostgreSQL, Oracle, DB2).
 */
@RequiresDialect( PostgreSQLDialect.class )
@RequiresDialect( OracleDialect.class )
@RequiresDialect( DB2Dialect.class )
public class StructEmbeddableTest extends BaseSessionFactoryFunctionalTest {

	@Override
	protected Class<?>[] getAnnotatedClasses() {
		return new Class<?>[] {
				StructHolder.class
		};
	}

	@Override
	public StandardServiceRegistry produceServiceRegistry(StandardServiceRegistryBuilder ssrBuilder) {
		// Make sure this stuff runs on a dedicated connection pool,
		// otherwise we might run into ORA-21700: object does not exist or is marked for delete
		// because the JDBC connection or database session caches something that should have been invalidated
		ssrBuilder.applySetting( AvailableSettings.CONNECTION_PROVIDER, DriverManagerConnectionProviderImpl.class.getName() );
		return super.produceServiceRegistry( ssrBuilder );
	}

	@Override
	protected void applyMetadataBuilder(MetadataBuilder metadataBuilder) {
		// Registers dialect-specific auxiliary database objects (functions/procedures that
		// return a populated structType) used by testFunction/testProcedure below.
		final Namespace namespace = new Namespace(
				PhysicalNamingStrategyStandardImpl.INSTANCE,
				null,
				new Namespace.Name( null, null )
		);

		//---------------------------------------------------------
		// PostgreSQL
		//---------------------------------------------------------

		metadataBuilder.applyAuxiliaryDatabaseObject(
				new NamedAuxiliaryDatabaseObject(
						"PostgreSQL structFunction",
						namespace,
						"create function structFunction() returns structType as $$ declare result structType; begin result.theBinary = bytea '\\x01'; result.theString = 'ABC'; result.theDouble = 0; result.theInt = 0; result.theLocalDateTime = timestamp '2022-12-01 01:00:00'; result.theUuid = '53886a8a-7082-4879-b430-25cb94415be8'::uuid; return result; end $$ language plpgsql",
						"drop function structFunction",
						Set.of( PostgreSQLDialect.class.getName() )
				)
		);
		metadataBuilder.applyAuxiliaryDatabaseObject(
				new NamedAuxiliaryDatabaseObject(
						"PostgreSQL structProcedure",
						namespace,
						"create procedure structProcedure(OUT result structType) AS $$ begin result.theBinary = bytea '\\x01'; result.theString = 'ABC'; result.theDouble = 0; result.theInt = 0; result.theLocalDateTime = timestamp '2022-12-01 01:00:00'; result.theUuid = '53886a8a-7082-4879-b430-25cb94415be8'::uuid; end $$ language plpgsql",
						"drop procedure structProcedure",
						Set.of( PostgreSQLDialect.class.getName() )
				)
		);

		//---------------------------------------------------------
		// PostgrePlus
		//---------------------------------------------------------

		metadataBuilder.applyAuxiliaryDatabaseObject(
				new NamedAuxiliaryDatabaseObject(
						"PostgrePlus structFunction",
						namespace,
						"create function structFunction() returns structType as $$ declare result structType; begin result.theBinary = bytea '\\x01'; result.theString = 'ABC'; result.theDouble = 0; result.theInt = 0; result.theLocalDateTime = timestamp '2022-12-01 01:00:00'; result.theUuid = '53886a8a-7082-4879-b430-25cb94415be8'::uuid; return result; end $$ language plpgsql",
						"drop function structFunction",
						Set.of( PostgresPlusDialect.class.getName() )
				)
		);
		metadataBuilder.applyAuxiliaryDatabaseObject(
				new NamedAuxiliaryDatabaseObject(
						"PostgrePlus structProcedure",
						namespace,
						"create procedure structProcedure(result OUT structType) AS $$ begin result.theBinary = bytea '\\x01'; result.theString = 'ABC'; result.theDouble = 0; result.theInt = 0; result.theLocalDateTime = timestamp '2022-12-01 01:00:00'; result.theUuid = '53886a8a-7082-4879-b430-25cb94415be8'::uuid; end $$ language plpgsql",
						"drop procedure structProcedure",
						Set.of( PostgresPlusDialect.class.getName() )
				)
		);

		//---------------------------------------------------------
		// DB2
		//---------------------------------------------------------

		// DB2 < 11 has no real binary type, so fall back to "char for bit data"
		// with the plain hex-literal prefix.
		final String binaryType;
		final String binaryLiteralPrefix;
		if ( getDialect().getVersion().isBefore( 11 ) ) {
			binaryType = "char(16) for bit data";
			binaryLiteralPrefix = "x";
		}
		else {
			binaryType = "binary(16)";
			binaryLiteralPrefix = "bx";
		}

		metadataBuilder.applyAuxiliaryDatabaseObject(
				new NamedAuxiliaryDatabaseObject(
						"DB2 structFunction",
						namespace,
						"create function structFunction() returns structType language sql RETURN select structType()..theBinary(" + binaryLiteralPrefix + "'01')..theString('ABC')..theDouble(0)..theInt(0)..theLocalDateTime(timestamp '2022-12-01 01:00:00')..theUuid(cast(" + binaryLiteralPrefix + "'" +
								// UUID is already in HEX encoding, but we have to remove the dashes
								"53886a8a-7082-4879-b430-25cb94415be8".replace( "-", "" )
								+ "' as " + binaryType + ")) from (values (1)) t",
						"drop function structFunction",
						Set.of( DB2Dialect.class.getName() )
				)
		);

		//---------------------------------------------------------
		// Oracle
		//---------------------------------------------------------

		// Oracle object-type constructors require every attribute, so all unused
		// attributes are passed explicitly as null.
		metadataBuilder.applyAuxiliaryDatabaseObject(
				new NamedAuxiliaryDatabaseObject(
						"Oracle structFunction",
						namespace,
						"create function structFunction return structType is result structType; begin " +
								"result := structType(" +
								"theBinary => hextoraw('01')," +
								"theString => 'ABC'," +
								"theDouble => 0," +
								"theInt => 0," +
								"theLocalDateTime => timestamp '2022-12-01 01:00:00'," +
								"theUuid => hextoraw('53886a8a70824879b43025cb94415be8')," +
								"converted_gender => null," +
								"gender => null," +
								"mutableValue => null," +
								"ordinal_gender => null," +
								"theBoolean => null," +
								"theClob => null," +
								"theDate => null," +
								"theDuration => null," +
								"theInstant => null," +
								"theInteger => null," +
								"theLocalDate => null," +
								"theLocalTime => null," +
								"theNumericBoolean => null," +
								"theOffsetDateTime => null," +
								"theStringBoolean => null," +
								"theTime => null," +
								"theTimestamp => null," +
								"theUrl => null," +
								"theZonedDateTime => null" +
								"); return result; end;",
						"drop function structFunction",
						Set.of( OracleDialect.class.getName() )
				)
		);
		metadataBuilder.applyAuxiliaryDatabaseObject(
				new NamedAuxiliaryDatabaseObject(
						"Oracle structProcedure",
						namespace,
						"create procedure structProcedure(result OUT structType) AS begin " +
								"result := structType(" +
								"theBinary => hextoraw('01')," +
								"theString => 'ABC'," +
								"theDouble => 0," +
								"theInt => 0," +
								"theLocalDateTime => timestamp '2022-12-01 01:00:00'," +
								"theUuid => hextoraw('53886a8a70824879b43025cb94415be8')," +
								"converted_gender => null," +
								"gender => null," +
								"mutableValue => null," +
								"ordinal_gender => null," +
								"theBoolean => null," +
								"theClob => null," +
								"theDate => null," +
								"theDuration => null," +
								"theInstant => null," +
								"theInteger => null," +
								"theLocalDate => null," +
								"theLocalTime => null," +
								"theNumericBoolean => null," +
								"theOffsetDateTime => null," +
								"theStringBoolean => null," +
								"theTime => null," +
								"theTimestamp => null," +
								"theUrl => null," +
								"theZonedDateTime => null" +
								"); end;",
						"drop procedure structProcedure",
						Set.of( OracleDialect.class.getName() )
				)
		);
	}

	// Each test starts from two holders: id 1 with aggregate 1, id 2 with aggregate 2.
	@BeforeEach
	public void setUp() {
		inTransaction(
				session -> {
					session.persist( new StructHolder( 1L, EmbeddableAggregate.createAggregate1() ) );
					session.persist( new StructHolder( 2L, EmbeddableAggregate.createAggregate2() ) );
				}
		);
	}

	@AfterEach
	protected void cleanupTest() {
		inTransaction(
				session -> {
					session.createQuery( "delete from StructHolder h" ).executeUpdate();
				}
		);
	}

	// Replacing the whole aggregate value persists correctly through flush/clear.
	@Test
	public void testUpdate() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					StructHolder structHolder = entityManager.find( StructHolder.class, 1L );
					structHolder.setAggregate( EmbeddableAggregate.createAggregate2() );
					entityManager.flush();
					entityManager.clear();
					assertStructEquals( EmbeddableAggregate.createAggregate2(), entityManager.find( StructHolder.class, 1L ).getAggregate() );
				}
		);
	}

	// Reading the struct column back yields the originally persisted aggregate.
	@Test
	public void testFetch() {
		sessionFactoryScope().inSession(
				entityManager -> {
					List<StructHolder> structHolders = entityManager.createQuery( "from StructHolder b where b.id = 1", StructHolder.class ).getResultList();
					assertEquals( 1, structHolders.size() );
					assertEquals( 1L, structHolders.get( 0 ).getId() );
					assertStructEquals( EmbeddableAggregate.createAggregate1(), structHolders.get( 0 ).getAggregate() );
				}
		);
	}

	@Test
	public void testFetchNull() {
		sessionFactoryScope().inSession(
				entityManager -> {
					List<StructHolder> structHolders = entityManager.createQuery( "from StructHolder b where b.id = 2", StructHolder.class ).getResultList();
					assertEquals( 1, structHolders.size() );
					assertEquals( 2L, structHolders.get( 0 ).getId() );
					assertStructEquals( EmbeddableAggregate.createAggregate2(), structHolders.get( 0 ).getAggregate() );
				}
		);
	}

	// Selecting the aggregate directly produces a materialized EmbeddableAggregate domain result.
	@Test
	public void testDomainResult() {
		sessionFactoryScope().inSession(
				entityManager -> {
					List<EmbeddableAggregate> structs = entityManager.createQuery( "select b.aggregate from StructHolder b where b.id = 1", EmbeddableAggregate.class ).getResultList();
					assertEquals( 1, structs.size() );
					assertStructEquals( EmbeddableAggregate.createAggregate1(), structs.get( 0 ) );
				}
		);
	}

	// Every individual aggregate member can be selected; reassembling them
	// reproduces the persisted aggregate exactly.
	@Test
	public void testSelectionItems() {
		sessionFactoryScope().inSession(
				entityManager -> {
					List<Tuple> tuples = entityManager.createQuery(
							"select " +
									"b.aggregate.theInt," +
									"b.aggregate.theDouble," +
									"b.aggregate.theBoolean," +
									"b.aggregate.theNumericBoolean," +
									"b.aggregate.theStringBoolean," +
									"b.aggregate.theString," +
									"b.aggregate.theInteger," +
									"b.aggregate.theClob," +
									"b.aggregate.theBinary," +
									"b.aggregate.theDate," +
									"b.aggregate.theTime," +
									"b.aggregate.theTimestamp," +
									"b.aggregate.theInstant," +
									"b.aggregate.theUuid," +
									"b.aggregate.gender," +
									"b.aggregate.convertedGender," +
									"b.aggregate.ordinalGender," +
									"b.aggregate.theDuration," +
									"b.aggregate.theLocalDateTime," +
									"b.aggregate.theLocalDate," +
									"b.aggregate.theLocalTime," +
									"b.aggregate.theZonedDateTime," +
									"b.aggregate.theOffsetDateTime," +
									"b.aggregate.mutableValue " +
									"from StructHolder b where b.id = 1",
							Tuple.class
					).getResultList();
					assertEquals( 1, tuples.size() );
					final Tuple tuple = tuples.get( 0 );
					final EmbeddableAggregate struct = new EmbeddableAggregate();
					struct.setTheInt( tuple.get( 0, int.class ) );
					struct.setTheDouble( tuple.get( 1, Double.class ) );
					struct.setTheBoolean( tuple.get( 2, Boolean.class ) );
					struct.setTheNumericBoolean( tuple.get( 3, Boolean.class ) );
					struct.setTheStringBoolean( tuple.get( 4, Boolean.class ) );
					struct.setTheString( tuple.get( 5, String.class ) );
					struct.setTheInteger( tuple.get( 6, Integer.class ) );
					struct.setTheClob( tuple.get( 7, Clob.class ) );
					struct.setTheBinary( tuple.get( 8, byte[].class ) );
					struct.setTheDate( tuple.get( 9, Date.class ) );
					struct.setTheTime( tuple.get( 10, Time.class ) );
					struct.setTheTimestamp( tuple.get( 11, Timestamp.class ) );
					struct.setTheInstant( tuple.get( 12, Instant.class ) );
					struct.setTheUuid( tuple.get( 13, UUID.class ) );
					struct.setGender( tuple.get( 14, EntityOfBasics.Gender.class ) );
					struct.setConvertedGender( tuple.get( 15, EntityOfBasics.Gender.class ) );
					struct.setOrdinalGender( tuple.get( 16, EntityOfBasics.Gender.class ) );
					struct.setTheDuration( tuple.get( 17, Duration.class ) );
					struct.setTheLocalDateTime( tuple.get( 18, LocalDateTime.class ) );
					struct.setTheLocalDate( tuple.get( 19, LocalDate.class ) );
					struct.setTheLocalTime( tuple.get( 20, LocalTime.class ) );
					struct.setTheZonedDateTime( tuple.get( 21, ZonedDateTime.class ) );
					struct.setTheOffsetDateTime( tuple.get( 22, OffsetDateTime.class ) );
					struct.setMutableValue( tuple.get( 23, MutableValue.class ) );
					EmbeddableAggregate.assertEquals( EmbeddableAggregate.createAggregate1(), struct );
				}
		);
	}

	// The aggregate can appear in a bulk-delete predicate.
	@Test
	public void testDeleteWhere() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					entityManager.createQuery( "delete StructHolder b where b.aggregate is not null" ).executeUpdate();
					assertNull( entityManager.find( StructHolder.class, 1L ) );
				}
		);
	}

	// Bulk update may assign the whole aggregate (here: to null).
	@Test
	public void testUpdateAggregate() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					entityManager.createQuery( "update StructHolder b set b.aggregate = null" ).executeUpdate();
					assertNull( entityManager.find( StructHolder.class, 1L ).getAggregate() );
				}
		);
	}

	// Bulk update may assign a single member within the struct.
	@Test
	public void testUpdateAggregateMember() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					entityManager.createQuery( "update StructHolder b set b.aggregate.theString = null" ).executeUpdate();
					EmbeddableAggregate struct = EmbeddableAggregate.createAggregate1();
					struct.setTheString( null );
					assertStructEquals( struct, entityManager.find( StructHolder.class, 1L ).getAggregate() );
				}
		);
	}

	@Test
	public void testUpdateMultipleAggregateMembers() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					entityManager.createQuery( "update StructHolder b set b.aggregate.theString = null, b.aggregate.theUuid = null" ).executeUpdate();
					EmbeddableAggregate struct = EmbeddableAggregate.createAggregate1();
					struct.setTheString( null );
					struct.setTheUuid( null );
					assertStructEquals( struct, entityManager.find( StructHolder.class, 1L ).getAggregate() );
				}
		);
	}

	// Updating every member of the struct via parameters turns holder 2's
	// aggregate into aggregate 1.
	@Test
	public void testUpdateAllAggregateMembers() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					EmbeddableAggregate struct = EmbeddableAggregate.createAggregate1();
					entityManager.createQuery(
									"update StructHolder b set " +
											"b.aggregate.theInt = :theInt," +
											"b.aggregate.theDouble = :theDouble," +
											"b.aggregate.theBoolean = :theBoolean," +
											"b.aggregate.theNumericBoolean = :theNumericBoolean," +
											"b.aggregate.theStringBoolean = :theStringBoolean," +
											"b.aggregate.theString = :theString," +
											"b.aggregate.theInteger = :theInteger," +
											"b.aggregate.theClob = :theClob," +
											"b.aggregate.theBinary = :theBinary," +
											"b.aggregate.theDate = :theDate," +
											"b.aggregate.theTime = :theTime," +
											"b.aggregate.theTimestamp = :theTimestamp," +
											"b.aggregate.theInstant = :theInstant," +
											"b.aggregate.theUuid = :theUuid," +
											"b.aggregate.gender = :gender," +
											"b.aggregate.convertedGender = :convertedGender," +
											"b.aggregate.ordinalGender = :ordinalGender," +
											"b.aggregate.theDuration = :theDuration," +
											"b.aggregate.theLocalDateTime = :theLocalDateTime," +
											"b.aggregate.theLocalDate = :theLocalDate," +
											"b.aggregate.theLocalTime = :theLocalTime," +
											"b.aggregate.theZonedDateTime = :theZonedDateTime," +
											"b.aggregate.theOffsetDateTime = :theOffsetDateTime," +
											"b.aggregate.mutableValue = :mutableValue " +
											"where b.id = 2"
							)
							.setParameter( "theInt", struct.getTheInt() )
							.setParameter( "theDouble", struct.getTheDouble() )
							.setParameter( "theBoolean", struct.isTheBoolean() )
							.setParameter( "theNumericBoolean", struct.isTheNumericBoolean() )
							.setParameter( "theStringBoolean", struct.isTheStringBoolean() )
							.setParameter( "theString", struct.getTheString() )
							.setParameter( "theInteger", struct.getTheInteger() )
							.setParameter( "theClob", struct.getTheClob() )
							.setParameter( "theBinary", struct.getTheBinary() )
							.setParameter( "theDate", struct.getTheDate() )
							.setParameter( "theTime", struct.getTheTime() )
							.setParameter( "theTimestamp", struct.getTheTimestamp() )
							.setParameter( "theInstant", struct.getTheInstant() )
							.setParameter( "theUuid", struct.getTheUuid() )
							.setParameter( "gender", struct.getGender() )
							.setParameter( "convertedGender", struct.getConvertedGender() )
							.setParameter( "ordinalGender", struct.getOrdinalGender() )
							.setParameter( "theDuration", struct.getTheDuration() )
							.setParameter( "theLocalDateTime", struct.getTheLocalDateTime() )
							.setParameter( "theLocalDate", struct.getTheLocalDate() )
							.setParameter( "theLocalTime", struct.getTheLocalTime() )
							.setParameter( "theZonedDateTime", struct.getTheZonedDateTime() )
							.setParameter( "theOffsetDateTime", struct.getTheOffsetDateTime() )
							.setParameter( "mutableValue", struct.getMutableValue() )
							.executeUpdate();
					assertStructEquals( EmbeddableAggregate.createAggregate1(), entityManager.find( StructHolder.class, 2L ).getAggregate() );
				}
		);
	}

	@Test
	public void testNativeQuery() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					//noinspection unchecked
					List<Object> resultList = entityManager.createNativeQuery(
									"select b.aggregate from StructHolder b where b.id = 1",
									// DB2 does not support structs on the driver level, and we instead do a XML serialization/deserialization
									// So in order to receive the correct value, we have to specify the actual type that we expect
									getDialect() instanceof DB2Dialect
											? (Class<Object>) (Class<?>) EmbeddableAggregate.class
											// Using Object.class on purpose to verify Dialect#resolveSqlTypeDescriptor works
											: Object.class
							)
							.getResultList();
					assertEquals( 1, resultList.size() );
					assertInstanceOf( EmbeddableAggregate.class, resultList.get( 0 ) );
					EmbeddableAggregate struct = (EmbeddableAggregate) resultList.get( 0 );
					assertStructEquals( EmbeddableAggregate.createAggregate1(), struct );
				}
		);
	}

	// Calls the dialect-specific structFunction registered in applyMetadataBuilder;
	// it is defined to return aggregate 3.
	@Test
	public void testFunction() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					ProcedureCall structFunction = entityManager.createStoredProcedureCall( "structFunction" )
							.markAsFunctionCall( EmbeddableAggregate.class );
					//noinspection unchecked
					final List<Object> resultList = structFunction.getResultList();
					assertEquals( 1, resultList.size() );
					assertInstanceOf( EmbeddableAggregate.class, resultList.get( 0 ) );
					EmbeddableAggregate result = (EmbeddableAggregate) resultList.get( 0 );
					EmbeddableAggregate struct = EmbeddableAggregate.createAggregate3();
					assertStructEquals( struct, result );
				}
		);
	}

	// Calls structProcedure and reads aggregate 3 back through its OUT parameter.
	@Test
	@SkipForDialect(dialectClass = PostgreSQLDialect.class, majorVersion = 10, reason = "Procedures were only introduced in version 11")
	@SkipForDialect(dialectClass = DB2Dialect.class, reason = "DB2 does not support struct types in procedures")
	public void testProcedure() {
		sessionFactoryScope().inTransaction(
				entityManager -> {
					ProcedureCall structFunction = entityManager.createStoredProcedureCall( "structProcedure" );
					ProcedureParameter<EmbeddableAggregate> resultParameter = structFunction.registerParameter(
							"structType",
							EmbeddableAggregate.class,
							ParameterMode.OUT
					);
					structFunction.setParameter( resultParameter, null );
					EmbeddableAggregate result = structFunction.getOutputs().getOutputParameterValue( resultParameter );
					EmbeddableAggregate struct = EmbeddableAggregate.createAggregate3();
					assertStructEquals( struct, result );
				}
		);
	}

	// Asserts equality of a representative subset of the aggregate's attributes.
	private static void assertStructEquals(EmbeddableAggregate struct, EmbeddableAggregate struct2) {
		assertArrayEquals( struct.getTheBinary(), struct2.getTheBinary() );
		assertEquals( struct.getTheString(), struct2.getTheString() );
		assertEquals( struct.getTheLocalDateTime(), struct2.getTheLocalDateTime() );
		assertEquals( struct.getTheUuid(), struct2.getTheUuid() );
	}

	// NOTE: the tag:: / end:: comments below are asciidoc include markers for the
	// user guide; do not remove or reorder them.
	//tag::embeddable-struct-type-mapping-example[]
	@Entity(name = "StructHolder")
	public static class StructHolder {

		@Id
		private Long id;
		//end::embeddable-struct-type-mapping-example[]
		@Struct(name = "structType")
		//tag::embeddable-struct-type-mapping-example[]
		private EmbeddableAggregate aggregate;

		//Getters and setters are omitted for brevity

		//end::embeddable-struct-type-mapping-example[]
		public StructHolder() {
		}

		public StructHolder(Long id, EmbeddableAggregate aggregate) {
			this.id = id;
			this.aggregate = aggregate;
		}

		public Long getId() {
			return id;
		}

		public void setId(Long id) {
			this.id = id;
		}

		public EmbeddableAggregate getAggregate() {
			return aggregate;
		}

		public void setAggregate(EmbeddableAggregate aggregate) {
			this.aggregate = aggregate;
		}
	}
}

View File

@ -572,6 +572,11 @@ public class CockroachLegacyDialect extends Dialect {
return true;
}
@Override
public boolean supportsTemporalLiteralOffset() {
return true;
}
@Override
public void appendDateTimeLiteral(
SqlAppender appender,
@ -585,7 +590,7 @@ public class CockroachLegacyDialect extends Dialect {
appender.appendSql( '\'' );
break;
case TIME:
if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
if ( supportsTemporalLiteralOffset() && temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
appender.appendSql( "time with time zone '" );
appendAsTime( appender, temporalAccessor, true, jdbcTimeZone );
}
@ -596,9 +601,16 @@ public class CockroachLegacyDialect extends Dialect {
appender.appendSql( '\'' );
break;
case TIMESTAMP:
appender.appendSql( "timestamp with time zone '" );
appendAsTimestampWithMicros( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
appender.appendSql( '\'' );
if ( supportsTemporalLiteralOffset() && temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
appender.appendSql( "timestamp with time zone '" );
appendAsTimestampWithMicros( appender, temporalAccessor, true, jdbcTimeZone );
appender.appendSql( '\'' );
}
else {
appender.appendSql( "timestamp '" );
appendAsTimestampWithMicros( appender, temporalAccessor, false, jdbcTimeZone );
appender.appendSql( '\'' );
}
break;
default:
throw new IllegalArgumentException();

View File

@ -146,7 +146,7 @@ public class CockroachLegacySqlAstTranslator<T extends JdbcOperation> extends Ab
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );

View File

@ -16,9 +16,12 @@ import java.util.List;
import org.hibernate.LockOptions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.DB2Dialect;
import org.hibernate.dialect.DB2StructJdbcType;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.dialect.aggregate.DB2AggregateSupport;
import org.hibernate.dialect.function.CastingConcatFunction;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.CountFunction;
@ -187,6 +190,9 @@ public class DB2LegacyDialect extends Dialect {
return "timestamp($p)";
case TIME_WITH_TIMEZONE:
return "time";
case BINARY:
// should use 'binary' since version 11
return getDB2Version().isBefore( 11 ) ? "char($l) for bit data" : super.columnType( sqlTypeCode );
case VARBINARY:
// should use 'varbinary' since version 11
return getDB2Version().isBefore( 11 ) ? "varchar($l) for bit data" : super.columnType( sqlTypeCode );
@ -205,8 +211,8 @@ public class DB2LegacyDialect extends Dialect {
if ( getDB2Version().isBefore( 11 ) ) {
// should use 'binary' since version 11
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder( BINARY, "varchar($l) for bit data", this )
.withTypeCapacity( 254, "char($l) for bit data" )
CapacityDependentDdlType.builder( BINARY, columnType( VARBINARY ), this )
.withTypeCapacity( 254, columnType( BINARY ) )
.build()
);
}
@ -250,8 +256,10 @@ public class DB2LegacyDialect extends Dialect {
functionFactory.avg_castingNonDoubleArguments( this, SqlAstNodeRenderingMode.DEFAULT );
functionFactory.cot();
functionFactory.sinh();
functionFactory.cosh();
functionFactory.tanh();
functionFactory.degrees();
functionFactory.log();
functionFactory.log10();
functionFactory.radians();
functionFactory.rand();
@ -291,6 +299,7 @@ public class DB2LegacyDialect extends Dialect {
functionFactory.trunc();
functionFactory.truncate();
functionFactory.insert();
functionFactory.characterLength_length( SqlAstNodeRenderingMode.DEFAULT );
functionFactory.stddev();
functionFactory.regrLinearRegressionAggregates();
functionFactory.variance();
@ -424,14 +433,14 @@ public class DB2LegacyDialect extends Dialect {
else {
pattern.append( "?3" );
}
pattern.append( "," );
pattern.append( ',' );
if ( castFrom ) {
pattern.append( "cast(?2 as timestamp)" );
}
else {
pattern.append( "?2" );
}
pattern.append( ")" );
pattern.append( ')' );
switch ( unit ) {
case NATIVE:
pattern.append( "+(microsecond(?3)-microsecond(?2))/1e6)" );
@ -440,7 +449,7 @@ public class DB2LegacyDialect extends Dialect {
pattern.append( "*1e9+(microsecond(?3)-microsecond(?2))*1e3)" );
break;
case MONTH:
pattern.append( ")" );
pattern.append( ')' );
break;
case QUARTER:
pattern.append( "/3)" );
@ -720,6 +729,7 @@ public class DB2LegacyDialect extends Dialect {
jdbcTypeRegistry.addDescriptor( Types.NUMERIC, DecimalJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptor( XmlJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptor( DB2StructJdbcType.INSTANCE );
// DB2 requires a custom binder for binding untyped nulls that resolves the type through the statement
typeContributions.contributeJdbcType( ObjectNullResolvingJdbcType.INSTANCE );
@ -735,6 +745,11 @@ public class DB2LegacyDialect extends Dialect {
);
}
@Override
public AggregateSupport getAggregateSupport() {
return DB2AggregateSupport.INSTANCE;
}
@Override
public CallableStatementSupport getCallableStatementSupport() {
return DB2CallableStatementSupport.INSTANCE;
@ -897,4 +912,29 @@ public class DB2LegacyDialect extends Dialect {
builder.setAutoQuoteInitialUnderscore(true);
return super.buildIdentifierHelper(builder, dbMetaData);
}
@Override
public boolean canDisableConstraints() {
return true;
}
@Override
public String getDisableConstraintStatement(String tableName, String name) {
return "alter table " + tableName + " alter foreign key " + name + " not enforced";
}
@Override
public String getEnableConstraintStatement(String tableName, String name) {
return "alter table " + tableName + " alter foreign key " + name + " enforced";
}
@Override
public String getTruncateTableStatement(String tableName) {
return super.getTruncateTableStatement(tableName) + " immediate";
}
@Override
public String getCreateUserDefinedTypeExtensionsString() {
return " instantiable mode db2sql";
}
}

View File

@ -31,7 +31,9 @@ import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableGroupJoin;
import org.hibernate.sql.ast.tree.from.TableReferenceJoin;
import org.hibernate.sql.ast.tree.insert.InsertSelectStatement;
import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.ast.tree.select.QueryGroup;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.ast.tree.select.QuerySpec;
@ -119,7 +121,12 @@ public class DB2LegacySqlAstTranslator<T extends JdbcOperation> extends Abstract
@Override
protected void renderExpressionAsClauseItem(Expression expression) {
expression.accept( this );
if ( expression instanceof Predicate && getDB2Version().isBefore( 11 ) ) {
super.renderExpressionAsClauseItem( expression );
}
else {
expression.accept( this );
}
}
@Override
@ -332,6 +339,15 @@ public class DB2LegacySqlAstTranslator<T extends JdbcOperation> extends Abstract
}
}
@Override
protected void visitInsertStatementOnly(InsertSelectStatement statement) {
final boolean closeWrapper = renderReturningClause( statement );
super.visitInsertStatementOnly( statement );
if ( closeWrapper ) {
appendSql( ')' );
}
}
protected boolean renderReturningClause(MutationStatement statement) {
final List<ColumnReference> returningColumns = statement.getReturningColumns();
final int size = returningColumns.size();
@ -419,6 +435,11 @@ public class DB2LegacySqlAstTranslator<T extends JdbcOperation> extends Abstract
return getFromDual();
}
@Override
protected void visitReturningColumns(List<ColumnReference> returningColumns) {
// For DB2 we use #renderReturningClause to render a wrapper around the DML statement
}
public DatabaseVersion getDB2Version() {
return this.getDialect().getVersion();
}

View File

@ -299,7 +299,6 @@ public class DerbyLegacyDialect extends Dialect {
functionFactory.concat_pipeOperator();
functionFactory.cot();
functionFactory.chr_char();
functionFactory.degrees();
functionFactory.radians();
functionFactory.log10();

View File

@ -39,6 +39,8 @@ import org.hibernate.dialect.sequence.H2V2SequenceSupport;
import org.hibernate.dialect.sequence.SequenceSupport;
import org.hibernate.dialect.temptable.TemporaryTable;
import org.hibernate.dialect.temptable.TemporaryTableKind;
import org.hibernate.dialect.unique.CreateTableUniqueDelegate;
import org.hibernate.dialect.unique.UniqueDelegate;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.exception.ConstraintViolationException;
@ -73,6 +75,7 @@ import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorH2
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorLegacyImpl;
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorNoOpImpl;
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.descriptor.DateTimeUtils;
import org.hibernate.type.descriptor.jdbc.InstantJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.UUIDJdbcType;
@ -98,6 +101,7 @@ import static org.hibernate.type.SqlTypes.LONG32VARCHAR;
import static org.hibernate.type.SqlTypes.NCHAR;
import static org.hibernate.type.SqlTypes.NUMERIC;
import static org.hibernate.type.SqlTypes.NVARCHAR;
import static org.hibernate.type.SqlTypes.OTHER;
import static org.hibernate.type.SqlTypes.TIMESTAMP_UTC;
import static org.hibernate.type.SqlTypes.UUID;
import static org.hibernate.type.SqlTypes.VARBINARY;
@ -105,7 +109,8 @@ import static org.hibernate.type.SqlTypes.VARCHAR;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsDate;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsLocalTime;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTime;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMicros;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMillis;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithNanos;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMillis;
/**
@ -124,6 +129,7 @@ public class H2LegacyDialect extends Dialect {
private final SequenceInformationExtractor sequenceInformationExtractor;
private final String querySequenceString;
private final UniqueDelegate uniqueDelegate = new CreateTableUniqueDelegate( this );
public H2LegacyDialect(DialectResolutionInfo info) {
this( parseVersion( info ) );
@ -376,8 +382,17 @@ public class H2LegacyDialect extends Dialect {
int scale,
JdbcTypeRegistry jdbcTypeRegistry) {
// As of H2 2.0 we get a FLOAT type code even though it is a DOUBLE
if ( jdbcTypeCode == FLOAT && "DOUBLE PRECISION".equals( columnTypeName ) ) {
return jdbcTypeRegistry.getDescriptor( DOUBLE );
switch ( jdbcTypeCode ) {
case FLOAT:
if ( "DOUBLE PRECISION".equals( columnTypeName ) ) {
return jdbcTypeRegistry.getDescriptor( DOUBLE );
}
break;
case OTHER:
if ( "GEOMETRY".equals( columnTypeName ) ) {
return jdbcTypeRegistry.getDescriptor( GEOMETRY );
}
break;
}
return super.resolveSqlTypeDescriptor( columnTypeName, jdbcTypeCode, precision, scale, jdbcTypeRegistry );
}
@ -450,6 +465,7 @@ public class H2LegacyDialect extends Dialect {
return "datediff(?1,?2,?3)";
}
@Override
public void appendDateTimeLiteral(
SqlAppender appender,
@ -463,7 +479,7 @@ public class H2LegacyDialect extends Dialect {
appender.appendSql( '\'' );
break;
case TIME:
if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) && supportsTimeWithTimeZoneLiteral() ) {
if ( supportsTimeLiteralOffset() && temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
appender.appendSql( "time with time zone '" );
appendAsTime( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
}
@ -474,9 +490,16 @@ public class H2LegacyDialect extends Dialect {
appender.appendSql( '\'' );
break;
case TIMESTAMP:
appender.appendSql( "timestamp with time zone '" );
appendAsTimestampWithMicros( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
appender.appendSql( '\'' );
if ( supportsTemporalLiteralOffset() && temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
appender.appendSql( "timestamp with time zone '" );
appendAsTimestampWithNanos( appender, temporalAccessor, true, jdbcTimeZone );
appender.appendSql( '\'' );
}
else {
appender.appendSql( "timestamp '" );
appendAsTimestampWithNanos( appender, temporalAccessor, false, jdbcTimeZone );
appender.appendSql( '\'' );
}
break;
default:
throw new IllegalArgumentException();
@ -492,7 +515,7 @@ public class H2LegacyDialect extends Dialect {
appender.appendSql( '\'' );
break;
case TIME:
if ( supportsTimeWithTimeZoneLiteral() ) {
if ( supportsTimeLiteralOffset() ) {
appender.appendSql( "time with time zone '" );
appendAsTime( appender, date, jdbcTimeZone );
}
@ -504,7 +527,7 @@ public class H2LegacyDialect extends Dialect {
break;
case TIMESTAMP:
appender.appendSql( "timestamp with time zone '" );
appendAsTimestampWithMicros( appender, date, jdbcTimeZone );
appendAsTimestampWithNanos( appender, date, jdbcTimeZone );
appender.appendSql( '\'' );
break;
default:
@ -525,7 +548,7 @@ public class H2LegacyDialect extends Dialect {
appender.appendSql( '\'' );
break;
case TIME:
if ( supportsTimeWithTimeZoneLiteral() ) {
if ( supportsTimeLiteralOffset() ) {
appender.appendSql( "time with time zone '" );
appendAsTime( appender, calendar, jdbcTimeZone );
}
@ -545,7 +568,7 @@ public class H2LegacyDialect extends Dialect {
}
}
public boolean supportsTimeWithTimeZoneLiteral() {
public boolean supportsTimeLiteralOffset() {
return getVersion().isSameOrAfter( 1, 4, 200 );
}
@ -824,4 +847,24 @@ public class H2LegacyDialect extends Dialect {
public String generatedAs(String generatedAs) {
return " generated always as (" + generatedAs + ")";
}
@Override
public boolean canDisableConstraints() {
return true;
}
@Override
public String getEnableConstraintsStatement() {
return "set referential_integrity true";
}
@Override
public String getDisableConstraintsStatement() {
return "set referential_integrity false";
}
@Override
public UniqueDelegate getUniqueDelegate() {
return uniqueDelegate;
}
}

View File

@ -9,6 +9,7 @@ package org.hibernate.community.dialect;
import java.util.List;
import org.hibernate.LockMode;
import org.hibernate.dialect.identity.H2IdentityColumnSupport;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.query.sqm.ComparisonOperator;
@ -21,6 +22,7 @@ import org.hibernate.sql.ast.tree.cte.CteContainer;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.cte.CteTableGroup;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
@ -35,6 +37,7 @@ import org.hibernate.sql.ast.tree.predicate.LikePredicate;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.ast.tree.select.SelectClause;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.sql.model.internal.TableInsertStandard;
/**
* A legacy SQL AST translator for H2.
@ -49,6 +52,39 @@ public class H2LegacySqlAstTranslator<T extends JdbcOperation> extends AbstractS
super( sessionFactory, statement );
}
@Override
public void visitStandardTableInsert(TableInsertStandard tableInsert) {
if ( CollectionHelper.isNotEmpty( tableInsert.getReturningColumns() ) ) {
visitReturningInsertStatement( tableInsert );
}
else {
super.visitStandardTableInsert( tableInsert );
}
}
public void visitReturningInsertStatement(TableInsertStandard tableInsert) {
assert tableInsert.getReturningColumns() != null
&& !tableInsert.getReturningColumns().isEmpty();
final H2IdentityColumnSupport identitySupport = (H2IdentityColumnSupport) getSessionFactory()
.getJdbcServices()
.getDialect()
.getIdentityColumnSupport();
identitySupport.render(
tableInsert,
this::appendSql,
(columnReference) -> columnReference.accept( this ),
() -> super.visitStandardTableInsert( tableInsert ),
getSessionFactory()
);
}
@Override
protected void visitReturningColumns(List<ColumnReference> returningColumns) {
// do nothing - this is handled via `#visitReturningInsertStatement`
}
@Override
public void visitCteContainer(CteContainer cteContainer) {
// H2 has various bugs in different versions that make it impossible to use CTEs with parameters reliably
@ -180,7 +216,7 @@ public class H2LegacySqlAstTranslator<T extends JdbcOperation> extends AbstractS
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );

View File

@ -36,6 +36,8 @@ import org.hibernate.dialect.sequence.HSQLSequenceSupport;
import org.hibernate.dialect.sequence.SequenceSupport;
import org.hibernate.dialect.temptable.TemporaryTable;
import org.hibernate.dialect.temptable.TemporaryTableKind;
import org.hibernate.dialect.unique.CreateTableUniqueDelegate;
import org.hibernate.dialect.unique.UniqueDelegate;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
@ -98,6 +100,7 @@ public class HSQLLegacyDialect extends Dialect {
CoreMessageLogger.class,
org.hibernate.community.dialect.HSQLLegacyDialect.class.getName()
);
private final UniqueDelegate uniqueDelegate = new CreateTableUniqueDelegate( this );
public HSQLLegacyDialect(DialectResolutionInfo info) {
super( info );
@ -827,4 +830,9 @@ public class HSQLLegacyDialect extends Dialect {
builder.setAutoQuoteInitialUnderscore(true);
return super.buildIdentifierHelper(builder, dbMetaData);
}
@Override
public UniqueDelegate getUniqueDelegate() {
return uniqueDelegate;
}
}

View File

@ -9,12 +9,14 @@ package org.hibernate.community.dialect;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.InnoDBStorageEngine;
import org.hibernate.dialect.MySQLServerConfiguration;
import org.hibernate.dialect.MySQLStorageEngine;
import org.hibernate.dialect.NationalizationSupport;
import org.hibernate.dialect.VarcharUUIDJdbcType;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.sequence.MariaDBSequenceSupport;
import org.hibernate.dialect.sequence.SequenceSupport;
@ -24,6 +26,7 @@ import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.SqlAstTranslatorFactory;
import org.hibernate.sql.ast.spi.SqlAppender;
@ -33,6 +36,16 @@ import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorMariaDBDatabaseImpl;
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import static org.hibernate.query.sqm.produce.function.FunctionParameterType.NUMERIC;
import static org.hibernate.type.SqlTypes.GEOMETRY;
import static org.hibernate.type.SqlTypes.OTHER;
import static org.hibernate.type.SqlTypes.UUID;
import static org.hibernate.type.SqlTypes.VARBINARY;
/**
* A {@linkplain Dialect SQL dialect} for MariaDB
@ -85,10 +98,58 @@ public class MariaDBLegacyDialect extends MySQLLegacyDialect {
);
if ( getVersion().isSameOrAfter( 10, 3, 3 ) ) {
commonFunctionFactory.inverseDistributionOrderedSetAggregates_windowEmulation();
queryEngine.getSqmFunctionRegistry().patternDescriptorBuilder( "median", "median(?1) over ()" )
.setInvariantType( queryEngine.getTypeConfiguration().getBasicTypeRegistry().resolve( StandardBasicTypes.DOUBLE ) )
.setExactArgumentCount( 1 )
.setParameterTypes(NUMERIC)
.register();
}
}
}
@Override
protected void registerColumnTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
super.registerColumnTypes( typeContributions, serviceRegistry );
final DdlTypeRegistry ddlTypeRegistry = typeContributions.getTypeConfiguration().getDdlTypeRegistry();
if ( getVersion().isSameOrAfter( 10, 7 ) ) {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "uuid", this ) );
}
}
@Override
public JdbcType resolveSqlTypeDescriptor(
String columnTypeName,
int jdbcTypeCode,
int precision,
int scale,
JdbcTypeRegistry jdbcTypeRegistry) {
switch ( jdbcTypeCode ) {
case OTHER:
switch ( columnTypeName ) {
case "uuid":
jdbcTypeCode = UUID;
break;
}
break;
case VARBINARY:
if ( "GEOMETRY".equals( columnTypeName ) ) {
jdbcTypeCode = GEOMETRY;
}
break;
}
return super.resolveSqlTypeDescriptor( columnTypeName, jdbcTypeCode, precision, scale, jdbcTypeRegistry );
}
@Override
public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
super.contributeTypes( typeContributions, serviceRegistry );
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration()
.getJdbcTypeRegistry();
if ( getVersion().isSameOrAfter( 10, 7 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( VarcharUUIDJdbcType.INSTANCE );
}
}
@Override
public SqlAstTranslatorFactory getSqlAstTranslatorFactory() {
return new StandardSqlAstTranslatorFactory() {

View File

@ -56,12 +56,14 @@ import org.hibernate.query.sqm.CastType;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.query.sqm.NullOrdering;
import org.hibernate.query.sqm.TemporalUnit;
import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import org.hibernate.query.sqm.mutation.internal.temptable.AfterUseAction;
import org.hibernate.query.sqm.mutation.internal.temptable.BeforeUseAction;
import org.hibernate.query.sqm.mutation.internal.temptable.LocalTemporaryTableInsertStrategy;
import org.hibernate.query.sqm.mutation.internal.temptable.LocalTemporaryTableMutationStrategy;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableInsertStrategy;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableMutationStrategy;
import org.hibernate.query.sqm.produce.function.FunctionParameterType;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.SqlAstTranslatorFactory;
@ -211,7 +213,7 @@ public class MySQLLegacyDialect extends Dialect {
return MyISAMStorageEngine.INSTANCE;
}
else {
throw new UnsupportedOperationException( "The " + storageEngine + " storage engine is not supported!" );
throw new UnsupportedOperationException( "The " + storageEngine + " storage engine is not supported" );
}
}
@ -480,8 +482,14 @@ public class MySQLLegacyDialect extends Dialect {
int precision,
int scale,
JdbcTypeRegistry jdbcTypeRegistry) {
if ( jdbcTypeCode == Types.BIT ) {
return jdbcTypeRegistry.getDescriptor( Types.BOOLEAN );
switch ( jdbcTypeCode ) {
case Types.BIT:
return jdbcTypeRegistry.getDescriptor( Types.BOOLEAN );
case Types.BINARY:
if ( "GEOMETRY".equals( columnTypeName ) ) {
jdbcTypeCode = GEOMETRY;
}
break;
}
return super.resolveSqlTypeDescriptor(
columnTypeName,
@ -516,7 +524,6 @@ public class MySQLLegacyDialect extends Dialect {
functionFactory.log();
functionFactory.log2();
functionFactory.log10();
functionFactory.pi();
functionFactory.trim2();
functionFactory.octetLength();
functionFactory.reverse();
@ -543,7 +550,6 @@ public class MySQLLegacyDialect extends Dialect {
functionFactory.bitLength();
functionFactory.octetLength();
functionFactory.ascii();
functionFactory.chr_char();
functionFactory.instr();
functionFactory.substr();
//also natively supports ANSI-style substring()
@ -565,26 +571,53 @@ public class MySQLLegacyDialect extends Dialect {
BasicTypeRegistry basicTypeRegistry = queryEngine.getTypeConfiguration().getBasicTypeRegistry();
queryEngine.getSqmFunctionRegistry().noArgsBuilder( "localtime" )
SqmFunctionRegistry functionRegistry = queryEngine.getSqmFunctionRegistry();
functionRegistry.noArgsBuilder( "localtime" )
.setInvariantType(basicTypeRegistry.resolve( StandardBasicTypes.TIMESTAMP ))
.setUseParenthesesWhenNoArgs( false )
.register();
if ( getMySQLVersion().isBefore( 5, 7 ) ) {
functionFactory.sysdateParens();
// pi() produces a value with 7 digits unless we're explicit
if ( getMySQLVersion().isSameOrAfter( 8 ) ) {
functionRegistry.patternDescriptorBuilder( "pi", "cast(pi() as double)" )
.setInvariantType( basicTypeRegistry.resolve( StandardBasicTypes.DOUBLE ) )
.setExactArgumentCount( 0 )
.setArgumentListSignature( "" )
.register();
}
else {
// MySQL timestamp type defaults to precision 0 (seconds) but
// we want the standard default precision of 6 (microseconds)
functionFactory.sysdateExplicitMicros();
if ( getMySQLVersion().isSameOrAfter( 8, 0, 2 ) ) {
functionFactory.windowFunctions();
if ( getMySQLVersion().isSameOrAfter( 8, 0, 11 ) ) {
functionFactory.hypotheticalOrderedSetAggregates_windowEmulation();
// But before MySQL 8, it's not possible to cast to double. Double has a default precision of 53
// and since the internal representation of pi has only 15 decimal places, we cast to decimal(53,15)
functionRegistry.patternDescriptorBuilder( "pi", "cast(pi() as decimal(53,15))" )
.setInvariantType( basicTypeRegistry.resolve( StandardBasicTypes.DOUBLE ) )
.setExactArgumentCount( 0 )
.setArgumentListSignature( "" )
.register();
if ( getMySQLVersion().isBefore( 5, 7 ) ) {
functionFactory.sysdateParens();
}
else {
// MySQL timestamp type defaults to precision 0 (seconds) but
// we want the standard default precision of 6 (microseconds)
functionFactory.sysdateExplicitMicros();
if ( getMySQLVersion().isSameOrAfter( 8, 0, 2 ) ) {
functionFactory.windowFunctions();
if ( getMySQLVersion().isSameOrAfter( 8, 0, 11 ) ) {
functionFactory.hypotheticalOrderedSetAggregates_windowEmulation();
}
}
}
}
// By default char() produces a binary string, not a character string.
// (Note also that char() is actually a variadic function in MySQL.)
functionRegistry.patternDescriptorBuilder( "chr", "char(?1 using ascii)" )
.setInvariantType(basicTypeRegistry.resolve( StandardBasicTypes.CHARACTER ))
.setExactArgumentCount(1)
.setParameterTypes(FunctionParameterType.INTEGER)
.register();
functionRegistry.registerAlternateKey( "char", "chr" );
functionFactory.listagg_groupConcat();
}
@ -754,7 +787,25 @@ public class MySQLLegacyDialect extends Dialect {
@Override
public boolean supportsColumnCheck() {
return false;
return getMySQLVersion().isSameOrAfter( 8, 0, 16 );
}
@Override
public String getEnumTypeDeclaration(String[] values) {
StringBuilder type = new StringBuilder();
type.append( "enum (" );
String separator = "";
for ( String value : values ) {
type.append( separator ).append('\'').append( value ).append('\'');
separator = ",";
}
return type.append( ')' ).toString();
}
@Override
public String getCheckCondition(String columnName, String[] values) {
//not needed, because we use an 'enum' type
return null;
}
@Override
@ -1318,4 +1369,19 @@ public class MySQLLegacyDialect extends Dialect {
return getMySQLVersion().isSameOrAfter( 8 );
}
@Override
public boolean canDisableConstraints() {
return true;
}
@Override
public String getDisableConstraintsStatement() {
return "set foreign_key_checks = 0";
}
@Override
public String getEnableConstraintsStatement() {
return "set foreign_key_checks = 1";
}
}

View File

@ -178,11 +178,6 @@ public class MySQLLegacySqlAstTranslator<T extends JdbcOperation> extends Abstra
return true;
}
@Override
protected boolean supportsWithClause() {
return getDialect().getVersion().isSameOrAfter( 8 );
}
@Override
protected boolean supportsSimpleQueryGrouping() {
return getDialect().getVersion().isSameOrAfter( 8 );
@ -193,6 +188,11 @@ public class MySQLLegacySqlAstTranslator<T extends JdbcOperation> extends Abstra
return false;
}
@Override
protected boolean supportsWithClause() {
return getDialect().getVersion().isSameOrAfter( 8 );
}
@Override
protected String getFromDual() {
return " from dual";

View File

@ -24,12 +24,16 @@ import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.OracleArrayJdbcType;
import org.hibernate.dialect.OracleBooleanJdbcType;
import org.hibernate.dialect.OracleJsonJdbcType;
import org.hibernate.dialect.OracleTypes;
import org.hibernate.dialect.OracleStructJdbcType;
import org.hibernate.dialect.OracleTypesHelper;
import org.hibernate.dialect.OracleXmlJdbcType;
import org.hibernate.dialect.Replacer;
import org.hibernate.dialect.RowLockStrategy;
import org.hibernate.dialect.TimeZoneSupport;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.dialect.aggregate.OracleAggregateSupport;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.ModeStatsModeEmulation;
import org.hibernate.dialect.function.NvlCoalesceEmulation;
@ -42,6 +46,8 @@ import org.hibernate.dialect.sequence.OracleSequenceSupport;
import org.hibernate.dialect.sequence.SequenceSupport;
import org.hibernate.dialect.temptable.TemporaryTable;
import org.hibernate.dialect.temptable.TemporaryTableKind;
import org.hibernate.dialect.unique.CreateTableUniqueDelegate;
import org.hibernate.dialect.unique.UniqueDelegate;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
@ -83,11 +89,13 @@ import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorOr
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.JavaObjectType;
import org.hibernate.type.NullType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.java.PrimitiveByteArrayJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.BlobJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonBlobJdbcType;
import org.hibernate.type.descriptor.jdbc.OracleJsonBlobJdbcType;
import org.hibernate.type.descriptor.jdbc.NullJdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectNullAsNullTypeJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
@ -118,6 +126,7 @@ import static org.hibernate.type.SqlTypes.NVARCHAR;
import static org.hibernate.type.SqlTypes.REAL;
import static org.hibernate.type.SqlTypes.SMALLINT;
import static org.hibernate.type.SqlTypes.SQLXML;
import static org.hibernate.type.SqlTypes.STRUCT;
import static org.hibernate.type.SqlTypes.TIME;
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
@ -146,6 +155,7 @@ public class OracleLegacyDialect extends Dialect {
private final LimitHandler limitHandler = supportsFetchClause( FetchClauseType.ROWS_ONLY )
? Oracle12LimitHandler.INSTANCE
: new LegacyOracleLimitHandler( getVersion() );
private final UniqueDelegate uniqueDelegate = new CreateTableUniqueDelegate(this);
public OracleLegacyDialect() {
this( DatabaseVersion.make( 8, 0 ) );
@ -198,6 +208,9 @@ public class OracleLegacyDialect extends Dialect {
functionFactory.monthsBetween();
functionFactory.everyAny_minMaxCase();
functionFactory.radians_acos();
functionFactory.degrees_acos();
functionFactory.median();
functionFactory.stddev();
functionFactory.stddevPopSamp();
@ -352,6 +365,9 @@ public class OracleLegacyDialect extends Dialect {
return "to_char(?1,'YYYY-MM-DD HH24:MI:SS.FF9 TZR')";
}
break;
case CLOB:
// Oracle doesn't like casting to clob
return "to_clob(?1)";
case DATE:
if ( from == CastType.STRING ) {
return "to_date(?1,'YYYY-MM-DD')";
@ -681,6 +697,20 @@ public class OracleLegacyDialect extends Dialect {
switch ( jdbcTypeCode ) {
case OracleTypes.JSON:
return jdbcTypeRegistry.getDescriptor( JSON );
case STRUCT:
if ( "MDSYS.SDO_GEOMETRY".equals( columnTypeName ) ) {
jdbcTypeCode = SqlTypes.GEOMETRY;
}
else {
final AggregateJdbcType aggregateDescriptor = jdbcTypeRegistry.findAggregateDescriptor(
// Skip the schema
columnTypeName.substring( columnTypeName.indexOf( '.' ) + 1 )
);
if ( aggregateDescriptor != null ) {
return aggregateDescriptor;
}
}
break;
case Types.NUMERIC:
if ( scale == -127 ) {
// For some reason, the Oracle JDBC driver reports FLOAT
@ -742,6 +772,7 @@ public class OracleLegacyDialect extends Dialect {
typeContributions.contributeJdbcType( OracleBooleanJdbcType.INSTANCE );
typeContributions.contributeJdbcType( OracleXmlJdbcType.INSTANCE );
typeContributions.contributeJdbcType( OracleStructJdbcType.INSTANCE );
if ( getVersion().isSameOrAfter( 12 ) ) {
// account for Oracle's deprecated support for LONGVARBINARY
@ -759,10 +790,10 @@ public class OracleLegacyDialect extends Dialect {
typeContributions.contributeJdbcType( descriptor );
if ( getVersion().isSameOrAfter( 21 ) ) {
typeContributions.contributeJdbcType( OracleTypesHelper.INSTANCE.getJsonJdbcType() );
typeContributions.contributeJdbcType( OracleJsonJdbcType.INSTANCE );
}
else {
typeContributions.contributeJdbcType( JsonBlobJdbcType.INSTANCE );
typeContributions.contributeJdbcType( OracleJsonBlobJdbcType.INSTANCE );
}
}
@ -790,6 +821,11 @@ public class OracleLegacyDialect extends Dialect {
);
}
@Override
public AggregateSupport getAggregateSupport() {
return OracleAggregateSupport.valueOf( this );
}
@Override
public String getNativeIdentifierGeneratorStrategy() {
return "sequence";
@ -1356,4 +1392,29 @@ public class OracleLegacyDialect extends Dialect {
builder.setAutoQuoteInitialUnderscore(true);
return super.buildIdentifierHelper(builder, dbMetaData);
}
@Override
public boolean canDisableConstraints() {
return true;
}
@Override
public String getDisableConstraintStatement(String tableName, String name) {
return "alter table " + tableName + " disable constraint " + name;
}
@Override
public String getEnableConstraintStatement(String tableName, String name) {
return "alter table " + tableName + " enable constraint " + name;
}
@Override
public UniqueDelegate getUniqueDelegate() {
return uniqueDelegate;
}
@Override
public String getCreateUserDefinedTypeKindString() {
return "object";
}
}

View File

@ -8,11 +8,11 @@ package org.hibernate.community.dialect;
import java.util.List;
import org.hibernate.LockMode;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.Stack;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.query.IllegalQueryOperationException;
import org.hibernate.query.sqm.BinaryArithmeticOperator;
import org.hibernate.query.sqm.ComparisonOperator;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.query.sqm.FrameExclusion;
@ -22,25 +22,29 @@ import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteMaterialization;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.AggregateColumnWriteExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.FunctionExpression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Over;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.SqlTupleContainer;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.from.UnionTableGroup;
import org.hibernate.sql.ast.tree.from.ValuesTableReference;
import org.hibernate.sql.ast.tree.insert.InsertStatement;
import org.hibernate.sql.ast.tree.insert.InsertSelectStatement;
import org.hibernate.sql.ast.tree.insert.Values;
import org.hibernate.sql.ast.tree.select.QueryGroup;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SelectClause;
import org.hibernate.sql.ast.tree.select.SortSpecification;
import org.hibernate.sql.ast.tree.update.Assignment;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.type.SqlTypes;
@ -147,6 +151,19 @@ public class OracleLegacySqlAstTranslator<T extends JdbcOperation> extends Abstr
return getQueryPartStack().findCurrentFirst( part -> part instanceof QueryGroup ? part : null ) != null;
}
@Override
protected boolean shouldEmulateLateralWithIntersect(QueryPart queryPart) {
// On Oracle 11 where there is no lateral support,
// make sure we don't use intersect if the query has an offset/fetch clause
return !queryPart.hasOffsetOrFetchClause();
}
@Override
protected boolean supportsNestedSubqueryCorrelation() {
// It seems it doesn't support it, at least on version 11
return false;
}
protected boolean shouldEmulateFetchClause(QueryPart queryPart) {
// Check if current query part is already row numbering to avoid infinite recursion
if (getQueryPartForRowNumbering() == queryPart) {
@ -162,7 +179,7 @@ public class OracleLegacySqlAstTranslator<T extends JdbcOperation> extends Abstr
// When the query has no sort specifications and offset, we want to use the ROWNUM pagination as that is a special locking case
return !queryPart.hasSortSpecifications() && !hasOffset( queryPart )
// Workaround an Oracle bug, segmentation fault for insert queries with a plain query group and fetch clause
|| queryPart instanceof QueryGroup && getClauseStack().isEmpty() && getStatement() instanceof InsertStatement;
|| queryPart instanceof QueryGroup && getClauseStack().isEmpty() && getStatement() instanceof InsertSelectStatement;
}
return true;
}
@ -485,4 +502,50 @@ public class OracleLegacySqlAstTranslator<T extends JdbcOperation> extends Abstr
return getDialect().supportsFetchClause( FetchClauseType.ROWS_ONLY );
}
@Override
protected boolean renderNamedTableReference(NamedTableReference tableReference, LockMode lockMode) {
	// Render the plain table expression followed by its identification variable;
	// the table is registered so lock handling elsewhere can see it was touched
	appendSql( tableReference.getTableExpression() );
	registerAffectedTable( tableReference );
	renderTableReferenceIdentificationVariable( tableReference );
	// false: the lock mode was not rendered as part of the table reference here
	return false;
}
@Override
protected void visitSetAssignment(Assignment assignment) {
	final List<ColumnReference> columns = assignment.getAssignable().getColumnReferences();
	if ( columns.size() == 1 ) {
		// Single column: render `column = value`, unwrapping a one-element tuple if present
		columns.get( 0 ).appendColumnForWrite( this );
		appendSql( '=' );
		final Expression value = assignment.getAssignedValue();
		final SqlTuple tuple = SqlTupleContainer.getSqlTuple( value );
		if ( tuple == null ) {
			value.accept( this );
		}
		else {
			assert tuple.getExpressions().size() == 1;
			tuple.getExpressions().get( 0 ).accept( this );
		}
	}
	else {
		// Multiple columns: render `(c1,c2,...) = value`
		char prefix = OPEN_PARENTHESIS;
		for ( ColumnReference column : columns ) {
			appendSql( prefix );
			column.appendColumnForWrite( this );
			prefix = COMA_SEPARATOR_CHAR;
		}
		appendSql( ")=" );
		assignment.getAssignedValue().accept( this );
	}
}
@Override
public void visitColumnReference(ColumnReference columnReference) {
	// Delegate rendering to the column reference's own read expression
	columnReference.appendReadExpression( this );
}
@Override
public void visitAggregateColumnWriteExpression(AggregateColumnWriteExpression aggregateColumnWriteExpression) {
	// The expression renders itself; this translator serves as both the
	// SQL appender and the expression visitor for the nested values
	aggregateColumnWriteExpression.appendWriteExpression( this, this );
}
}

View File

@ -35,10 +35,13 @@ import org.hibernate.dialect.PostgreSQLInetJdbcType;
import org.hibernate.dialect.PostgreSQLIntervalSecondJdbcType;
import org.hibernate.dialect.PostgreSQLJsonbJdbcType;
import org.hibernate.dialect.PostgreSQLPGObjectJdbcType;
import org.hibernate.dialect.PostgreSQLStructJdbcType;
import org.hibernate.dialect.Replacer;
import org.hibernate.dialect.RowLockStrategy;
import org.hibernate.dialect.SelectItemReferenceStrategy;
import org.hibernate.dialect.TimeZoneSupport;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.dialect.aggregate.PostgreSQLAggregateSupport;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.PostgreSQLMinMaxFunction;
import org.hibernate.dialect.identity.IdentityColumnSupport;
@ -48,6 +51,8 @@ import org.hibernate.dialect.pagination.LimitOffsetLimitHandler;
import org.hibernate.dialect.pagination.OffsetFetchLimitHandler;
import org.hibernate.dialect.sequence.PostgreSQLSequenceSupport;
import org.hibernate.dialect.sequence.SequenceSupport;
import org.hibernate.dialect.unique.CreateTableUniqueDelegate;
import org.hibernate.dialect.unique.UniqueDelegate;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
import org.hibernate.engine.jdbc.env.spi.IdentifierCaseStrategy;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
@ -80,7 +85,9 @@ import org.hibernate.sql.ast.spi.StandardSqlAstTranslatorFactory;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.type.JavaObjectType;
import org.hibernate.type.descriptor.DateTimeUtils;
import org.hibernate.type.descriptor.java.PrimitiveByteArrayJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.BlobJdbcType;
import org.hibernate.type.descriptor.jdbc.ClobJdbcType;
@ -122,6 +129,7 @@ import static org.hibernate.type.SqlTypes.NCLOB;
import static org.hibernate.type.SqlTypes.NVARCHAR;
import static org.hibernate.type.SqlTypes.OTHER;
import static org.hibernate.type.SqlTypes.SQLXML;
import static org.hibernate.type.SqlTypes.STRUCT;
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_UTC;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
@ -145,6 +153,7 @@ public class PostgreSQLLegacyDialect extends Dialect {
private static final PostgreSQLIdentityColumnSupport IDENTITY_COLUMN_SUPPORT = new PostgreSQLIdentityColumnSupport();
private final PostgreSQLDriverKind driverKind;
private final UniqueDelegate uniqueDelegate = new CreateTableUniqueDelegate(this);
public PostgreSQLLegacyDialect() {
this( DatabaseVersion.make( 8, 0 ) );
@ -333,6 +342,12 @@ public class PostgreSQLLegacyDialect extends Dialect {
}
}
return jdbcType;
case STRUCT:
final AggregateJdbcType aggregateDescriptor = jdbcTypeRegistry.findAggregateDescriptor( columnTypeName );
if ( aggregateDescriptor != null ) {
return aggregateDescriptor;
}
break;
}
return jdbcTypeRegistry.getDescriptor( jdbcTypeCode );
}
@ -659,6 +674,11 @@ public class PostgreSQLLegacyDialect extends Dialect {
return getVersion().isSameOrAfter( 8, 2 );
}
@Override
public boolean supportsIfExistsBeforeTypeName() {
	// `drop type if exists ...` is available from PostgreSQL 8.2 on
	return getVersion().isSameOrAfter( 8, 2 );
}
@Override
public boolean supportsIfExistsBeforeConstraintName() {
return getVersion().isSameOrAfter( 9 );
@ -715,17 +735,12 @@ public class PostgreSQLLegacyDialect extends Dialect {
@Override
public String getForUpdateString(String aliases, LockOptions lockOptions) {
/*
* Parent's implementation for (aliases, lockOptions) ignores aliases.
*/
// parent's implementation for (aliases, lockOptions) ignores aliases
if ( aliases.isEmpty() ) {
LockMode lockMode = lockOptions.getLockMode();
final Iterator<Map.Entry<String, LockMode>> itr = lockOptions.getAliasLockIterator();
while ( itr.hasNext() ) {
for ( Map.Entry<String, LockMode> entry : lockOptions.getAliasSpecificLocks() ) {
// seek the highest lock mode
final Map.Entry<String, LockMode> entry = itr.next();
final LockMode lm = entry.getValue();
if ( lm.greaterThan( lockMode ) ) {
if ( entry.getValue().greaterThan(lockMode) ) {
aliases = entry.getKey();
}
}
@ -1003,6 +1018,11 @@ public class PostgreSQLLegacyDialect extends Dialect {
return false;
}
@Override
public boolean supportsTemporalLiteralOffset() {
	// PostgreSQL accepts temporal literals carrying an explicit UTC offset
	return true;
}
@Override
public void appendDatetimeFormat(SqlAppender appender, String format) {
appender.appendSql( datetimeFormat( format ).result() );
@ -1039,6 +1059,11 @@ public class PostgreSQLLegacyDialect extends Dialect {
}
}
@Override
public AggregateSupport getAggregateSupport() {
	// Version-dependent support for aggregate (struct/json) column mappings
	return PostgreSQLAggregateSupport.valueOf( this );
}
@Override
public void appendBinaryLiteral(SqlAppender appender, byte[] bytes) {
appender.appendSql( "bytea '\\x" );
@ -1059,7 +1084,7 @@ public class PostgreSQLLegacyDialect extends Dialect {
appender.appendSql( '\'' );
break;
case TIME:
if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
if ( supportsTemporalLiteralOffset() && temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
appender.appendSql( "time with time zone '" );
appendAsTime( appender, temporalAccessor, true, jdbcTimeZone );
}
@ -1070,9 +1095,16 @@ public class PostgreSQLLegacyDialect extends Dialect {
appender.appendSql( '\'' );
break;
case TIMESTAMP:
appender.appendSql( "timestamp with time zone '" );
appendAsTimestampWithMicros( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
appender.appendSql( '\'' );
if ( supportsTemporalLiteralOffset() && temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
appender.appendSql( "timestamp with time zone '" );
appendAsTimestampWithMicros( appender, temporalAccessor, true, jdbcTimeZone );
appender.appendSql( '\'' );
}
else {
appender.appendSql( "timestamp '" );
appendAsTimestampWithMicros( appender, temporalAccessor, false, jdbcTimeZone );
appender.appendSql( '\'' );
}
break;
default:
throw new IllegalArgumentException();
@ -1218,6 +1250,11 @@ public class PostgreSQLLegacyDialect extends Dialect {
return getVersion().isSameOrAfter( 9, 3 );
}
@Override
public boolean supportsRecursiveCTE() {
	// `with recursive` is supported by all PostgreSQL versions this dialect targets
	return true;
}
@Override
public boolean supportsFetchClause(FetchClauseType type) {
switch ( type ) {
@ -1275,6 +1312,7 @@ public class PostgreSQLLegacyDialect extends Dialect {
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLStructJdbcType.INSTANCE );
}
if ( getVersion().isSameOrAfter( 8, 2 ) ) {
@ -1301,4 +1339,40 @@ public class PostgreSQLLegacyDialect extends Dialect {
)
);
}
@Override
public UniqueDelegate getUniqueDelegate() {
	// Unique constraints are emitted inline in `create table`
	return uniqueDelegate;
}
/**
 * @return {@code true}, but only because we can "batch" truncate,
 *         i.e. pass multiple tables to a single {@code truncate} statement
 */
@Override
public boolean canBatchTruncate() {
	return true;
}
// disabled foreign key constraints still prevent 'truncate table'
// (these would help if we used 'delete' instead of 'truncate')
// @Override
// public String getDisableConstraintsStatement() {
// return "set constraints all deferred";
// }
//
// @Override
// public String getEnableConstraintsStatement() {
// return "set constraints all immediate";
// }
//
// @Override
// public String getDisableConstraintStatement(String tableName, String name) {
// return "alter table " + tableName + " alter constraint " + name + " deferrable";
// }
//
// @Override
// public String getEnableConstraintStatement(String tableName, String name) {
// return "alter table " + tableName + " alter constraint " + name + " deferrable";
// }
}

View File

@ -7,6 +7,7 @@
package org.hibernate.community.dialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.tree.Statement;
@ -18,10 +19,12 @@ import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
import org.hibernate.sql.ast.tree.predicate.LikePredicate;
import org.hibernate.sql.ast.tree.predicate.NullnessPredicate;
import org.hibernate.sql.ast.tree.select.QueryGroup;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.sql.model.internal.TableInsertStandard;
/**
* A SQL AST translator for PostgreSQL.
@ -34,6 +37,12 @@ public class PostgreSQLLegacySqlAstTranslator<T extends JdbcOperation> extends A
super( sessionFactory, statement );
}
@Override
protected void renderInsertIntoNoColumns(TableInsertStandard tableInsert) {
	// PostgreSQL spells a column-less insert as `insert into <table> default values`
	renderIntoIntoAndTable( tableInsert );
	appendSql( "default values" );
}
@Override
protected void renderExpressionAsClauseItem(Expression expression) {
expression.accept( this );
@ -51,6 +60,26 @@ public class PostgreSQLLegacySqlAstTranslator<T extends JdbcOperation> extends A
}
}
@Override
public void visitNullnessPredicate(NullnessPredicate nullnessPredicate) {
	final Expression expression = nullnessPredicate.getExpression();
	if ( isStruct( expression.getExpressionType() ) ) {
		// Surprise, the null predicate checks if all components of the struct are null or not,
		// rather than the column itself, so we have to use the distinct from predicate instead
		expression.accept( this );
		appendSql( nullnessPredicate.isNegated()
				? " is distinct from null"
				: " is not distinct from null" );
	}
	else {
		super.visitNullnessPredicate( nullnessPredicate );
	}
}
@Override
protected void renderMaterializationHint(CteMaterialization materialization) {
if ( getDialect().getVersion().isSameOrAfter( 12 ) ) {

View File

@ -63,8 +63,11 @@ import org.hibernate.type.BasicType;
import org.hibernate.type.BasicTypeRegistry;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.java.PrimitiveByteArrayJavaType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.SmallIntJdbcType;
import org.hibernate.type.descriptor.jdbc.UUIDJdbcType;
import org.hibernate.type.descriptor.jdbc.XmlJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
@ -95,6 +98,10 @@ import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithM
*/
public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
private static final int PARAM_LIST_SIZE_LIMIT = 2100;
// See microsoft.sql.Types.GEOMETRY
private static final int GEOMETRY_TYPE_CODE = -157;
// See microsoft.sql.Types.GEOGRAPHY
private static final int GEOGRAPHY_TYPE_CODE = -158;
private final StandardSequenceExporter exporter;
private final UniqueDelegate uniqueDelegate;
@ -198,6 +205,32 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOGRAPHY, "geography", this ) );
}
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( SQLXML, "xml", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "uniqueidentifier", this ) );
}
@Override
public JdbcType resolveSqlTypeDescriptor(
		String columnTypeName,
		int jdbcTypeCode,
		int precision,
		int scale,
		JdbcTypeRegistry jdbcTypeRegistry) {
	// Remap SQL Server specific / ambiguous type codes before the default resolution
	if ( jdbcTypeCode == OTHER && "uniqueidentifier".equals( columnTypeName ) ) {
		jdbcTypeCode = UUID;
	}
	else if ( jdbcTypeCode == GEOMETRY_TYPE_CODE ) {
		jdbcTypeCode = GEOMETRY;
	}
	else if ( jdbcTypeCode == GEOGRAPHY_TYPE_CODE ) {
		jdbcTypeCode = GEOGRAPHY;
	}
	return super.resolveSqlTypeDescriptor( columnTypeName, jdbcTypeCode, precision, scale, jdbcTypeRegistry );
}
@Override
@ -237,6 +270,7 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
SmallIntJdbcType.INSTANCE
);
typeContributions.contributeJdbcType( XmlJdbcType.INSTANCE );
typeContributions.contributeJdbcType( UUIDJdbcType.INSTANCE );
}
@Override
@ -267,6 +301,8 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
// AVG by default uses the input type, so we possibly need to cast the argument type, hence a special function
functionFactory.avg_castingNonDoubleArguments( this, SqlAstNodeRenderingMode.DEFAULT );
functionFactory.log_log();
functionFactory.truncate_round();
functionFactory.everyAny_minMaxIif();
functionFactory.octetLength_pattern( "datalength(?1)" );
@ -847,6 +883,13 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
PrimitiveByteArrayJavaType.INSTANCE.appendString( appender, bytes );
}
@Override
public void appendUUIDLiteral(SqlAppender appender, java.util.UUID literal) {
	// Cast explicitly so the literal is typed as uniqueidentifier
	appender.appendSql( "cast('" );
	appender.appendSql( String.valueOf( literal ) );
	appender.appendSql( "' as uniqueidentifier)" );
}
@Override
public void appendDateTimeLiteral(
SqlAppender appender,
@ -867,12 +910,14 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
break;
case TIMESTAMP:
appender.appendSql( "cast('" );
appendAsTimestampWithMicros( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
//needed because the {ts ... } JDBC escape chokes on microseconds
if ( supportsTemporalLiteralOffset() && temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
appendAsTimestampWithMicros( appender, temporalAccessor, true, jdbcTimeZone );
appender.appendSql( "' as datetimeoffset)" );
}
else {
appendAsTimestampWithMicros( appender, temporalAccessor, false, jdbcTimeZone );
appender.appendSql( "' as datetime2)" );
}
break;
@ -956,24 +1001,6 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
return super.getDropSchemaCommand( schemaName );
}
@Override
public NameQualifierSupport getNameQualifierSupport() {
return NameQualifierSupport.BOTH;
}
public Exporter<Sequence> getSequenceExporter() {
if ( exporter == null ) {
return super.getSequenceExporter();
}
return exporter;
}
@Override
public UniqueDelegate getUniqueDelegate() {
return uniqueDelegate;
}
@Override
public String getCreateIndexString(boolean unique) {
// we only create unique indexes, as opposed to unique constraints,
@ -999,6 +1026,24 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
}
}
@Override
public NameQualifierSupport getNameQualifierSupport() {
	// SQL Server supports qualifying names by both catalog and schema
	return NameQualifierSupport.BOTH;
}
@Override
public UniqueDelegate getUniqueDelegate() {
	// Use the delegate chosen in the constructor
	return uniqueDelegate;
}
@Override
public Exporter<Sequence> getSequenceExporter() {
	// Fall back to the default exporter when no custom one was created
	return exporter != null ? exporter : super.getSequenceExporter();
}
private static class SqlServerSequenceExporter extends StandardSequenceExporter {
public SqlServerSequenceExporter(Dialect dialect) {
@ -1029,4 +1074,17 @@ public class SQLServerLegacyDialect extends AbstractTransactSQLDialect {
public boolean hasDataTypeBeforeGeneratedAs() {
return false;
}
// disabled foreign key constraints still prevent 'truncate table'
// (these would help if we used 'delete' instead of 'truncate')
// @Override
// public String getDisableConstraintStatement(String tableName, String name) {
// return "alter table " + tableName + " nocheck constraint " + name;
// }
//
// @Override
// public String getEnableConstraintStatement(String tableName, String name) {
// return "alter table " + tableName + " with check check constraint " + name;
// }
}

View File

@ -55,6 +55,7 @@ import org.hibernate.type.BasicType;
import org.hibernate.type.BasicTypeRegistry;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.DateTimeUtils;
import org.hibernate.type.descriptor.jdbc.BlobJdbcType;
import org.hibernate.type.descriptor.jdbc.ClobJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
@ -82,7 +83,8 @@ import static org.hibernate.type.SqlTypes.TIME_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.VARBINARY;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsDate;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTime;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMicros;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMillis;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithNanos;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMillis;
/**
@ -664,7 +666,7 @@ public class SQLiteDialect extends Dialect {
break;
case TIMESTAMP:
appender.appendSql( "datetime(" );
appendAsTimestampWithMicros( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
appendAsTimestampWithNanos( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
appender.appendSql( ')' );
break;
default:
@ -687,7 +689,7 @@ public class SQLiteDialect extends Dialect {
break;
case TIMESTAMP:
appender.appendSql( "datetime(" );
appendAsTimestampWithMicros( appender, date, jdbcTimeZone );
appendAsTimestampWithNanos( appender, date, jdbcTimeZone );
appender.appendSql( ')' );
break;
default:

View File

@ -631,6 +631,20 @@ public class SybaseASELegacyDialect extends SybaseLegacyDialect {
: sql + new ForUpdateFragment( this, aliasedLockOptions, keyColumnNames ).toFragmentString();
}
@Override
public String toQuotedIdentifier(String name) {
	// Temporary tables must start with a '#' character,
	// but Sybase doesn't support quoting of such identifiers,
	// so pass those (and null/empty names) through unchanged
	if ( name == null || name.isEmpty() || name.charAt( 0 ) == '#' ) {
		return name;
	}
	return super.toQuotedIdentifier( name );
}
@Override
public ViolatedConstraintNameExtractor getViolatedConstraintNameExtractor() {
return EXTRACTOR;

View File

@ -17,11 +17,11 @@ import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.AggregateColumnWriteExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.QueryLiteral;
@ -369,6 +369,33 @@ public class SybaseASELegacySqlAstTranslator<T extends JdbcOperation> extends Ab
}
}
@Override
public void visitAggregateColumnWriteExpression(AggregateColumnWriteExpression aggregateColumnWriteExpression) {
	final String dmlTargetTableAlias = getDmlTargetTableAlias();
	final ColumnReference columnReference = aggregateColumnWriteExpression.getColumnReference();
	if ( dmlTargetTableAlias != null && dmlTargetTableAlias.equals( columnReference.getQualifier() ) ) {
		// Sybase needs a table name prefix
		// but not if this is a restricted union table reference subquery
		final QuerySpec currentQuerySpec = (QuerySpec) getQueryPartStack().getCurrent();
		final List<TableGroup> roots;
		if ( currentQuerySpec != null && !currentQuerySpec.isRoot()
				&& (roots = currentQuerySpec.getFromClause().getRoots()).size() == 1
				&& roots.get( 0 ).getPrimaryTableReference() instanceof UnionTableReference ) {
			// union subquery case: render without a qualifier
			aggregateColumnWriteExpression.appendWriteExpression( this, this );
		}
		else {
			// qualify with the DML target's table expression instead of the alias
			aggregateColumnWriteExpression.appendWriteExpression(
					this,
					this,
					getCurrentDmlStatement().getTargetTable().getTableExpression()
			);
		}
	}
	else {
		// Not the DML target column — render unqualified
		aggregateColumnWriteExpression.appendWriteExpression( this, this );
	}
}
@Override
protected boolean needsRowsToSkip() {
return true;

View File

@ -26,6 +26,8 @@ import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
import org.hibernate.engine.jdbc.env.spi.NameQualifierSupport;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.procedure.internal.JTDSCallableStatementSupport;
import org.hibernate.procedure.spi.CallableStatementSupport;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.query.spi.QueryParameterBindings;
@ -208,6 +210,13 @@ public class SybaseLegacyDialect extends AbstractTransactSQLDialect {
CommonFunctionFactory functionFactory = new CommonFunctionFactory(queryEngine);
functionFactory.stddev();
functionFactory.variance();
functionFactory.stddevPopSamp_stdevp();
functionFactory.varPopSamp_varp();
functionFactory.stddevPopSamp();
functionFactory.varPopSamp();
// For SQL-Server we need to cast certain arguments to varchar(16384) to be able to concat them
queryEngine.getSqmFunctionRegistry().register(
"count",
@ -345,4 +354,9 @@ public class SybaseLegacyDialect extends AbstractTransactSQLDialect {
public UniqueDelegate getUniqueDelegate() {
return uniqueDelegate;
}
@Override
public CallableStatementSupport getCallableStatementSupport() {
	// The jTDS driver needs its own callable statement handling
	if ( jtdsDriver ) {
		return JTDSCallableStatementSupport.INSTANCE;
	}
	return super.getCallableStatementSupport();
}
}

View File

@ -0,0 +1,47 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.annotations;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.TYPE;
/**
 * Specifies the UDT (user defined type) name for the annotated embeddable or embedded.
 *
 * <pre>
 * Example:
 *
 * {@code @Embeddable}
 * {@code @Struct(name = "CUST")}
 * public class Customer { ... }
 * </pre>
 *
 * <pre>
 * Example:
 *
 * public class Order {
 *     {@code @Embedded}
 *     {@code @Struct(name = "CUST")}
 *     private Customer customer;
 * }
 * </pre>
 *
 * @since 6.2
 */
@Target({TYPE, FIELD, METHOD})
@Retention( RetentionPolicy.RUNTIME )
public @interface Struct {

	/**
	 * The name of the UDT (user defined type).
	 */
	String name();
}

View File

@ -11,6 +11,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@ -62,6 +63,7 @@ import org.hibernate.boot.spi.InFlightMetadataCollector;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.MetadataBuildingOptions;
import org.hibernate.boot.spi.NaturalIdUniqueKeyBinder;
import org.hibernate.cfg.AggregateComponentSecondPass;
import org.hibernate.cfg.AnnotatedClassType;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.CreateKeySecondPass;
@ -76,6 +78,7 @@ import org.hibernate.cfg.SecondaryTableFromAnnotationSecondPass;
import org.hibernate.cfg.SecondaryTableSecondPass;
import org.hibernate.cfg.SetBasicValueTypeSecondPass;
import org.hibernate.cfg.UniqueConstraintHolder;
import org.hibernate.cfg.annotations.BasicValueBinder;
import org.hibernate.cfg.annotations.NamedEntityGraphDefinition;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.FilterDefinition;
@ -83,6 +86,7 @@ import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.BasicValue;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
@ -100,6 +104,7 @@ import org.hibernate.mapping.RootClass;
import org.hibernate.mapping.SimpleValue;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.mapping.Value;
import org.hibernate.metamodel.CollectionClassification;
import org.hibernate.metamodel.spi.EmbeddableInstantiator;
import org.hibernate.query.named.NamedObjectRepository;
@ -1634,6 +1639,7 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector,
private ArrayList<IdGeneratorResolverSecondPass> idGeneratorResolverSecondPassList;
private ArrayList<SetBasicValueTypeSecondPass> setBasicValueTypeSecondPassList;
private ArrayList<AggregateComponentSecondPass> aggregateComponentSecondPassList;
private ArrayList<FkSecondPass> fkSecondPassList;
private ArrayList<CreateKeySecondPass> createKeySecondPasList;
private ArrayList<SecondaryTableSecondPass> secondaryTableSecondPassList;
@ -1656,6 +1662,9 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector,
else if ( secondPass instanceof SetBasicValueTypeSecondPass ) {
addSetBasicValueTypeSecondPass( (SetBasicValueTypeSecondPass) secondPass, onTopOfTheQueue );
}
else if ( secondPass instanceof AggregateComponentSecondPass ) {
addAggregateComponentSecondPass( (AggregateComponentSecondPass) secondPass, onTopOfTheQueue );
}
else if ( secondPass instanceof FkSecondPass ) {
addFkSecondPass( (FkSecondPass) secondPass, onTopOfTheQueue );
}
@ -1702,6 +1711,13 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector,
addSecondPass( secondPass, setBasicValueTypeSecondPassList, onTopOfTheQueue );
}
// Queues a second pass for aggregate (struct/json) component processing,
// lazily creating the backing list on first use
private void addAggregateComponentSecondPass(AggregateComponentSecondPass secondPass, boolean onTopOfTheQueue) {
	if ( aggregateComponentSecondPassList == null ) {
		aggregateComponentSecondPassList = new ArrayList<>();
	}
	addSecondPass( secondPass, aggregateComponentSecondPassList, onTopOfTheQueue );
}
private void addIdGeneratorResolverSecondPass(IdGeneratorResolverSecondPass secondPass, boolean onTopOfTheQueue) {
if ( idGeneratorResolverSecondPassList == null ) {
idGeneratorResolverSecondPassList = new ArrayList<>();
@ -1766,6 +1782,7 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector,
processSecondPasses( idGeneratorResolverSecondPassList );
processSecondPasses( implicitColumnNamingSecondPassList );
processSecondPasses( setBasicValueTypeSecondPassList );
processSecondPasses( aggregateComponentSecondPassList );
composites.forEach( Component::sortProperties );

View File

@ -15,6 +15,7 @@ import java.util.Objects;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import org.hibernate.boot.model.process.internal.InferredBasicValueResolver;
import org.hibernate.boot.model.process.internal.UserTypeResolution;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.spi.BootstrapContext;
@ -223,7 +224,11 @@ public class TypeDefinition implements Serializable {
.getJavaTypeRegistry()
.resolveDescriptor( typeImplementorClass );
final JdbcType jdbcType = typeConfiguration.getJdbcTypeRegistry().getDescriptor( Types.VARBINARY );
final BasicType<Serializable> resolved = typeConfiguration.getBasicTypeRegistry().resolve( jtd, jdbcType );
final BasicType<Serializable> resolved = InferredBasicValueResolver.resolveSqlTypeIndicators(
indicators,
typeConfiguration.getBasicTypeRegistry().resolve( jtd, jdbcType ),
jtd
);
@SuppressWarnings({"rawtypes", "unchecked"})
final SerializableType legacyType = new SerializableType( typeImplementorClass );

View File

@ -62,4 +62,11 @@ public interface PhysicalNamingStrategy {
* Determine the physical column name from the given logical name
*/
Identifier toPhysicalColumnName(Identifier logicalName, JdbcEnvironment jdbcEnvironment);
/**
 * Determine the physical UDT type name from the given logical name.
 * <p>
 * By default this delegates to {@link #toPhysicalTableName}, so UDT names
 * follow the same physical naming rules as table names unless overridden.
 */
default Identifier toPhysicalTypeName(Identifier logicalName, JdbcEnvironment jdbcEnvironment) {
	return toPhysicalTableName( logicalName, jdbcEnvironment );
}
}

View File

@ -63,7 +63,6 @@ public class InferredBasicValueResolver {
// makes the code a little more complicated but the benefit is well worth it - saving memory
final BasicType<T> jdbcMapping;
final BasicType<T> legacyType;
if ( explicitJavaType != null ) {
// we have an explicit JavaType
@ -93,33 +92,20 @@ public class InferredBasicValueResolver {
explicitJavaType,
explicitJdbcType
);
legacyType = jdbcMapping;
}
else {
// we need to infer the JdbcType and use that to build the value-mapping
final JdbcType inferredJdbcType = explicitJavaType.getRecommendedJdbcType( stdIndicators );
if ( inferredJdbcType instanceof ObjectJdbcType ) {
// have a "fallback" JDBC type... see if we can decide a better choice
if ( explicitJavaType instanceof SerializableJavaType
|| explicitJavaType.getJavaType() instanceof Serializable ) {
legacyType = new SerializableType( explicitJavaType );
jdbcMapping = legacyType;
}
else {
jdbcMapping = typeConfiguration.getBasicTypeRegistry().resolve(
explicitJavaType,
inferredJdbcType
);
legacyType = jdbcMapping;
}
if ( inferredJdbcType instanceof ObjectJdbcType && ( explicitJavaType instanceof SerializableJavaType
|| explicitJavaType.getJavaType() instanceof Serializable ) ) {
// Use the SerializableType if possible since ObjectJdbcType is our fallback
jdbcMapping = new SerializableType( explicitJavaType );
}
else {
jdbcMapping = typeConfiguration.getBasicTypeRegistry().resolve(
explicitJavaType,
inferredJdbcType
);
legacyType = jdbcMapping;
}
}
}
@ -153,8 +139,6 @@ public class InferredBasicValueResolver {
reflectedJtd,
explicitJdbcType
);
legacyType = jdbcMapping;
}
else {
// see if there is a registered BasicType for this JavaType and, if so, use it.
@ -213,8 +197,7 @@ public class InferredBasicValueResolver {
if ( registeredType != null ) {
// so here is the legacy resolution
legacyType = resolveSqlTypeIndicators( stdIndicators, registeredType, reflectedJtd );
jdbcMapping = legacyType;
jdbcMapping = resolveSqlTypeIndicators( stdIndicators, registeredType, reflectedJtd );
}
else {
// there was not a "legacy" BasicType registration, so use `JavaType#getRecommendedJdbcType`, if
@ -225,16 +208,13 @@ public class InferredBasicValueResolver {
reflectedJtd,
recommendedJdbcType
);
legacyType = jdbcMapping;
}
else if ( reflectedJtd instanceof SerializableJavaType
|| Serializable.class.isAssignableFrom( reflectedJtd.getJavaTypeClass() ) ) {
legacyType = new SerializableType( reflectedJtd );
jdbcMapping = legacyType;
jdbcMapping = new SerializableType( reflectedJtd );
}
else {
// let this fall through to the exception creation below
legacyType = null;
jdbcMapping = null;
}
}
@ -262,7 +242,7 @@ public class InferredBasicValueResolver {
}
}
final BasicJavaType<T> recommendedJtd = explicitJdbcType.getJdbcRecommendedJavaTypeMapping(
final JavaType<T> recommendedJtd = explicitJdbcType.getJdbcRecommendedJavaTypeMapping(
length,
scale,
typeConfiguration
@ -276,7 +256,6 @@ public class InferredBasicValueResolver {
),
recommendedJtd
);
legacyType = jdbcMapping;
}
else {
// we have neither a JTD nor STD
@ -305,46 +284,11 @@ public class InferredBasicValueResolver {
jdbcMapping.getJavaTypeDescriptor(),
jdbcMapping.getJavaTypeDescriptor(),
jdbcMapping.getJdbcType(),
legacyType,
jdbcMapping,
null
);
}
// /**
// * Create an inference-based resolution
// */
// public static <T> BasicValue.Resolution<T> from(
// Function<TypeConfiguration, BasicJavaType> explicitJavaTypeAccess,
// Function<TypeConfiguration, JdbcType> explicitSqlTypeAccess,
// Type resolvedJavaType,
// Supplier<JavaType<T>> reflectedJtdResolver,
// JdbcTypeIndicators stdIndicators,
// Table table,
// Selectable selectable,
// String ownerName,
// String propertyName,
// TypeConfiguration typeConfiguration) {
//
// final BasicJavaType<T> explicitJavaType = explicitJavaTypeAccess != null
// ? explicitJavaTypeAccess.apply( typeConfiguration )
// : null;
// final JdbcType explicitJdbcType = explicitSqlTypeAccess
// != null ? explicitSqlTypeAccess.apply( typeConfiguration )
// : null;
// return InferredBasicValueResolver.from(
// explicitJavaType,
// explicitJdbcType,
// resolvedJavaType,
// reflectedJtdResolver,
// stdIndicators,
// table,
// selectable,
// ownerName,
// propertyName,
// typeConfiguration
// );
// }
public static <T> BasicType<T> resolveSqlTypeIndicators(
JdbcTypeIndicators stdIndicators,
BasicType<T> resolved,

View File

@ -10,6 +10,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
@ -33,7 +34,7 @@ public class Database {
private final TypeConfiguration typeConfiguration;
private final JdbcEnvironment jdbcEnvironment;
private final Map<Namespace.Name,Namespace> namespaceMap = new TreeMap<>();
private final Map<String,AuxiliaryDatabaseObject> auxiliaryDatabaseObjects = new HashMap<>();
private final Map<String,AuxiliaryDatabaseObject> auxiliaryDatabaseObjects = new LinkedHashMap<>();
private final ServiceRegistry serviceRegistry;
private final PhysicalNamingStrategy physicalNamingStrategy;

View File

@ -6,9 +6,18 @@
*/
package org.hibernate.boot.model.relational;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import java.util.function.Function;
@ -18,8 +27,11 @@ import org.hibernate.boot.model.naming.PhysicalNamingStrategy;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.DenormalizedTable;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UserDefinedType;
/**
* Represents a namespace (named schema/catalog pair) with a Database and manages objects defined within.
@ -36,6 +48,7 @@ public class Namespace {
private final Map<Identifier, Table> tables = new TreeMap<>();
private final Map<Identifier, Sequence> sequences = new TreeMap<>();
private final Map<Identifier, UserDefinedType> udts = new HashMap<>();
public Namespace(PhysicalNamingStrategy physicalNamingStrategy, JdbcEnvironment jdbcEnvironment, Name name) {
this.physicalNamingStrategy = physicalNamingStrategy;
@ -132,6 +145,83 @@ public class Namespace {
return sequence;
}
	/**
	 * Returns all user-defined types (UDTs) registered in this namespace,
	 * in no particular order (the backing map is a {@code HashMap}).
	 */
	public Collection<UserDefinedType> getUserDefinedTypes() {
		return udts.values();
	}
public List<UserDefinedType> getDependencyOrderedUserDefinedTypes() {
final var orderedUdts = new LinkedHashMap<Identifier, UserDefinedType>( udts.size() );
final var udtDependencies = new HashMap<Identifier, Set<Identifier>>( udts.size() );
for ( var entry : udts.entrySet() ) {
final var dependencies = new HashSet<Identifier>();
final UserDefinedType udt = entry.getValue();
for ( Column udtColumn : udt.getColumns() ) {
if ( udtColumn.getSqlTypeCode() == Types.STRUCT ) {
final String structName = ( (AggregateColumn) udtColumn ).getComponent().getStructName();
dependencies.add( Identifier.toIdentifier( structName ) );
}
}
if ( dependencies.isEmpty() ) {
// The UDTs without dependencies are added directly
orderedUdts.put( udt.getNameIdentifier(), udt );
}
else {
// For the rest we record the direct dependencies
udtDependencies.put( entry.getKey(), dependencies );
}
}
// Traverse the dependency sets
while ( !udtDependencies.isEmpty() ) {
for ( final var iterator = udtDependencies.entrySet().iterator(); iterator.hasNext(); ) {
final var entry = iterator.next();
final Set<Identifier> dependencies = entry.getValue();
// Remove the already ordered UDTs from the dependencies
dependencies.removeAll( orderedUdts.keySet() );
// If the dependencies have become empty
if ( dependencies.isEmpty() ) {
// the UDT can be inserted
orderedUdts.put( entry.getKey(), udts.get( entry.getKey() ) );
iterator.remove();
}
}
}
return new ArrayList<>( orderedUdts.values() );
}
	/**
	 * Returns the UDT with the specified logical UDT name.
	 *
	 * @param logicalTypeName - the logical name of the UDT
	 *
	 * @return the UDT with the specified name,
	 * or null if there is no UDT with the specified
	 * name.
	 */
	public UserDefinedType locateUserDefinedType(Identifier logicalTypeName) {
		return udts.get( logicalTypeName );
	}
/**
* Creates a mapping UDT instance.
*
* @param logicalTypeName The logical UDT name
*
* @return the created UDT.
*/
public UserDefinedType createUserDefinedType(Identifier logicalTypeName, Function<Identifier,UserDefinedType> creator) {
final UserDefinedType existing = udts.get( logicalTypeName );
if ( existing != null ) {
return existing;
}
final Identifier physicalTableName = physicalNamingStrategy.toPhysicalTypeName( logicalTypeName, jdbcEnvironment );
final UserDefinedType type = creator.apply( physicalTableName );
udts.put( logicalTypeName, type );
return type;
}
@Override
public String toString() {
return "Schema" + "{name=" + name + '}';

View File

@ -0,0 +1,42 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.boot.model.relational;
import org.hibernate.boot.model.naming.Identifier;
/**
 * Models the qualified name (catalog, schema, name) of a user-defined type.
 *
 * @author Christian Beikov
 */
public class QualifiedTypeName extends QualifiedNameImpl {

	public QualifiedTypeName(Identifier catalogName, Identifier schemaName, Identifier tableName) {
		super( catalogName, schemaName, tableName );
	}

	public QualifiedTypeName(Namespace.Name schemaName, Identifier tableName) {
		super( schemaName, tableName );
	}

	/**
	 * The unqualified name of the type itself.
	 */
	public Identifier getTypeName() {
		return getObjectName();
	}

	/**
	 * Returns a copy of this qualified name in which every non-null part is quoted.
	 */
	public QualifiedTypeName quote() {
		return new QualifiedTypeName(
				quoted( getCatalogName() ),
				quoted( getSchemaName() ),
				quoted( getTypeName() )
		);
	}

	// Re-wraps the identifier text as a quoted identifier; passes null through
	private static Identifier quoted(Identifier identifier) {
		return identifier == null ? null : new Identifier( identifier.getText(), true );
	}
}

View File

@ -0,0 +1,159 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.cfg;
import java.util.List;
import org.hibernate.AnnotationException;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.annotations.Struct;
import org.hibernate.annotations.common.reflection.XClass;
import org.hibernate.annotations.common.reflection.XProperty;
import org.hibernate.boot.spi.InFlightMetadataCollector;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.cfg.annotations.BasicValueBinder;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.BasicValue;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.Property;
import org.hibernate.mapping.Value;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.java.spi.EmbeddableAggregateJavaType;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * Processes aggregate component annotations from Java classes and produces the Hibernate configuration-time metamodel,
 * that is, the objects defined in the package {@link org.hibernate.mapping}.
 */
public final class AggregateComponentBinder {

	private AggregateComponentBinder() {}

	/**
	 * If the given component is mapped as an aggregate (struct/JSON/XML), performs the
	 * aggregate-specific binding: validates the component's properties, registers an
	 * {@link EmbeddableAggregateJavaType} for it, binds the single aggregate column and
	 * schedules an {@link AggregateComponentSecondPass} to finish the mapping.
	 * No-op for regular (non-aggregate) components.
	 */
	public static void processAggregate(
			Component component,
			PropertyHolder propertyHolder,
			PropertyData inferredData,
			XClass returnedClassOrElement,
			AnnotatedColumns columns,
			MetadataBuildingContext context) {
		if ( isAggregate( inferredData.getProperty(), inferredData.getClassOrElement() ) ) {
			validateComponent( component, BinderHelper.getPath( propertyHolder, inferredData ) );
			final InFlightMetadataCollector metadataCollector = context.getMetadataCollector();
			final TypeConfiguration typeConfiguration = metadataCollector.getTypeConfiguration();
			// Determine a struct name if this is a struct through some means
			final String structName = determineStructName( columns, inferredData, returnedClassOrElement );

			// We must register a special JavaType for the embeddable which can provide a recommended JdbcType
			typeConfiguration.getJavaTypeRegistry().resolveDescriptor(
					component.getComponentClass(),
					() -> new EmbeddableAggregateJavaType<>( component.getComponentClass(), structName )
			);
			component.setStructName( structName );
			// Determine the aggregate column
			final BasicValueBinder basicValueBinder = new BasicValueBinder( BasicValueBinder.Kind.ATTRIBUTE, component, context );
			basicValueBinder.setPropertyName( inferredData.getPropertyName() );
			basicValueBinder.setReturnedClassName( inferredData.getClassOrElementName() );
			basicValueBinder.setColumns( columns );
			basicValueBinder.setPersistentClassName( propertyHolder.getClassName() );
			basicValueBinder.setType(
					inferredData.getProperty(),
					inferredData.getPropertyClass(),
					inferredData.getDeclaringClass().getName(),
					null
			);
			final BasicValue propertyValue = basicValueBinder.make();
			final AggregateColumn aggregateColumn = (AggregateColumn) propertyValue.getColumn();
			aggregateColumn.setSqlType( structName );
			if ( structName != null ) {
				aggregateColumn.setSqlTypeCode( SqlTypes.STRUCT );
			}
			component.setAggregateColumn( aggregateColumn );

			// Reuse the local metadataCollector instead of re-fetching it from the context
			metadataCollector.addSecondPass(
					new AggregateComponentSecondPass(
							propertyHolder,
							component,
							returnedClassOrElement,
							context
					)
			);
		}
	}

	/**
	 * Verifies that the aggregate component only contains mapping types that are currently
	 * supported inside aggregates: basic values and (recursively) components of basic values.
	 *
	 * @throws AnnotationException for any other value type
	 */
	private static void validateComponent(Component component, String basePath) {
		for ( Property property : component.getProperties() ) {
			final Value value = property.getValue();
			if ( !( value instanceof BasicValue ) && !( value instanceof Component ) ) {
				// todo: see HHH-15831
				throw new AnnotationException(
						"Property '" + StringHelper.qualify( basePath, property.getName() )
								+ "' uses not yet supported mapping type '"
								+ value.getClass().getName()
								+ "' in component class '"
								+ component.getComponentClassName()
								+ "'. Aggregate components currently may only contain basic values and components of basic values."
				);
			}
			if ( value instanceof Component ) {
				final Component c = (Component) value;
				// Nested aggregates are handled by their own binder pass
				if ( c.getAggregateColumn() == null ) {
					validateComponent( c, StringHelper.qualify( basePath, property.getName() ) );
				}
			}
		}
	}

	/**
	 * Determines the struct type name, checking in order: {@code @Struct} on the property,
	 * an explicit column SQL type when the property declares {@code @JdbcTypeCode(SqlTypes.STRUCT)},
	 * and finally {@code @Struct} on the embeddable class. Returns {@code null} when the
	 * component is not mapped as a named struct type.
	 */
	private static String determineStructName(
			AnnotatedColumns columns,
			PropertyData inferredData,
			XClass returnedClassOrElement) {
		final XProperty property = inferredData.getProperty();
		if ( property != null ) {
			final Struct struct = property.getAnnotation( Struct.class );
			if ( struct != null ) {
				return struct.name();
			}
			final JdbcTypeCode jdbcTypeCode = property.getAnnotation( JdbcTypeCode.class );
			if ( jdbcTypeCode != null && jdbcTypeCode.value() == SqlTypes.STRUCT && columns != null ) {
				// A single column with an explicit SQL type names the struct type
				final List<AnnotatedColumn> columnList = columns.getColumns();
				if ( columnList.size() == 1 && columnList.get( 0 ).getSqlType() != null ) {
					return columnList.get( 0 ).getSqlType();
				}
			}
		}
		final Struct struct = returnedClassOrElement.getAnnotation( Struct.class );
		if ( struct != null ) {
			return struct.name();
		}
		return null;
	}

	/**
	 * A component is an aggregate when the property or the embeddable class is annotated
	 * with {@code @Struct}, or when the property declares a {@code @JdbcTypeCode} of
	 * {@code STRUCT}, {@code JSON} or {@code SQLXML}.
	 */
	private static boolean isAggregate(XProperty property, XClass returnedClass) {
		if ( property != null ) {
			final Struct struct = property.getAnnotation( Struct.class );
			if ( struct != null ) {
				return true;
			}
			final JdbcTypeCode jdbcTypeCode = property.getAnnotation( JdbcTypeCode.class );
			if ( jdbcTypeCode != null ) {
				switch ( jdbcTypeCode.value() ) {
					case SqlTypes.STRUCT:
					case SqlTypes.JSON:
					case SqlTypes.SQLXML:
						return true;
				}
			}
		}
		if ( returnedClass != null ) {
			return returnedClass.isAnnotationPresent( Struct.class );
		}
		return false;
	}
}

View File

@ -0,0 +1,281 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.cfg;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import org.hibernate.AnnotationException;
import org.hibernate.MappingException;
import org.hibernate.annotations.Comment;
import org.hibernate.annotations.common.reflection.XClass;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Database;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.spi.InFlightMetadataCollector;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.BasicValue;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
import org.hibernate.mapping.UserDefinedType;
import org.hibernate.mapping.Value;
import org.hibernate.sql.Template;
import org.hibernate.type.BasicPluralType;
import org.hibernate.type.BasicType;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * Second pass that finishes the mapping of an aggregate embeddable component:
 * registers the UDT for named struct mappings, wires up the dialect-specific
 * read/write expressions for the aggregate column and its sub-columns, and removes
 * the aggregated columns from the owning table.
 *
 * @author Christian Beikov
 */
public class AggregateComponentSecondPass implements SecondPass {

	private final PropertyHolder propertyHolder;
	private final Component component;
	// The embeddable class (or element class) backing the component
	private final XClass returnedClassOrElement;
	private final MetadataBuildingContext context;

	public AggregateComponentSecondPass(
			PropertyHolder propertyHolder,
			Component component,
			XClass returnedClassOrElement,
			MetadataBuildingContext context) {
		this.propertyHolder = propertyHolder;
		this.component = component;
		this.returnedClassOrElement = returnedClassOrElement;
		this.context = context;
	}

	public void doSecondPass(Map<String, PersistentClass> persistentClasses) throws MappingException {
		final InFlightMetadataCollector metadataCollector = context.getMetadataCollector();
		final TypeConfiguration typeConfiguration = metadataCollector.getTypeConfiguration();
		final Database database = metadataCollector.getDatabase();
		final Dialect dialect = database.getDialect();
		final AggregateSupport aggregateSupport = dialect.getAggregateSupport();
		// Compute aggregated columns since we have to replace them in the table with the aggregate column
		final List<Column> aggregatedColumns = component.getAggregatedColumns();
		final AggregateColumn aggregateColumn = component.getAggregateColumn();

		ensureInitialized( metadataCollector, typeConfiguration, dialect, aggregateColumn );
		validateSupportedColumnTypes( propertyHolder.getPath(), component );

		for ( org.hibernate.mapping.Column aggregatedColumn : aggregatedColumns ) {
			// Make sure this state is initialized
			aggregatedColumn.getSqlTypeCode( metadataCollector );
			aggregatedColumn.getSqlType( typeConfiguration, dialect, metadataCollector );
		}

		final String structName = component.getStructName();
		final boolean addAuxiliaryObjects;
		if ( structName != null ) {
			// Named struct mapping: register a UDT for it in the default namespace
			final Namespace defaultNamespace = database.getDefaultNamespace();
			final Identifier udtName = Identifier.toIdentifier( structName );
			final UserDefinedType udt = new UserDefinedType( "orm", defaultNamespace, udtName );
			final Comment comment = returnedClassOrElement.getAnnotation( Comment.class );
			if ( comment != null ) {
				udt.setComment( comment.value() );
			}
			for ( org.hibernate.mapping.Column aggregatedColumn : aggregatedColumns ) {
				// NOTE(review): an earlier comment claimed the column is cloned here because
				// its name changes later, but the column is added as-is — confirm whether
				// later column renames can leak into the UDT DDL
				udt.addColumn( aggregatedColumn );
			}
			final UserDefinedType registeredUdt = defaultNamespace.createUserDefinedType(
					udtName,
					name -> udt
			);
			// Only the first registration for a struct name emits auxiliary objects;
			// any later registration must be structurally identical to the first
			addAuxiliaryObjects = registeredUdt == udt;
			if ( registeredUdt != udt ) {
				validateEqual( registeredUdt, udt );
			}
		}
		else {
			addAuxiliaryObjects = true;
		}
		final String aggregateReadTemplate = aggregateColumn.getAggregateReadExpressionTemplate( dialect );
		// Strip the table-alias template placeholder to obtain a plain column expression
		final String aggregateReadExpression = aggregateReadTemplate.replace(
				Template.TEMPLATE + ".",
				""
		);
		final String aggregateAssignmentExpression = aggregateColumn.getAggregateAssignmentExpressionTemplate( dialect )
				.replace( Template.TEMPLATE + ".", "" );
		if ( addAuxiliaryObjects ) {
			aggregateSupport.aggregateAuxiliaryDatabaseObjects(
					database.getDefaultNamespace(),
					aggregateReadExpression,
					aggregateColumn,
					aggregatedColumns
			).forEach( database::addAuxiliaryDatabaseObject );
		}
		// Hook for the dialect for allowing to flush the whole aggregate
		aggregateColumn.setCustomWrite(
				aggregateSupport.aggregateCustomWriteExpression(
						aggregateColumn,
						aggregatedColumns
				)
		);

		// The following determines the custom read/write expression and write expression for aggregatedColumns
		for ( org.hibernate.mapping.Column subColumn : aggregatedColumns ) {
			final String selectableExpression = subColumn.getText( dialect );
			final String customReadExpression;
			final String assignmentExpression = aggregateSupport.aggregateComponentAssignmentExpression(
					aggregateAssignmentExpression,
					selectableExpression,
					aggregateColumn,
					subColumn
			);

			if ( subColumn.getCustomReadExpression() == null ) {
				if ( subColumn.isFormula() ) {
					// Formulas must be templated against the table alias first
					customReadExpression = aggregateSupport.aggregateComponentCustomReadExpression(
							subColumn.getTemplate(
									dialect,
									typeConfiguration,
									null
							),
							Template.TEMPLATE + ".",
							aggregateReadTemplate,
							"",
							aggregateColumn,
							subColumn
					);
				}
				else {
					customReadExpression = aggregateSupport.aggregateComponentCustomReadExpression(
							"",
							"",
							aggregateReadTemplate,
							selectableExpression,
							aggregateColumn,
							subColumn
					);
				}
			}
			else {
				// Respect an explicitly configured custom read expression
				customReadExpression = aggregateSupport.aggregateComponentCustomReadExpression(
						subColumn.getCustomReadExpression(),
						Template.TEMPLATE + ".",
						aggregateReadTemplate,
						"",
						aggregateColumn,
						subColumn
				);
			}
			subColumn.setAssignmentExpression( assignmentExpression );
			subColumn.setCustomRead( customReadExpression );
		}
		// The aggregated columns are now represented solely by the aggregate column
		propertyHolder.getTable().getColumns().removeAll( aggregatedColumns );
	}

	/**
	 * Rejects basic plural (array/collection) mappings inside aggregates, recursing into
	 * nested non-aggregate components.
	 *
	 * @throws AnnotationException if an unsupported array mapping type is found
	 */
	private void validateSupportedColumnTypes(String basePath, Component component) {
		for ( Property property : component.getProperties() ) {
			final Value value = property.getValue();
			if ( value instanceof Component ) {
				final Component subComponent = (Component) value;
				// Nested aggregates are validated by their own second pass
				if ( subComponent.getAggregateColumn() == null ) {
					validateSupportedColumnTypes( StringHelper.qualify( basePath, property.getName() ), subComponent );
				}
			}
			else if ( value instanceof BasicValue ) {
				final BasicType<?> basicType = (BasicType<?>) value.getType();
				if ( basicType instanceof BasicPluralType<?, ?> ) {
					// todo: see HHH-15862
					throw new AnnotationException(
							"Property '" + StringHelper.qualify( basePath, property.getName() )
									+ "' uses not yet supported array mapping type in component class '"
									+ component.getComponentClassName()
									+ "'. Aggregate components currently may only contain simple basic values and components of simple basic values."
					);
				}
			}
		}
	}

	/**
	 * Forces initialization of the SQL type state of the given aggregate column and of
	 * every enclosing parent aggregate column, walking up the component hierarchy.
	 */
	private static void ensureInitialized(
			InFlightMetadataCollector metadataCollector,
			TypeConfiguration typeConfiguration,
			Dialect dialect,
			AggregateColumn aggregateColumn) {
		do {
			// Trigger resolving of the value so that the column gets properly filled
			aggregateColumn.getValue().getType();
			// Make sure this state is initialized
			aggregateColumn.getSqlTypeCode( metadataCollector );
			aggregateColumn.getSqlType( typeConfiguration, dialect, metadataCollector );
			aggregateColumn = aggregateColumn.getComponent().getParentAggregateColumn();
		} while ( aggregateColumn != null );
	}

	/**
	 * Verifies that two UDT definitions registered under the same struct name are
	 * structurally identical (same column count, columns and SQL types).
	 *
	 * @throws MappingException when the definitions diverge
	 */
	private void validateEqual(UserDefinedType udt1, UserDefinedType udt2) {
		if ( udt1.getColumnSpan() != udt2.getColumnSpan() ) {
			throw new MappingException(
					String.format(
							"Struct [%s] is defined by multiple components %s with different number of mappings %d and %d",
							udt1.getName(),
							findComponentClasses(),
							udt1.getColumnSpan(),
							udt2.getColumnSpan()
					)
			);
		}
		final List<Column> missingColumns = new ArrayList<>();
		for ( Column column1 : udt1.getColumns() ) {
			final Column column2 = udt2.getColumn( column1 );
			if ( column2 == null ) {
				missingColumns.add( column1 );
			}
			else if ( !column1.getSqlType().equals( column2.getSqlType() ) ) {
				throw new MappingException(
						String.format(
								"Struct [%s] of class [%s] is defined by multiple components with different mappings [%s] and [%s] for column [%s]",
								udt1.getName(),
								returnedClassOrElement.getName(),
								column1.getSqlType(),
								column2.getSqlType(),
								column1.getCanonicalName()
						)
				);
			}
		}
		if ( !missingColumns.isEmpty() ) {
			throw new MappingException(
					String.format(
							"Struct [%s] is defined by multiple components %s but some columns are missing in [%s]: %s",
							udt1.getName(),
							findComponentClasses(),
							returnedClassOrElement.getName(),
							missingColumns
					)
			);
		}
	}

	/**
	 * Collects the class names of all registered components sharing this component's
	 * struct name, for use in diagnostic messages.
	 */
	private TreeSet<String> findComponentClasses() {
		final TreeSet<String> componentClasses = new TreeSet<>();
		context.getMetadataCollector().visitRegisteredComponents(
				c -> {
					if ( component.getStructName().equals( c.getStructName() ) ) {
						componentClasses.add( c.getComponentClassName() );
					}
				}
		);
		return componentClasses;
	}
}

View File

@ -28,7 +28,9 @@ import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.cfg.annotations.Nullability;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.Formula;
import org.hibernate.mapping.Join;
import org.hibernate.mapping.SimpleValue;
@ -400,6 +402,12 @@ public class AnnotatedColumn {
}
//TODO: move this operation to AnnotatedColumns!!
	public void linkWithAggregateValue(SimpleValue value, Component component) {
		// Wrap the mapping column as an AggregateColumn before linking, so the value
		// is bound to the aggregate (struct/JSON) column rather than a plain column
		mappingColumn = new AggregateColumn( mappingColumn, component );
		linkWithValue( value );
	}
public void linkWithValue(SimpleValue value) {
if ( formula != null ) {
value.addFormula( formula );

View File

@ -1466,6 +1466,15 @@ public final class AnnotationBinder {
compositeUserType
);
}
else if ( property.isCollection() && property.getElementClass() != null
&& isEmbedded( property, property.getElementClass() ) ) {
// This is a special kind of basic aggregate component array type
// todo: see HHH-15830
throw new AnnotationException(
"Property '" + BinderHelper.getPath( propertyHolder, inferredData )
+ "' is mapped as basic aggregate component array, but this is not yet supported."
);
}
else {
createBasicBinder(
propertyHolder,
@ -1654,7 +1663,8 @@ public final class AnnotationBinder {
propertyName,
determineCustomInstantiator( property, returnedClass, context ),
compositeUserType,
actualColumns
actualColumns,
columns
);
}
@ -1941,7 +1951,8 @@ public final class AnnotationBinder {
String propertyName,
Class<? extends EmbeddableInstantiator> customInstantiatorImpl,
Class<? extends CompositeUserType<?>> compositeUserTypeClass,
AnnotatedJoinColumns columns) {
AnnotatedJoinColumns columns,
AnnotatedColumns annotatedColumns) {
final Component component;
if ( referencedEntityName != null ) {
component = createComponent(
@ -1972,6 +1983,7 @@ public final class AnnotationBinder {
false,
customInstantiatorImpl,
compositeUserTypeClass,
annotatedColumns,
context,
inheritanceStatePerClass
);
@ -2030,6 +2042,7 @@ public final class AnnotationBinder {
boolean inSecondPass,
Class<? extends EmbeddableInstantiator> customInstantiatorImpl,
Class<? extends CompositeUserType<?>> compositeUserTypeClass,
AnnotatedColumns columns,
MetadataBuildingContext context,
Map<XClass, InheritanceState> inheritanceStatePerClass) {
return fillComponent(
@ -2044,6 +2057,7 @@ public final class AnnotationBinder {
inSecondPass,
customInstantiatorImpl,
compositeUserTypeClass,
columns,
context,
inheritanceStatePerClass
);
@ -2061,6 +2075,7 @@ public final class AnnotationBinder {
boolean inSecondPass,
Class<? extends EmbeddableInstantiator> customInstantiatorImpl,
Class<? extends CompositeUserType<?>> compositeUserTypeClass,
AnnotatedColumns columns,
MetadataBuildingContext context,
Map<XClass, InheritanceState> inheritanceStatePerClass) {
// inSecondPass can only be used to apply right away the second pass of a composite-element
@ -2135,6 +2150,14 @@ public final class AnnotationBinder {
if ( compositeUserType != null ) {
processCompositeUserType( component, compositeUserType );
}
AggregateComponentBinder.processAggregate(
component,
propertyHolder,
inferredData,
returnedClassOrElement,
columns,
context
);
return component;
}

View File

@ -65,6 +65,7 @@ import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.mapping.BasicValue;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.Table;
import org.hibernate.resource.beans.spi.ManagedBean;
import org.hibernate.resource.beans.spi.ManagedBeanRegistry;
@ -125,6 +126,7 @@ public class BasicValueBinder implements JdbcTypeIndicators {
}
private final Kind kind;
private final Component aggregateComponent;
private final MetadataBuildingContext buildingContext;
@ -163,12 +165,16 @@ public class BasicValueBinder implements JdbcTypeIndicators {
private String returnedClassName;
private String referencedEntityName;
	// Convenience constructor for values that are not part of an aggregate component
	public BasicValueBinder(Kind kind, MetadataBuildingContext buildingContext) {
		this( kind, null, buildingContext );
	}

	// aggregateComponent is the enclosing aggregate (struct/JSON) component, or null
	// for a regular basic value
	public BasicValueBinder(Kind kind, Component aggregateComponent, MetadataBuildingContext buildingContext) {
		assert kind != null;
		assert buildingContext != null;

		this.kind = kind;
		this.aggregateComponent = aggregateComponent;
		this.buildingContext = buildingContext;
	}
@ -203,27 +209,35 @@ public class BasicValueBinder implements JdbcTypeIndicators {
@Override
public int getPreferredSqlTypeCodeForBoolean() {
return buildingContext.getPreferredSqlTypeCodeForBoolean();
return resolveJdbcTypeCode( buildingContext.getPreferredSqlTypeCodeForBoolean() );
}
@Override
public int getPreferredSqlTypeCodeForDuration() {
return buildingContext.getPreferredSqlTypeCodeForDuration();
return resolveJdbcTypeCode( buildingContext.getPreferredSqlTypeCodeForDuration() );
}
@Override
public int getPreferredSqlTypeCodeForUuid() {
return buildingContext.getPreferredSqlTypeCodeForUuid();
return resolveJdbcTypeCode( buildingContext.getPreferredSqlTypeCodeForUuid() );
}
@Override
public int getPreferredSqlTypeCodeForInstant() {
return buildingContext.getPreferredSqlTypeCodeForInstant();
return resolveJdbcTypeCode( buildingContext.getPreferredSqlTypeCodeForInstant() );
}
@Override
public int getPreferredSqlTypeCodeForArray() {
return buildingContext.getPreferredSqlTypeCodeForArray();
return resolveJdbcTypeCode( buildingContext.getPreferredSqlTypeCodeForArray() );
}
	@Override
	public int resolveJdbcTypeCode(int jdbcTypeCode) {
		// Inside an aggregate component, the dialect's AggregateSupport may substitute
		// a different JDBC type code depending on the aggregate column's own type code;
		// outside an aggregate the requested code is used as-is
		return aggregateComponent == null
				? jdbcTypeCode
				: buildingContext.getMetadataCollector().getDatabase().getDialect().getAggregateSupport()
						.aggregateComponentSqlTypeCode( aggregateComponent.getAggregateColumn().getSqlTypeCode(), jdbcTypeCode );
	}
@Override
@ -1130,6 +1144,10 @@ public class BasicValueBinder implements JdbcTypeIndicators {
new PkDrivenByDefaultMapsIdSecondPass( referencedEntityName, joinColumns, basicValue )
);
}
else if ( aggregateComponent != null ) {
assert columns.getColumns().size() == 1;
firstColumn.linkWithAggregateValue( basicValue, aggregateComponent );
}
else {
for ( AnnotatedColumn column : columns.getColumns() ) {
column.linkWithValue( basicValue );

View File

@ -2218,6 +2218,7 @@ public abstract class CollectionBinder {
true,
resolveCustomInstantiator( property, elementClass, buildingContext ),
compositeUserType,
null,
buildingContext,
inheritanceStatePerClass
);

View File

@ -432,6 +432,7 @@ public class EntityBinder {
false,
null,
null,
null,
context,
inheritanceStates
);
@ -515,6 +516,7 @@ public class EntityBinder {
false,
null,
null,
null,
buildingContext,
inheritanceStates
);

View File

@ -385,6 +385,7 @@ public class MapBinder extends CollectionBinder {
true,
null,
compositeUserType,
null,
buildingContext,
inheritanceStatePerClass
) );

View File

@ -30,9 +30,12 @@ import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.sql.Types;
import java.time.Duration;
import java.time.temporal.TemporalAccessor;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@ -138,6 +141,13 @@ import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.TINYINT;
import static org.hibernate.type.SqlTypes.VARCHAR;
import static org.hibernate.type.descriptor.DateTimeUtils.JDBC_ESCAPE_END;
import static org.hibernate.type.descriptor.DateTimeUtils.JDBC_ESCAPE_START_DATE;
import static org.hibernate.type.descriptor.DateTimeUtils.JDBC_ESCAPE_START_TIME;
import static org.hibernate.type.descriptor.DateTimeUtils.JDBC_ESCAPE_START_TIMESTAMP;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsDate;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTime;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMicros;
/**
* An abstract base class for SAP HANA dialects.
@ -1197,6 +1207,56 @@ public abstract class AbstractHANADialect extends Dialect {
}
}
/**
 * Appends a datetime literal using JDBC escape syntax, rendering timestamps
 * with microsecond precision (presumably the maximum HANA accepts in a
 * timestamp escape — TODO confirm against HANA docs).
 *
 * @param appender the target to append the SQL literal to
 * @param temporalAccessor the datetime value to render
 * @param precision whether to render a date, time or timestamp literal
 * @param jdbcTimeZone the time zone to apply when rendering offset/zoned values
 */
@Override
public void appendDateTimeLiteral(
		SqlAppender appender,
		TemporalAccessor temporalAccessor,
		TemporalType precision,
		TimeZone jdbcTimeZone) {
	switch ( precision ) {
		case DATE:
			appender.appendSql( JDBC_ESCAPE_START_DATE );
			appendAsDate( appender, temporalAccessor );
			appender.appendSql( JDBC_ESCAPE_END );
			break;
		case TIME:
			appender.appendSql( JDBC_ESCAPE_START_TIME );
			appendAsTime( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
			appender.appendSql( JDBC_ESCAPE_END );
			break;
		case TIMESTAMP:
			appender.appendSql( JDBC_ESCAPE_START_TIMESTAMP );
			appendAsTimestampWithMicros( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
			appender.appendSql( JDBC_ESCAPE_END );
			break;
		default:
			// Fix: include the offending value instead of throwing a message-less exception
			throw new IllegalArgumentException( "Unexpected TemporalType precision: " + precision );
	}
}
/**
 * Appends a datetime literal for a legacy {@link java.util.Date} value using
 * JDBC escape syntax; timestamps are rendered with microsecond precision.
 *
 * @param appender the target to append the SQL literal to
 * @param date the datetime value to render
 * @param precision whether to render a date, time or timestamp literal
 * @param jdbcTimeZone the time zone to apply when rendering the timestamp
 */
@Override
public void appendDateTimeLiteral(SqlAppender appender, Date date, TemporalType precision, TimeZone jdbcTimeZone) {
	switch ( precision ) {
		case DATE:
			appender.appendSql( JDBC_ESCAPE_START_DATE );
			appendAsDate( appender, date );
			appender.appendSql( JDBC_ESCAPE_END );
			break;
		case TIME:
			appender.appendSql( JDBC_ESCAPE_START_TIME );
			appendAsTime( appender, date );
			appender.appendSql( JDBC_ESCAPE_END );
			break;
		case TIMESTAMP:
			appender.appendSql( JDBC_ESCAPE_START_TIMESTAMP );
			appendAsTimestampWithMicros( appender, date, jdbcTimeZone );
			appender.appendSql( JDBC_ESCAPE_END );
			break;
		default:
			// Fix: include the offending value instead of throwing a message-less exception
			throw new IllegalArgumentException( "Unexpected TemporalType precision: " + precision );
	}
}
@Override
public String generatedAs(String generatedAs) {
return " generated always as (" + generatedAs + ")";

View File

@ -0,0 +1,74 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.sql.SQLException;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
/**
 * Base class for PostgreSQL JSON jdbc types that read and write their value
 * through a PGobject-backed string representation.
 * <p>
 * When constructed with an {@link EmbeddableMappingType}, this type acts as an
 * aggregate mapping: values are (de)serialized through {@link JsonHelper}.
 * Without one, (de)serialization is delegated to the JSON format mapper
 * configured on the session factory.
 *
 * @author Christian Beikov
 */
public abstract class AbstractPostgreSQLJsonJdbcType extends PostgreSQLPGObjectJdbcType implements AggregateJdbcType {

	// The aggregate embeddable this JSON column maps, or null for the plain JSON type
	private final EmbeddableMappingType embeddableMappingType;

	public AbstractPostgreSQLJsonJdbcType(EmbeddableMappingType embeddableMappingType, String typeName) {
		super( typeName, SqlTypes.JSON );
		this.embeddableMappingType = embeddableMappingType;
	}

	@Override
	public EmbeddableMappingType getEmbeddableMappingType() {
		return embeddableMappingType;
	}

	@Override
	protected <X> X fromString(String string, JavaType<X> javaType, WrapperOptions options) throws SQLException {
		if ( embeddableMappingType != null ) {
			// Aggregate case: materialize through JsonHelper. The boolean presumably
			// controls whether a domain instance (vs raw Object[]) is produced —
			// note it is false when the requested java type is Object[].
			return JsonHelper.fromString(
					embeddableMappingType,
					string,
					javaType.getJavaTypeClass() != Object[].class,
					options
			);
		}
		// Plain JSON case: delegate to the configured JSON format mapper
		return options.getSessionFactory().getFastSessionServices().getJsonFormatMapper().fromString(
				string,
				javaType,
				options
		);
	}

	@Override
	protected <X> String toString(X value, JavaType<X> javaType, WrapperOptions options) {
		if ( embeddableMappingType != null ) {
			return JsonHelper.toString( embeddableMappingType, value, options );
		}
		return options.getSessionFactory().getFastSessionServices().getJsonFormatMapper().toString(
				value,
				javaType,
				options
		);
	}

	@Override
	public Object createJdbcValue(Object domainValue, WrapperOptions options) throws SQLException {
		// Only meaningful for aggregate usage, hence the assertion
		assert embeddableMappingType != null;
		return JsonHelper.toString( embeddableMappingType, domainValue, options );
	}

	@Override
	public Object[] extractJdbcValues(Object rawJdbcValue, WrapperOptions options) throws SQLException {
		// Raw value arrives as the JSON string; false requests the Object[] form
		assert embeddableMappingType != null;
		return JsonHelper.fromString( embeddableMappingType, (String) rawJdbcValue, false, options );
	}
}

View File

@ -15,6 +15,8 @@ import java.util.List;
import org.hibernate.LockOptions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.dialect.aggregate.DB2AggregateSupport;
import org.hibernate.dialect.function.CastingConcatFunction;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.CountFunction;
@ -43,6 +45,8 @@ import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.procedure.internal.DB2CallableStatementSupport;
import org.hibernate.procedure.spi.CallableStatementSupport;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.procedure.internal.DB2CallableStatementSupport;
import org.hibernate.procedure.spi.CallableStatementSupport;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.query.sqm.TemporalUnit;
import org.hibernate.query.sqm.mutation.internal.cte.CteInsertStrategy;
@ -181,6 +185,9 @@ public class DB2Dialect extends Dialect {
return "timestamp($p)";
case TIME_WITH_TIMEZONE:
return "time";
case BINARY:
// should use 'binary' since version 11
return getDB2Version().isBefore( 11 ) ? "char($l) for bit data" : super.columnType( sqlTypeCode );
case VARBINARY:
// should use 'varbinary' since version 11
return getDB2Version().isBefore( 11 ) ? "varchar($l) for bit data" : super.columnType( sqlTypeCode );
@ -196,8 +203,8 @@ public class DB2Dialect extends Dialect {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( SQLXML, "xml", this ) );
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder( BINARY, "varchar($l) for bit data", this )
.withTypeCapacity( 254, "char($l) for bit data" )
CapacityDependentDdlType.builder( BINARY, columnType( VARBINARY ), this )
.withTypeCapacity( 254, columnType( BINARY ) )
.build()
);
}
@ -736,6 +743,7 @@ public class DB2Dialect extends Dialect {
jdbcTypeRegistry.addDescriptor( Types.NUMERIC, DecimalJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptor( XmlJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptor( DB2StructJdbcType.INSTANCE );
// DB2 requires a custom binder for binding untyped nulls that resolves the type through the statement
typeContributions.contributeJdbcType( ObjectNullResolvingJdbcType.INSTANCE );
@ -751,6 +759,11 @@ public class DB2Dialect extends Dialect {
);
}
@Override
public AggregateSupport getAggregateSupport() {
	// DB2-specific aggregate (struct) column support
	return DB2AggregateSupport.INSTANCE;
}
@Override
public CallableStatementSupport getCallableStatementSupport() {
return DB2CallableStatementSupport.INSTANCE;
@ -933,4 +946,9 @@ public class DB2Dialect extends Dialect {
public String getTruncateTableStatement(String tableName) {
return super.getTruncateTableStatement(tableName) + " immediate";
}
@Override
public String getCreateUserDefinedTypeExtensionsString() {
	// Appended to "create type" DDL: declare the UDT instantiable in db2sql mode
	return " instantiable mode db2sql";
}
}

View File

@ -0,0 +1,174 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLXML;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.spi.UnknownBasicJavaType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * DB2 supports UDTs but not in JDBC, so there is a feature called "transforms",
 * which allows to specify an automatic translation from one data type to another.
 * To support UDTs, we require that a transform exists for the UDT that serializes from SQL to XML
 * and deserializes to SQL from UDT. This means that from the JDBC perspective, this is an XML type,
 * but the database models it internally as UDT.
 *
 * The {@link org.hibernate.dialect.aggregate.DB2AggregateSupport} generates the functions and transforms for this
 * process automatically, but note that all of this is only used for functions and native queries.
 * By default, we select individual struct parts to avoid the encoding/decoding.
 *
 * @author Christian Beikov
 */
public class DB2StructJdbcType implements AggregateJdbcType {

	public static final DB2StructJdbcType INSTANCE = new DB2StructJdbcType();

	// SQL name of the struct type, or null for the read-only default instance
	private final String structTypeName;
	// The embeddable mapped by this struct, or null for the read-only default instance
	private final EmbeddableMappingType embeddableMappingType;
	// Cached extractor for the Object[] representation (see constructor comment)
	private final ValueExtractor<Object[]> objectArrayExtractor;

	private DB2StructJdbcType() {
		// The default instance is for reading only and will return an Object[]
		this( null, null );
	}

	public DB2StructJdbcType(String structTypeName, EmbeddableMappingType embeddableMappingType) {
		this.structTypeName = structTypeName;
		this.embeddableMappingType = embeddableMappingType;
		// We cache the extractor for Object[] here
		// since that is used in AggregateEmbeddableFetchImpl and AggregateEmbeddableResultImpl
		this.objectArrayExtractor = createBasicExtractor( new UnknownBasicJavaType<>( Object[].class ) );
	}

	@Override
	public int getJdbcTypeCode() {
		// At the JDBC level the value travels as XML (see class javadoc on transforms)
		return SqlTypes.SQLXML;
	}

	@Override
	public int getDefaultSqlTypeCode() {
		// Logically this models a SQL STRUCT (UDT)
		return SqlTypes.STRUCT;
	}

	@Override
	public AggregateJdbcType resolveAggregateJdbcType(EmbeddableMappingType mappingType, String sqlType) {
		// Produce a typed variant bound to the given struct type and embeddable
		return new DB2StructJdbcType( sqlType, mappingType );
	}

	@Override
	public EmbeddableMappingType getEmbeddableMappingType() {
		return embeddableMappingType;
	}

	public String getStructTypeName() {
		return structTypeName;
	}

	@Override
	public <T> JavaType<T> getJdbcRecommendedJavaTypeMapping(
			Integer precision,
			Integer scale,
			TypeConfiguration typeConfiguration) {
		// Untyped (default) instance reads into Object[]; typed instances map to the embeddable's java type
		if ( embeddableMappingType == null ) {
			return typeConfiguration.getJavaTypeRegistry().getDescriptor( Object[].class );
		}
		else {
			//noinspection unchecked
			return (JavaType<T>) embeddableMappingType.getMappedJavaType();
		}
	}

	@Override
	public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
		return new BasicBinder<>( javaType, this ) {
			@Override
			protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
					throws SQLException {
				// Bind the value in its SQLXML form; the database-side transform converts it to the UDT
				st.setSQLXML( index, createJdbcValue( value, options ) );
			}

			@Override
			protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
					throws SQLException {
				st.setSQLXML( name, createJdbcValue( value, options ) );
			}
		};
	}

	@Override
	public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
		// Reuse the cached Object[] extractor when possible (hot path for aggregate fetches/results)
		if ( javaType.getJavaTypeClass() == Object[].class ) {
			//noinspection unchecked
			return (ValueExtractor<X>) objectArrayExtractor;
		}
		return createBasicExtractor( javaType );
	}

	private <X> BasicExtractor<X> createBasicExtractor(JavaType<X> javaType) {
		return new BasicExtractor<>( javaType, this ) {
			@Override
			protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
				return getValue( rs.getSQLXML( paramIndex ), options );
			}

			@Override
			protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
				return getValue( statement.getSQLXML( index ), options );
			}

			@Override
			protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
					throws SQLException {
				return getValue( statement.getSQLXML( name ), options );
			}

			private X getValue(SQLXML object, WrapperOptions options) throws SQLException {
				if ( object == null ) {
					return null;
				}
				// Decode the XML form; the boolean presumably selects domain-object
				// materialization (false when Object[] is requested)
				return XmlHelper.fromString(
						embeddableMappingType,
						object.getString(),
						javaType.getJavaTypeClass() != Object[].class,
						options
				);
			}
		};
	}

	@Override
	public SQLXML createJdbcValue(Object value, WrapperOptions options) throws SQLException {
		// Create a SQLXML on the current physical connection and fill it with the serialized struct
		final SQLXML sqlxml = options.getSession()
				.getJdbcCoordinator()
				.getLogicalConnection()
				.getPhysicalConnection()
				.createSQLXML();
		sqlxml.setString( XmlHelper.toString( embeddableMappingType, value, options) );
		return sqlxml;
	}

	@Override
	public Object[] extractJdbcValues(Object rawJdbcValue, WrapperOptions options) throws SQLException {
		// Raw value arrives as the XML string; false requests the Object[] form
		return XmlHelper.fromString( embeddableMappingType, (String) rawJdbcValue, false, options );
	}
}

View File

@ -49,6 +49,8 @@ import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.boot.spi.SessionFactoryOptions;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.dialect.aggregate.AggregateSupportImpl;
import org.hibernate.dialect.function.CastFunction;
import org.hibernate.dialect.function.CastStrEmulation;
import org.hibernate.dialect.function.CoalesceIfnullEmulation;
@ -108,6 +110,7 @@ import org.hibernate.mapping.Constraint;
import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Index;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UserDefinedType;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.persister.entity.Lockable;
@ -155,6 +158,7 @@ import org.hibernate.tool.schema.internal.StandardTableExporter;
import org.hibernate.tool.schema.internal.StandardTableMigrator;
import org.hibernate.tool.schema.internal.StandardUniqueKeyExporter;
import org.hibernate.tool.schema.internal.TableMigrator;
import org.hibernate.tool.schema.internal.StandardUserDefinedTypeExporter;
import org.hibernate.tool.schema.spi.Cleaner;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
@ -228,8 +232,8 @@ import static org.hibernate.type.descriptor.DateTimeUtils.JDBC_ESCAPE_START_TIME
import static org.hibernate.type.descriptor.DateTimeUtils.JDBC_ESCAPE_START_TIMESTAMP;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsDate;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTime;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMicros;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMillis;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithNanos;
/**
* Represents a dialect of SQL implemented by a particular RDBMS. Every
@ -2043,6 +2047,47 @@ public abstract class Dialect implements ConversionContext {
);
}
// UDT support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
 * The kind keyword used when creating a user-defined type,
 * e.g. {@code "object"} on Oracle. Empty by default.
 *
 * @return the user-defined type kind fragment, or an empty string if none
 */
public String getCreateUserDefinedTypeKindString() {
	return "";
}

/**
 * Extension fragment appended to the user-defined type creation command,
 * e.g. {@code " instantiable mode db2sql"} on DB2. Empty by default.
 *
 * @return the user-defined type creation extensions, or an empty string if none
 */
public String getCreateUserDefinedTypeExtensionsString() {
	return "";
}
/**
 * For dropping a type, can the phrase {@code if exists} be
 * applied before the type name?
 * <p/>
 * NOTE : Only one or the other (or neither) of this and
 * {@link #supportsIfExistsAfterTypeName} should return true.
 *
 * @return {@code true} if {@code if exists} can be applied before the type name
 */
public boolean supportsIfExistsBeforeTypeName() {
	return false;
}

/**
 * For dropping a type, can the phrase {@code if exists} be
 * applied after the type name?
 * <p/>
 * NOTE : Only one or the other (or neither) of this and
 * {@link #supportsIfExistsBeforeTypeName} should return true.
 *
 * @return {@code true} if {@code if exists} can be applied after the type name
 */
public boolean supportsIfExistsAfterTypeName() {
	return false;
}
// callable statement support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
@ -2483,6 +2528,7 @@ public abstract class Dialect implements ConversionContext {
// DDL support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
private final StandardTableExporter tableExporter = new StandardTableExporter( this );
private final StandardUserDefinedTypeExporter userDefinedTypeExporter = new StandardUserDefinedTypeExporter( this );
private final StandardSequenceExporter sequenceExporter = new StandardSequenceExporter( this );
private final StandardIndexExporter indexExporter = new StandardIndexExporter( this );
private final StandardForeignKeyExporter foreignKeyExporter = new StandardForeignKeyExporter( this );
@ -2504,6 +2550,10 @@ public abstract class Dialect implements ConversionContext {
return tableCleaner;
}
/**
 * The exporter responsible for rendering DDL for {@link UserDefinedType}s.
 */
public Exporter<UserDefinedType> getUserDefinedTypeExporter() {
	return userDefinedTypeExporter;
}
public Exporter<Sequence> getSequenceExporter() {
return sequenceExporter;
}
@ -2829,6 +2879,17 @@ public abstract class Dialect implements ConversionContext {
return "";
}
/**
 * Get the comment into a form supported for UDT definition.
 * The default implementation returns an empty string, i.e. UDT comments
 * are not rendered unless a dialect overrides this method.
 *
 * @param comment The comment to apply
 *
 * @return The comment fragment
 */
public String getUserDefinedTypeComment(String comment) {
	return "";
}
/**
* Get the comment into a form supported for column definition.
*
@ -3513,6 +3574,15 @@ public abstract class Dialect implements ConversionContext {
return NationalizationSupport.EXPLICIT;
}
/**
 * How the Dialect supports aggregate types like {@link SqlTypes#STRUCT}.
 * The default implementation returns {@link AggregateSupportImpl#INSTANCE};
 * dialects with native aggregate support override this.
 *
 * @since 6.2
 */
public AggregateSupport getAggregateSupport() {
	return AggregateSupportImpl.INSTANCE;
}
/**
* Database has native support for SQL standard arrays which
* can be referred to by its base type name.
@ -4325,7 +4395,7 @@ public abstract class Dialect implements ConversionContext {
break;
case TIMESTAMP:
appender.appendSql( JDBC_ESCAPE_START_TIMESTAMP );
appendAsTimestampWithMicros( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
appendAsTimestampWithNanos( appender, temporalAccessor, supportsTemporalLiteralOffset(), jdbcTimeZone );
appender.appendSql( JDBC_ESCAPE_END );
break;
default:
@ -4347,7 +4417,7 @@ public abstract class Dialect implements ConversionContext {
break;
case TIMESTAMP:
appender.appendSql( JDBC_ESCAPE_START_TIMESTAMP );
appendAsTimestampWithMicros( appender, date, jdbcTimeZone );
appendAsTimestampWithNanos( appender, date, jdbcTimeZone );
appender.appendSql( JDBC_ESCAPE_END );
break;
default:

View File

@ -63,6 +63,7 @@ import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorH2DatabaseImpl;
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorLegacyImpl;
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.descriptor.DateTimeUtils;
import org.hibernate.type.descriptor.jdbc.InstantJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.UUIDJdbcType;
@ -88,6 +89,7 @@ import static org.hibernate.type.SqlTypes.LONG32VARCHAR;
import static org.hibernate.type.SqlTypes.NCHAR;
import static org.hibernate.type.SqlTypes.NUMERIC;
import static org.hibernate.type.SqlTypes.NVARCHAR;
import static org.hibernate.type.SqlTypes.OTHER;
import static org.hibernate.type.SqlTypes.TIMESTAMP_UTC;
import static org.hibernate.type.SqlTypes.UUID;
import static org.hibernate.type.SqlTypes.VARBINARY;
@ -95,7 +97,8 @@ import static org.hibernate.type.SqlTypes.VARCHAR;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsDate;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsLocalTime;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTime;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMicros;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMillis;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithNanos;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMillis;
/**
@ -346,8 +349,17 @@ public class H2Dialect extends Dialect {
int scale,
JdbcTypeRegistry jdbcTypeRegistry) {
// As of H2 2.0 we get a FLOAT type code even though it is a DOUBLE
if ( jdbcTypeCode == FLOAT && "DOUBLE PRECISION".equals( columnTypeName ) ) {
return jdbcTypeRegistry.getDescriptor( DOUBLE );
switch ( jdbcTypeCode ) {
case FLOAT:
if ( "DOUBLE PRECISION".equals( columnTypeName ) ) {
return jdbcTypeRegistry.getDescriptor( DOUBLE );
}
break;
case OTHER:
if ( "GEOMETRY".equals( columnTypeName ) ) {
return jdbcTypeRegistry.getDescriptor( GEOMETRY );
}
break;
}
return super.resolveSqlTypeDescriptor( columnTypeName, jdbcTypeCode, precision, scale, jdbcTypeRegistry );
}
@ -447,12 +459,12 @@ public class H2Dialect extends Dialect {
case TIMESTAMP:
if ( supportsTemporalLiteralOffset() && temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
appender.appendSql( "timestamp with time zone '" );
appendAsTimestampWithMicros( appender, temporalAccessor, true, jdbcTimeZone );
appendAsTimestampWithNanos( appender, temporalAccessor, true, jdbcTimeZone );
appender.appendSql( '\'' );
}
else {
appender.appendSql( "timestamp '" );
appendAsTimestampWithMicros( appender, temporalAccessor, false, jdbcTimeZone );
appendAsTimestampWithNanos( appender, temporalAccessor, false, jdbcTimeZone );
appender.appendSql( '\'' );
}
break;
@ -482,7 +494,7 @@ public class H2Dialect extends Dialect {
break;
case TIMESTAMP:
appender.appendSql( "timestamp with time zone '" );
appendAsTimestampWithMicros( appender, date, jdbcTimeZone );
appendAsTimestampWithNanos( appender, date, jdbcTimeZone );
appender.appendSql( '\'' );
break;
default:

View File

@ -12,6 +12,7 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.time.Duration;
import java.time.Instant;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
@ -40,6 +41,11 @@ public class H2DurationIntervalSecondJdbcType implements JdbcType {
return SqlTypes.INTERVAL_SECOND;
}
/**
 * Prefer {@link Duration} as the Java-side representation for this
 * {@code INTERVAL SECOND} jdbc type.
 */
@Override
public Class<?> getPreferredJavaTypeClass(WrapperOptions options) {
	return Duration.class;
}
@Override
public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaType) {
return (appender, value, dialect, wrapperOptions) -> dialect.appendIntervalLiteral(

File diff suppressed because it is too large Load Diff

View File

@ -35,8 +35,10 @@ import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import static org.hibernate.query.sqm.produce.function.FunctionParameterType.NUMERIC;
import static org.hibernate.type.SqlTypes.GEOMETRY;
import static org.hibernate.type.SqlTypes.OTHER;
import static org.hibernate.type.SqlTypes.UUID;
import static org.hibernate.type.SqlTypes.VARBINARY;
/**
* A {@linkplain Dialect SQL dialect} for MariaDB
@ -121,6 +123,11 @@ public class MariaDBDialect extends MySQLDialect {
break;
}
break;
case VARBINARY:
if ( "GEOMETRY".equals( columnTypeName ) ) {
jdbcTypeCode = GEOMETRY;
}
break;
}
return super.resolveSqlTypeDescriptor( columnTypeName, jdbcTypeCode, precision, scale, jdbcTypeRegistry );
}

View File

@ -472,8 +472,14 @@ public class MySQLDialect extends Dialect {
int precision,
int scale,
JdbcTypeRegistry jdbcTypeRegistry) {
if ( jdbcTypeCode == Types.BIT ) {
return jdbcTypeRegistry.getDescriptor( Types.BOOLEAN );
switch ( jdbcTypeCode ) {
case Types.BIT:
return jdbcTypeRegistry.getDescriptor( Types.BOOLEAN );
case Types.BINARY:
if ( "GEOMETRY".equals( columnTypeName ) ) {
jdbcTypeCode = GEOMETRY;
}
break;
}
return super.resolveSqlTypeDescriptor(
columnTypeName,

View File

@ -21,6 +21,8 @@ import org.hibernate.LockOptions;
import org.hibernate.QueryTimeoutException;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.dialect.aggregate.OracleAggregateSupport;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.ModeStatsModeEmulation;
import org.hibernate.dialect.identity.IdentityColumnSupport;
@ -76,14 +78,16 @@ import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorOr
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.JavaObjectType;
import org.hibernate.type.NullType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.java.BooleanJavaType;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.PrimitiveByteArrayJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.BlobJdbcType;
import org.hibernate.type.descriptor.jdbc.BooleanJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonBlobJdbcType;
import org.hibernate.type.descriptor.jdbc.OracleJsonBlobJdbcType;
import org.hibernate.type.descriptor.jdbc.NullJdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectNullAsNullTypeJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
@ -113,6 +117,7 @@ import static org.hibernate.type.SqlTypes.NUMERIC;
import static org.hibernate.type.SqlTypes.NVARCHAR;
import static org.hibernate.type.SqlTypes.REAL;
import static org.hibernate.type.SqlTypes.SMALLINT;
import static org.hibernate.type.SqlTypes.STRUCT;
import static org.hibernate.type.SqlTypes.SQLXML;
import static org.hibernate.type.SqlTypes.TIME;
import static org.hibernate.type.SqlTypes.TIME_WITH_TIMEZONE;
@ -352,6 +357,9 @@ public class OracleDialect extends Dialect {
return "to_char(?1,'YYYY-MM-DD HH24:MI:SS.FF9 TZR')";
}
break;
case CLOB:
// Oracle doesn't like casting to clob
return "to_clob(?1)";
case DATE:
if ( from == CastType.STRING ) {
return "to_date(?1,'YYYY-MM-DD')";
@ -677,6 +685,20 @@ public class OracleDialect extends Dialect {
switch ( jdbcTypeCode ) {
case OracleTypes.JSON:
return jdbcTypeRegistry.getDescriptor( JSON );
case STRUCT:
if ( "MDSYS.SDO_GEOMETRY".equals( columnTypeName ) ) {
jdbcTypeCode = SqlTypes.GEOMETRY;
}
else {
final AggregateJdbcType aggregateDescriptor = jdbcTypeRegistry.findAggregateDescriptor(
// Skip the schema
columnTypeName.substring( columnTypeName.indexOf( '.' ) + 1 )
);
if ( aggregateDescriptor != null ) {
return aggregateDescriptor;
}
}
break;
case Types.NUMERIC:
if ( scale == -127 ) {
// For some reason, the Oracle JDBC driver reports FLOAT
@ -738,6 +760,7 @@ public class OracleDialect extends Dialect {
typeContributions.contributeJdbcType( OracleBooleanJdbcType.INSTANCE );
typeContributions.contributeJdbcType( OracleXmlJdbcType.INSTANCE );
typeContributions.contributeJdbcType( OracleStructJdbcType.INSTANCE );
if ( getVersion().isSameOrAfter( 12 ) ) {
// account for Oracle's deprecated support for LONGVARBINARY
@ -755,10 +778,10 @@ public class OracleDialect extends Dialect {
typeContributions.contributeJdbcType( descriptor );
if ( getVersion().isSameOrAfter( 21 ) ) {
typeContributions.contributeJdbcType( OracleTypesHelper.INSTANCE.getJsonJdbcType() );
typeContributions.contributeJdbcType( OracleJsonJdbcType.INSTANCE );
}
else {
typeContributions.contributeJdbcType( JsonBlobJdbcType.INSTANCE );
typeContributions.contributeJdbcType( OracleJsonBlobJdbcType.INSTANCE );
}
}
@ -786,6 +809,11 @@ public class OracleDialect extends Dialect {
);
}
@Override
public AggregateSupport getAggregateSupport() {
	// Resolve the Oracle aggregate support variant for this dialect instance
	return OracleAggregateSupport.valueOf( this );
}
@Override
public String getNativeIdentifierGeneratorStrategy() {
return "sequence";
@ -1349,4 +1377,9 @@ public class OracleDialect extends Dialect {
public UniqueDelegate getUniqueDelegate() {
return uniqueDelegate;
}
@Override
public String getCreateUserDefinedTypeKindString() {
	// Oracle UDTs are created via "create type ... as object"
	return "object";
}
}

View File

@ -6,45 +6,24 @@
*/
package org.hibernate.dialect;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import oracle.jdbc.OracleType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.OracleJsonBlobJdbcType;
/**
* Specialized type mapping for {@code JSON} that encodes as OSON.
* This class is used from {@link OracleTypesHelper} reflectively to avoid loading Oracle JDBC classes eagerly.
* Specialized type mapping for {@code JSON} and the JSON SQL data type for Oracle.
*
* @author Christian Beikov
*/
public class OracleJsonJdbcType implements JdbcType {
public class OracleJsonJdbcType extends OracleJsonBlobJdbcType {
/**
* Singleton access
*/
public static final OracleJsonJdbcType INSTANCE = new OracleJsonJdbcType();
public static final OracleJsonJdbcType INSTANCE = new OracleJsonJdbcType( null );
private static final int JSON_TYPE_CODE = OracleType.JSON.getVendorTypeNumber();
@Override
public int getJdbcTypeCode() {
return SqlTypes.BLOB;
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.JSON;
private OracleJsonJdbcType(EmbeddableMappingType embeddableMappingType) {
super( embeddableMappingType );
}
@Override
@ -53,66 +32,13 @@ public class OracleJsonJdbcType implements JdbcType {
}
@Override
public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaType) {
// No literal support for now
public AggregateJdbcType resolveAggregateJdbcType(EmbeddableMappingType mappingType, String sqlType) {
return new OracleJsonJdbcType( mappingType );
}
/**
 * Returns {@code null}: no check constraint is generated for this type.
 */
@Override
public String getCheckCondition(String columnName, JavaType<?> javaType, Dialect dialect) {
	// No check constraint necessary, because the JSON DDL type is already OSON encoded
	return null;
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
final String json = options.getSessionFactory().getFastSessionServices().getJsonFormatMapper().toString(
value,
getJavaType(),
options
);
st.setObject( index, json, JSON_TYPE_CODE );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
final String json = options.getSessionFactory().getFastSessionServices().getJsonFormatMapper().toString(
value,
getJavaType(),
options
);
st.setObject( name, json, JSON_TYPE_CODE );
}
};
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
return new BasicExtractor<>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return getObject( rs.getString( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return getObject( statement.getString( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException {
return getObject( statement.getString( name ), options );
}
private X getObject(String json, WrapperOptions options) throws SQLException {
if ( json == null ) {
return null;
}
return options.getSessionFactory().getFastSessionServices().getJsonFormatMapper().fromString(
json,
getJavaType(),
options
);
}
};
}
}

View File

@ -8,6 +8,7 @@ package org.hibernate.dialect;
import java.util.List;
import org.hibernate.LockMode;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.Stack;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
@ -21,13 +22,17 @@ import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteMaterialization;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.AggregateColumnWriteExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.FunctionExpression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Over;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.SqlTupleContainer;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.FunctionTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.from.UnionTableGroup;
import org.hibernate.sql.ast.tree.from.ValuesTableReference;
@ -38,6 +43,7 @@ import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SelectClause;
import org.hibernate.sql.ast.tree.select.SortSpecification;
import org.hibernate.sql.ast.tree.update.Assignment;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.type.SqlTypes;
@ -484,4 +490,49 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
return getDialect().supportsFetchClause( FetchClauseType.ROWS_ONLY );
}
@Override
protected boolean renderNamedTableReference(NamedTableReference tableReference, LockMode lockMode) {
	// Renders only the table expression and its identification variable.
	// NOTE(review): unlike the default implementation this ignores the lock mode here —
	// presumably Oracle locking is applied elsewhere (e.g. FOR UPDATE clause); confirm.
	appendSql( tableReference.getTableExpression() );
	registerAffectedTable( tableReference );
	renderTableReferenceIdentificationVariable( tableReference );
	// false: no lock-mode handling was emitted as part of the table reference
	return false;
}
@Override
protected void visitSetAssignment(Assignment assignment) {
	// Oracle supports both the single-column form `col = value` and the
	// multi-column form `(col1, col2, ...) = <value>` in SET clauses.
	final List<ColumnReference> targetColumns = assignment.getAssignable().getColumnReferences();
	if ( targetColumns.size() == 1 ) {
		targetColumns.get( 0 ).appendColumnForWrite( this );
		appendSql( '=' );
		final Expression newValue = assignment.getAssignedValue();
		final SqlTuple tuple = SqlTupleContainer.getSqlTuple( newValue );
		if ( tuple == null ) {
			newValue.accept( this );
		}
		else {
			// A single-column target can only carry a degenerate one-element tuple;
			// unwrap it so no parentheses are emitted around the value
			assert tuple.getExpressions().size() == 1;
			tuple.getExpressions().get( 0 ).accept( this );
		}
	}
	else {
		// Emit `(col1,col2,...)=` followed by the assigned value
		for ( int i = 0; i < targetColumns.size(); i++ ) {
			appendSql( i == 0 ? OPEN_PARENTHESIS : COMA_SEPARATOR_CHAR );
			targetColumns.get( i ).appendColumnForWrite( this );
		}
		appendSql( ")=" );
		assignment.getAssignedValue().accept( this );
	}
}
@Override
public void visitColumnReference(ColumnReference columnReference) {
	// Delegate to the column reference itself, which may render a custom read
	// expression (e.g. for aggregate/struct columns) instead of the bare column name.
	columnReference.appendReadExpression( this );
}
@Override
public void visitAggregateColumnWriteExpression(AggregateColumnWriteExpression aggregateColumnWriteExpression) {
	// The expression knows how to render the write form for an aggregate column;
	// this translator acts as both the SqlAppender and the SqlAstWalker here.
	aggregateColumnWriteExpression.appendWriteExpression( this, this );
}
}

View File

@ -0,0 +1,363 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.lang.reflect.Method;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Struct;
import java.util.Locale;
import org.hibernate.HibernateException;
import org.hibernate.metamodel.mapping.AttributeMapping;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.MappingType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.spi.UnknownBasicJavaType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * {@link AggregateJdbcType} implementation that maps an aggregate embeddable to an
 * Oracle object type ({@code STRUCT}) column, binding domain values as
 * {@link java.sql.Struct} and reconstructing embeddable instances (or raw
 * {@code Object[]} tuples) on extraction.
 *
 * @author Christian Beikov
 */
public class OracleStructJdbcType implements AggregateJdbcType {

	/** Read-only default instance (no embeddable mapping); extraction yields {@code Object[]}. */
	public static final AggregateJdbcType INSTANCE = new OracleStructJdbcType();

	// Per-class cache of the reflective accessor needed for driver-specific temporal values.
	// oracle.sql.TIMESTAMPTZ exposes its value only through offsetDateTimeValue(Connection),
	// which we must look up reflectively because the driver is not a compile-time dependency.
	private static final ClassValue<Method> RAW_JDBC_TRANSFORMER = new ClassValue<>() {
		@Override
		protected Method computeValue(Class<?> type) {
			if ( "oracle.sql.TIMESTAMPTZ".equals( type.getName() ) ) {
				try {
					return type.getMethod( "offsetDateTimeValue", Connection.class );
				}
				catch (NoSuchMethodException e) {
					throw new RuntimeException( e );
				}
			}
			// null: values of this class need no transformation
			return null;
		}
	};

	// Upper-cased Oracle object type name, or null for the read-only default instance
	private final String oracleTypeName;
	// The embeddable this struct maps, or null for the read-only default instance
	private final EmbeddableMappingType embeddableMappingType;
	private final ValueExtractor<Object[]> objectArrayExtractor;

	private OracleStructJdbcType() {
		// The default instance is for reading only and will return an Object[]
		this( null, null );
	}

	/**
	 * @param embeddableMappingType the embeddable mapped to this struct type, may be null
	 * @param typeName the Oracle object type name, may be null; upper-cased for the driver
	 */
	public OracleStructJdbcType(EmbeddableMappingType embeddableMappingType, String typeName) {
		this.oracleTypeName = typeName == null ? null : typeName.toUpperCase( Locale.ROOT );
		this.embeddableMappingType = embeddableMappingType;
		// We cache the extractor for Object[] here
		// since that is used in AggregateEmbeddableFetchImpl and AggregateEmbeddableResultImpl
		this.objectArrayExtractor = createBasicExtractor( new UnknownBasicJavaType<>( Object[].class ) );
	}

	@Override
	public int getJdbcTypeCode() {
		return SqlTypes.STRUCT;
	}

	@Override
	public AggregateJdbcType resolveAggregateJdbcType(EmbeddableMappingType mappingType, String sqlType) {
		// Each aggregate embeddable gets its own instance bound to its Oracle type name
		return new OracleStructJdbcType( mappingType, sqlType );
	}

	@Override
	public EmbeddableMappingType getEmbeddableMappingType() {
		return embeddableMappingType;
	}

	@Override
	public <T> JavaType<T> getJdbcRecommendedJavaTypeMapping(
			Integer precision,
			Integer scale,
			TypeConfiguration typeConfiguration) {
		// Without an embeddable mapping (default instance) the struct can only be read as Object[]
		if ( embeddableMappingType == null ) {
			return typeConfiguration.getJavaTypeRegistry().getDescriptor( Object[].class );
		}
		else {
			//noinspection unchecked
			return (JavaType<T>) embeddableMappingType.getMappedJavaType();
		}
	}

	@Override
	public void registerOutParameter(CallableStatement callableStatement, String name) throws SQLException {
		// Oracle requires the object type name to register a STRUCT out parameter
		callableStatement.registerOutParameter( name, getJdbcTypeCode(), oracleTypeName );
	}

	@Override
	public void registerOutParameter(CallableStatement callableStatement, int index) throws SQLException {
		callableStatement.registerOutParameter( index, getJdbcTypeCode(), oracleTypeName );
	}

	@Override
	public Object createJdbcValue(Object domainValue, WrapperOptions options) throws SQLException {
		// Decompose the embeddable into its JDBC-level values, then let the physical
		// connection build the driver-specific java.sql.Struct for the Oracle object type
		final Object[] jdbcValues = StructHelper.getJdbcValues(
				embeddableMappingType,
				embeddableMappingType.getValues( domainValue ),
				options
		);
		return options.getSession()
				.getJdbcCoordinator()
				.getLogicalConnection()
				.getPhysicalConnection()
				.createStruct( oracleTypeName, jdbcValues );
	}

	@Override
	public Object[] extractJdbcValues(Object rawJdbcValue, WrapperOptions options) throws SQLException {
		// Unpack the struct's attributes in place, converting driver-specific
		// temporal values (see RAW_JDBC_TRANSFORMER) to standard representations
		final Object[] attributes = ( (Struct) rawJdbcValue ).getAttributes();
		wrapRawJdbcValues( embeddableMappingType, attributes, 0, options );
		return attributes;
	}

	@Override
	public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
		return new BasicBinder<>( javaType, this ) {
			@Override
			protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
					throws SQLException {
				// Bind as a java.sql.Struct built by createJdbcValue
				st.setObject( index, createJdbcValue( value, options ) );
			}

			@Override
			protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
					throws SQLException {
				st.setObject( name, createJdbcValue( value, options ) );
			}
		};
	}

	@Override
	public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
		// Reuse the cached extractor for the raw Object[] representation
		if ( javaType.getJavaTypeClass() == Object[].class ) {
			//noinspection unchecked
			return (ValueExtractor<X>) objectArrayExtractor;
		}
		return createBasicExtractor( javaType );
	}

	private <X> BasicExtractor<X> createBasicExtractor(JavaType<X> javaType) {
		return new BasicExtractor<>( javaType, this ) {
			@Override
			protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
				return getValue( rs.getObject( paramIndex ), options );
			}

			@Override
			protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
				return getValue( statement.getObject( index ), options );
			}

			@Override
			protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
					throws SQLException {
				return getValue( statement.getObject( name ), options );
			}

			// Converts the raw java.sql.Struct either into a wrapped Object[] tuple
			// or into an instantiated embeddable, depending on the target Java type
			private X getValue(Object object, WrapperOptions options) throws SQLException {
				if ( object == null ) {
					return null;
				}
				final Struct struct = (Struct) object;
				final Object[] values = struct.getAttributes();
				final boolean jdbcRepresentation = getJavaType().getJavaTypeClass() == Object[].class;
				if ( jdbcRepresentation ) {
					// Caller wants the raw JDBC tuple; only normalize driver-specific values
					wrapRawJdbcValues( embeddableMappingType, values, 0, options );
					//noinspection unchecked
					return (X) values;
				}
				assert embeddableMappingType != null && embeddableMappingType.getJavaType() == getJavaType();
				final Object[] attributeValues = getAttributeValues(
						embeddableMappingType,
						values,
						options
				);
				//noinspection unchecked
				return (X) embeddableMappingType.getRepresentationStrategy().getInstantiator().instantiate(
						() -> attributeValues,
						options.getSessionFactory()
				);
			}
		};
	}

	/**
	 * Converts a flat array of raw JDBC struct attributes into per-attribute domain
	 * values for the given embeddable, recursing into nested embeddables.
	 */
	public static Object[] getAttributeValues(
			EmbeddableMappingType embeddableMappingType,
			Object[] rawJdbcValues,
			WrapperOptions options) throws SQLException {
		final int numberOfAttributeMappings = embeddableMappingType.getNumberOfAttributeMappings();
		final Object[] attributeValues;
		if ( numberOfAttributeMappings != rawJdbcValues.length ) {
			attributeValues = new Object[numberOfAttributeMappings];
		}
		else {
			// Attribute count matches the JDBC value count, so convert in place
			attributeValues = rawJdbcValues;
		}
		int jdbcIndex = 0;
		for ( int i = 0; i < numberOfAttributeMappings; i++ ) {
			final AttributeMapping attributeMapping = embeddableMappingType.getAttributeMapping( i );
			// injectAttributeValue reports how many JDBC values the attribute consumed
			jdbcIndex += injectAttributeValue( attributeMapping, attributeValues, i, rawJdbcValues, jdbcIndex, options );
		}
		return attributeValues;
	}

	/**
	 * Converts the raw JDBC value(s) for a single attribute starting at {@code jdbcIndex}
	 * and stores the domain value into {@code attributeValues[attributeIndex]}.
	 *
	 * @return the number of JDBC values consumed
	 */
	private static int injectAttributeValue(
			AttributeMapping attributeMapping,
			Object[] attributeValues,
			int attributeIndex,
			Object[] rawJdbcValues,
			int jdbcIndex,
			WrapperOptions options) throws SQLException {
		final MappingType mappedType = attributeMapping.getMappedType();
		final int jdbcValueCount;
		final Object rawJdbcValue = rawJdbcValues[jdbcIndex];
		if ( mappedType instanceof EmbeddableMappingType ) {
			final EmbeddableMappingType embeddableMappingType = (EmbeddableMappingType) mappedType;
			if ( embeddableMappingType.getAggregateMapping() != null ) {
				// Aggregated nested embeddable: occupies a single JDBC slot (a nested struct)
				jdbcValueCount = 1;
				if ( rawJdbcValue instanceof Struct ) {
					final Object[] subValues = getAttributeValues(
							embeddableMappingType,
							( (Struct) rawJdbcValue ).getAttributes(),
							options
					);
					attributeValues[attributeIndex] = embeddableMappingType.getRepresentationStrategy()
							.getInstantiator()
							.instantiate(
									() -> subValues,
									embeddableMappingType.findContainingEntityMapping()
											.getEntityPersister()
											.getFactory()
							);
				}
				else {
					// Not a Struct (e.g. null or an already-converted value) — pass through
					attributeValues[attributeIndex] = rawJdbcValue;
				}
			}
			else {
				// Flattened nested embeddable: spans several consecutive JDBC slots
				jdbcValueCount = embeddableMappingType.getJdbcValueCount();
				final Object[] jdbcValues = new Object[jdbcValueCount];
				System.arraycopy( rawJdbcValues, jdbcIndex, jdbcValues, 0, jdbcValues.length );
				final Object[] subValues = getAttributeValues( embeddableMappingType, jdbcValues, options );
				attributeValues[attributeIndex] = embeddableMappingType.getRepresentationStrategy()
						.getInstantiator()
						.instantiate(
								() -> subValues,
								embeddableMappingType.findContainingEntityMapping()
										.getEntityPersister()
										.getFactory()
						);
			}
		}
		else {
			assert attributeMapping.getJdbcTypeCount() == 1;
			jdbcValueCount = 1;
			final JdbcMapping jdbcMapping = attributeMapping.getJdbcMappings().get( 0 );
			final Object jdbcValue;
			if ( rawJdbcValue == null ) {
				jdbcValue = null;
			}
			else {
				switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
					case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
					case SqlTypes.TIMESTAMP_UTC:
						// Only transform the raw jdbc value if it could be a TIMESTAMPTZ
						jdbcValue = jdbcMapping.getJdbcJavaType()
								.wrap( transformRawJdbcValue( rawJdbcValue, options ), options );
						break;
					default:
						jdbcValue = jdbcMapping.getJdbcJavaType().wrap( rawJdbcValue, options );
						break;
				}
			}
			// Apply any value converter to go from the JDBC value to the domain value
			attributeValues[attributeIndex] = jdbcMapping.convertToDomainValue( jdbcValue );
		}
		return jdbcValueCount;
	}

	/**
	 * Normalizes raw JDBC values in place: nested aggregates are expanded via their
	 * {@link AggregateJdbcType} and driver-specific temporal values are wrapped.
	 *
	 * @return the JDBC index after the last processed value
	 */
	private static int wrapRawJdbcValues(
			EmbeddableMappingType embeddableMappingType,
			Object[] jdbcValues,
			int jdbcIndex,
			WrapperOptions options) throws SQLException {
		final int numberOfAttributeMappings = embeddableMappingType.getNumberOfAttributeMappings();
		for ( int i = 0; i < numberOfAttributeMappings; i++ ) {
			final AttributeMapping attributeMapping = embeddableMappingType.getAttributeMapping( i );
			final MappingType mappedType = attributeMapping.getMappedType();
			if ( mappedType instanceof EmbeddableMappingType ) {
				final EmbeddableMappingType embeddableType = (EmbeddableMappingType) mappedType;
				if ( embeddableType.getAggregateMapping() != null ) {
					// Nested aggregate occupies one slot; delegate its extraction
					final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) embeddableType.getAggregateMapping()
							.getJdbcMapping()
							.getJdbcType();
					jdbcValues[jdbcIndex] = aggregateJdbcType.extractJdbcValues( jdbcValues[jdbcIndex], options );
					jdbcIndex++;
				}
				else {
					// Flattened nested embeddable: recurse over its slots
					jdbcIndex = wrapRawJdbcValues( embeddableType, jdbcValues, jdbcIndex, options );
				}
			}
			else {
				assert attributeMapping.getJdbcTypeCount() == 1;
				final Object rawJdbcValue = jdbcValues[jdbcIndex];
				if ( rawJdbcValue != null ) {
					final JdbcMapping jdbcMapping = attributeMapping.getJdbcMappings().get( 0 );
					switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
						case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
						case SqlTypes.TIMESTAMP_UTC:
							// Only transform the raw jdbc value if it could be a TIMESTAMPTZ
							jdbcValues[jdbcIndex] = jdbcMapping.getJdbcJavaType()
									.wrap( transformRawJdbcValue( rawJdbcValue, options ), options );
							break;
						default:
							jdbcValues[jdbcIndex] = jdbcMapping.getJdbcJavaType().wrap( rawJdbcValue, options );
							break;
					}
				}
				jdbcIndex++;
			}
		}
		return jdbcIndex;
	}

	/**
	 * Converts driver-specific values (currently only {@code oracle.sql.TIMESTAMPTZ})
	 * to a standard representation by reflectively invoking the cached accessor with
	 * the physical connection; other values are returned unchanged.
	 */
	private static Object transformRawJdbcValue(Object rawJdbcValue, WrapperOptions options) {
		Method rawJdbcTransformer = RAW_JDBC_TRANSFORMER.get( rawJdbcValue.getClass() );
		if ( rawJdbcTransformer == null ) {
			return rawJdbcValue;
		}
		try {
			return rawJdbcTransformer.invoke(
					rawJdbcValue,
					options.getSession()
							.getJdbcCoordinator()
							.getLogicalConnection()
							.getPhysicalConnection()
			);
		}
		catch (Exception e) {
			throw new HibernateException( "Could not transform the raw jdbc value", e );
		}
	}
}

View File

@ -9,8 +9,6 @@ package org.hibernate.dialect;
import org.hibernate.HibernateException;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
import org.jboss.logging.Logger;
@ -29,10 +27,8 @@ public class OracleTypesHelper {
private static final String ORACLE_TYPES_CLASS_NAME = "oracle.jdbc.OracleTypes";
private static final String DEPRECATED_ORACLE_TYPES_CLASS_NAME = "oracle.jdbc.driver.OracleTypes";
private static final String ORACLE_JSON_JDBC_TYPE_CLASS_NAME = "org.hibernate.dialect.OracleJsonJdbcType";
private final int oracleCursorTypeSqlType;
private final JdbcType jsonJdbcType;
private OracleTypesHelper() {
int typeCode = -99;
@ -43,17 +39,6 @@ public class OracleTypesHelper {
log.warn( "Unable to resolve Oracle CURSOR JDBC type code: the class OracleTypesHelper was initialized but the Oracle JDBC driver could not be loaded." );
}
oracleCursorTypeSqlType = typeCode;
JdbcType jsonJdbcType = JsonJdbcType.INSTANCE;
try {
jsonJdbcType = (JdbcType) ReflectHelper.classForName( ORACLE_JSON_JDBC_TYPE_CLASS_NAME )
.getField( "INSTANCE" )
.get( null );
}
catch (Exception e) {
log.warn( "Unable to resolve OracleJsonJdbcType: the class OracleTypesHelper was initialized but the Oracle JDBC driver could not be loaded." );
}
this.jsonJdbcType = jsonJdbcType;
}
private int extractOracleCursorTypeValue() {
@ -90,10 +75,6 @@ public class OracleTypesHelper {
return oracleCursorTypeSqlType;
}
public JdbcType getJsonJdbcType() {
return jsonJdbcType;
}
// initial code as copied from Oracle8iDialect
//
// private int oracleCursorTypeSqlType = INIT_ORACLETYPES_CURSOR_VALUE;

View File

@ -11,9 +11,11 @@ import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.XmlJdbcType;
/**
@ -21,7 +23,16 @@ import org.hibernate.type.descriptor.jdbc.XmlJdbcType;
*/
public class OracleXmlJdbcType extends XmlJdbcType {
public static final OracleXmlJdbcType INSTANCE = new OracleXmlJdbcType();
public static final OracleXmlJdbcType INSTANCE = new OracleXmlJdbcType( null );
private OracleXmlJdbcType(EmbeddableMappingType embeddableMappingType) {
super( embeddableMappingType );
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(EmbeddableMappingType mappingType, String sqlType) {
return new OracleXmlJdbcType( mappingType );
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {

View File

@ -24,6 +24,8 @@ import org.hibernate.LockOptions;
import org.hibernate.PessimisticLockException;
import org.hibernate.QueryTimeoutException;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.dialect.aggregate.PostgreSQLAggregateSupport;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.PostgreSQLMinMaxFunction;
import org.hibernate.dialect.identity.IdentityColumnSupport;
@ -67,6 +69,7 @@ import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.type.JavaObjectType;
import org.hibernate.type.descriptor.java.PrimitiveByteArrayJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.ArrayJdbcType;
import org.hibernate.type.descriptor.jdbc.BlobJdbcType;
import org.hibernate.type.descriptor.jdbc.ClobJdbcType;
@ -108,6 +111,7 @@ import static org.hibernate.type.SqlTypes.NCLOB;
import static org.hibernate.type.SqlTypes.NVARCHAR;
import static org.hibernate.type.SqlTypes.OTHER;
import static org.hibernate.type.SqlTypes.SQLXML;
import static org.hibernate.type.SqlTypes.STRUCT;
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_UTC;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
@ -321,6 +325,12 @@ public class PostgreSQLDialect extends Dialect {
}
}
return jdbcType;
case STRUCT:
final AggregateJdbcType aggregateDescriptor = jdbcTypeRegistry.findAggregateDescriptor( columnTypeName );
if ( aggregateDescriptor != null ) {
return aggregateDescriptor;
}
break;
}
return jdbcTypeRegistry.getDescriptor( jdbcTypeCode );
}
@ -645,6 +655,11 @@ public class PostgreSQLDialect extends Dialect {
return true;
}
@Override
public boolean supportsIfExistsBeforeTypeName() {
	// PostgreSQL supports `DROP TYPE IF EXISTS <name>`
	return true;
}
@Override
public boolean supportsIfExistsBeforeConstraintName() {
return true;
@ -1016,6 +1031,11 @@ public class PostgreSQLDialect extends Dialect {
}
}
@Override
public AggregateSupport getAggregateSupport() {
	// Version-dependent aggregate (struct/json) column support for PostgreSQL
	return PostgreSQLAggregateSupport.valueOf( this );
}
@Override
public void appendBinaryLiteral(SqlAppender appender, byte[] bytes) {
appender.appendSql( "bytea '\\x" );
@ -1260,6 +1280,7 @@ public class PostgreSQLDialect extends Dialect {
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLStructJdbcType.INSTANCE );
}
// HHH-9562

View File

@ -29,6 +29,7 @@ import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeIndicators;
import org.hibernate.type.spi.TypeConfiguration;
/**
* @author Christian Beikov
@ -96,11 +97,16 @@ public class PostgreSQLIntervalSecondJdbcType implements AdjustableJdbcType {
return SqlTypes.INTERVAL_SECOND;
}
@Override
public Class<?> getPreferredJavaTypeClass(WrapperOptions options) {
	// INTERVAL SECOND values are preferably materialized as java.time.Duration
	return Duration.class;
}
@Override
public JdbcType resolveIndicatedType(JdbcTypeIndicators indicators, JavaType<?> domainJtd) {
// The default scale is 9
if ( indicators.getColumnScale() == JdbcTypeIndicators.NO_COLUMN_SCALE || indicators.getColumnScale() > 6 ) {
return indicators.getJdbcType( SqlTypes.NUMERIC );
return indicators.getJdbcType( indicators.resolveJdbcTypeCode( SqlTypes.NUMERIC ) );
}
return this;
}

View File

@ -6,36 +6,23 @@
*/
package org.hibernate.dialect;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
/**
* @author Christian Beikov
*/
public class PostgreSQLJsonJdbcType extends PostgreSQLPGObjectJdbcType {
public class PostgreSQLJsonJdbcType extends AbstractPostgreSQLJsonJdbcType {
public static final PostgreSQLJsonJdbcType INSTANCE = new PostgreSQLJsonJdbcType();
public static final PostgreSQLJsonJdbcType INSTANCE = new PostgreSQLJsonJdbcType( null );
public PostgreSQLJsonJdbcType() {
super( "json", SqlTypes.JSON );
private PostgreSQLJsonJdbcType(EmbeddableMappingType embeddableMappingType) {
super( embeddableMappingType, "json" );
}
@Override
protected <X> X fromString(String string, JavaType<X> javaType, WrapperOptions options) {
return options.getSessionFactory().getFastSessionServices().getJsonFormatMapper().fromString(
string,
javaType,
options
);
public AggregateJdbcType resolveAggregateJdbcType(EmbeddableMappingType mappingType, String sqlType) {
return new PostgreSQLJsonJdbcType( mappingType );
}
@Override
protected <X> String toString(X value, JavaType<X> javaType, WrapperOptions options) {
return options.getSessionFactory().getFastSessionServices().getJsonFormatMapper().toString(
value,
javaType,
options
);
}
}

View File

@ -6,36 +6,22 @@
*/
package org.hibernate.dialect;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
/**
* @author Christian Beikov
*/
public class PostgreSQLJsonbJdbcType extends PostgreSQLPGObjectJdbcType {
public class PostgreSQLJsonbJdbcType extends AbstractPostgreSQLJsonJdbcType {
public static final PostgreSQLJsonbJdbcType INSTANCE = new PostgreSQLJsonbJdbcType();
public static final PostgreSQLJsonbJdbcType INSTANCE = new PostgreSQLJsonbJdbcType( null );
public PostgreSQLJsonbJdbcType() {
super( "jsonb", SqlTypes.JSON );
public PostgreSQLJsonbJdbcType(EmbeddableMappingType embeddableMappingType) {
super( embeddableMappingType, "jsonb" );
}
@Override
protected <X> X fromString(String string, JavaType<X> javaType, WrapperOptions options) {
return options.getSessionFactory().getFastSessionServices().getJsonFormatMapper().fromString(
string,
javaType,
options
);
}
@Override
protected <X> String toString(X value, JavaType<X> javaType, WrapperOptions options) {
return options.getSessionFactory().getFastSessionServices().getJsonFormatMapper().toString(
value,
javaType,
options
);
public AggregateJdbcType resolveAggregateJdbcType(EmbeddableMappingType mappingType, String sqlType) {
return new PostgreSQLJsonbJdbcType( mappingType );
}
}

View File

@ -81,7 +81,11 @@ public abstract class PostgreSQLPGObjectJdbcType implements JdbcType {
return sqlTypeCode;
}
protected <X> X fromString(String string, JavaType<X> javaType, WrapperOptions options) {
public String getTypeName() {
return typeName;
}
protected <X> X fromString(String string, JavaType<X> javaType, WrapperOptions options) throws SQLException {
return javaType.wrap( string, options );
}
@ -143,26 +147,26 @@ public abstract class PostgreSQLPGObjectJdbcType implements JdbcType {
return new BasicExtractor<>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return getObject( rs.getString( paramIndex ), options );
return getObject( rs.getObject( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return getObject( statement.getString( index ), options );
return getObject( statement.getObject( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
throws SQLException {
return getObject( statement.getString( name ), options );
return getObject( statement.getObject( name ), options );
}
private X getObject(String string, WrapperOptions options) throws SQLException {
if ( string == null ) {
private X getObject(Object object, WrapperOptions options) throws SQLException {
if ( object == null ) {
return null;
}
return ( (PostgreSQLPGObjectJdbcType) getJdbcType() ).fromString(
string,
object.toString(),
getJavaType(),
options
);

View File

@ -6,6 +6,8 @@
*/
package org.hibernate.dialect;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
@ -18,6 +20,7 @@ import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
import org.hibernate.sql.ast.tree.predicate.LikePredicate;
import org.hibernate.sql.ast.tree.predicate.NullnessPredicate;
import org.hibernate.sql.ast.tree.select.QueryGroup;
import org.hibernate.sql.ast.tree.select.QueryPart;
import org.hibernate.sql.ast.tree.select.QuerySpec;
@ -59,6 +62,26 @@ public class PostgreSQLSqlAstTranslator<T extends JdbcOperation> extends Abstrac
}
}
@Override
public void visitNullnessPredicate(NullnessPredicate nullnessPredicate) {
	final Expression expression = nullnessPredicate.getExpression();
	final JdbcMappingContainer expressionType = expression.getExpressionType();
	if ( !isStruct( expressionType ) ) {
		super.visitNullnessPredicate( nullnessPredicate );
		return;
	}
	// Surprise, the null predicate checks if all components of the struct are null or not,
	// rather than the column itself, so we have to use the distinct from predicate to implement this instead
	expression.accept( this );
	appendSql( nullnessPredicate.isNegated() ? " is distinct from null" : " is not distinct from null" );
}
@Override
protected void renderMaterializationHint(CteMaterialization materialization) {
if ( getDialect().getVersion().isSameOrAfter( 12 ) ) {

View File

@ -0,0 +1,900 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.util.ArrayList;
import java.util.TimeZone;
import org.hibernate.internal.util.CharSequenceHelper;
import org.hibernate.metamodel.mapping.AttributeMapping;
import org.hibernate.metamodel.mapping.BasicValuedMapping;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.internal.EmbeddedAttributeMapping;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.spi.StringBuilderSqlAppender;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.PrimitiveByteArrayJavaType;
import org.hibernate.type.descriptor.java.spi.UnknownBasicJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsDate;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsLocalTime;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTime;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMicros;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMillis;
/**
* Implementation for serializing/deserializing an embeddable aggregate to/from the PostgreSQL component format.
* For regular queries, we select the individual struct elements because the PostgreSQL component format encoding
* is probably not very efficient.
*
* @author Christian Beikov
*/
public class PostgreSQLStructJdbcType extends PostgreSQLPGObjectJdbcType implements AggregateJdbcType {
/** Read-only default instance (no embeddable mapping); extraction yields {@code Object[]}. */
public static final PostgreSQLStructJdbcType INSTANCE = new PostgreSQLStructJdbcType();

// Formatter for timestamps in the PostgreSQL struct text format: ISO date, a space
// (instead of 'T'), ISO time, and an optional offset such as "+00"
private static final DateTimeFormatter LOCAL_DATE_TIME;
static {
	LOCAL_DATE_TIME = new DateTimeFormatterBuilder()
			.parseCaseInsensitive()
			.append(DateTimeFormatter.ISO_LOCAL_DATE)
			.appendLiteral(' ')
			.append(DateTimeFormatter.ISO_LOCAL_TIME)
			.optionalStart()
			.appendOffset( "+HH:mm", "+00" )
			.toFormatter();
}
// Need a custom formatter for parsing what PostgresPlus/EDB produces:
// a date optionally followed by a time-of-day and offset
private static final DateTimeFormatter LOCAL_DATE;
static {
	LOCAL_DATE = new DateTimeFormatterBuilder()
			.parseCaseInsensitive()
			.append(DateTimeFormatter.ISO_LOCAL_DATE)
			.optionalStart()
			.appendLiteral(' ')
			.append(DateTimeFormatter.ISO_LOCAL_TIME)
			.optionalStart()
			.appendOffset( "+HH:mm", "+00" )
			.toFormatter();
}

// The embeddable this struct maps, or null for the read-only default instance
private final EmbeddableMappingType embeddableMappingType;
private final ValueExtractor<Object[]> objectArrayExtractor;
private PostgreSQLStructJdbcType() {
	// The default instance is for reading only and will return an Object[]
	this( null, null );
}
/**
 * @param embeddableMappingType the embeddable mapped to this struct type, may be null
 * @param typeName the PostgreSQL composite type name, may be null
 */
public PostgreSQLStructJdbcType(EmbeddableMappingType embeddableMappingType, String typeName) {
	super( typeName, SqlTypes.STRUCT );
	this.embeddableMappingType = embeddableMappingType;
	// We cache the extractor for Object[] here
	// since that is used in AggregateEmbeddableFetchImpl and AggregateEmbeddableResultImpl
	this.objectArrayExtractor = super.getExtractor( new UnknownBasicJavaType<>( Object[].class ) );
}
@Override
public int getJdbcTypeCode() {
	return SqlTypes.STRUCT;
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(EmbeddableMappingType mappingType, String sqlType) {
	// Each aggregate embeddable gets its own instance bound to its composite type name
	return new PostgreSQLStructJdbcType( mappingType, sqlType );
}
@Override
public EmbeddableMappingType getEmbeddableMappingType() {
	return embeddableMappingType;
}
@Override
public <T> JavaType<T> getJdbcRecommendedJavaTypeMapping(
		Integer precision,
		Integer scale,
		TypeConfiguration typeConfiguration) {
	// With an embeddable mapping, recommend its mapped Java type;
	// the mapping-less default instance can only be read as Object[]
	if ( embeddableMappingType != null ) {
		//noinspection unchecked
		return (JavaType<T>) embeddableMappingType.getMappedJavaType();
	}
	return typeConfiguration.getJavaTypeRegistry().getDescriptor( Object[].class );
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
	// Reuse the cached extractor for the raw Object[] representation
	if ( javaType.getJavaTypeClass() == Object[].class ) {
		//noinspection unchecked
		return (ValueExtractor<X>) objectArrayExtractor;
	}
	return super.getExtractor( javaType );
}
/**
 * Parses a PostgreSQL struct literal into either the embeddable domain object
 * or a raw {@code Object[]}, depending on the requested Java type.
 */
@Override
protected <X> X fromString(String string, JavaType<X> javaType, WrapperOptions options) throws SQLException {
	if ( string == null ) {
		return null;
	}
	// Callers asking for Object[] get the raw JDBC values; any other type
	// means the embeddable must be instantiated
	final boolean returnEmbeddable = javaType.getJavaTypeClass() != Object[].class;
	final int end;
	final Object[] array;
	if ( embeddableMappingType == null ) {
		// Default instance: parse without mapping metadata into a growing value list
		assert !returnEmbeddable;
		final ArrayList<Object> values = new ArrayList<>( 8 );
		end = deserializeStruct( string, 0, string.length() - 1, values );
		array = values.toArray();
	}
	else {
		array = new Object[embeddableMappingType.getJdbcValueCount()];
		end = deserializeStruct( string, 0, 0, array, returnEmbeddable, options );
	}
	// The parse must consume the entire literal
	assert end == string.length();
	if ( returnEmbeddable ) {
		// Convert raw JDBC values to attribute values and instantiate the embeddable
		final Object[] attributeValues = StructHelper.getAttributeValues( embeddableMappingType, array, options );
		//noinspection unchecked
		return (X) embeddableMappingType.getRepresentationStrategy().getInstantiator().instantiate(
				() -> attributeValues,
				options.getSessionFactory()
		);
	}
	//noinspection unchecked
	return (X) array;
}
/**
 * Parses a struct literal without mapping metadata, collecting the raw column
 * strings (or {@code null} for empty columns) into {@code values}.
 *
 * @param string the struct literal, starting with {@code '('} at {@code begin}
 * @param begin index of the opening parenthesis
 * @param end upper bound used only in the error message and the escaped-quote lookahead
 * @param values receives one entry per parsed column
 * @return the index just past the closing {@code ')'}
 */
private int deserializeStruct(
		String string,
		int begin,
		int end,
		ArrayList<Object> values) {
	int column = 0;
	boolean inQuote = false;
	// Set when a quoted value contained doubled quotes and needs unescaping
	boolean hasEscape = false;
	assert string.charAt( begin ) == '(';
	int start = begin + 1;
	int element = 1;
	for ( int i = start; i < string.length(); i++ ) {
		final char c = string.charAt( i );
		switch ( c ) {
			case '"':
				if ( inQuote ) {
					if ( i + 1 != end && string.charAt( i + 1 ) == '"' ) {
						// Skip double quotes as that will be unescaped later
						i++;
						hasEscape = true;
						continue;
					}
					// Closing quote: emit the quoted value, unescaping if needed
					if ( hasEscape ) {
						values.add( unescape( string, start, i ) );
					}
					else {
						values.add( string.substring( start, i ) );
					}
					column++;
					inQuote = false;
				}
				else {
					inQuote = true;
				}
				hasEscape = false;
				start = i + 1;
				break;
			case ',':
				if ( !inQuote ) {
					// column < element means this element was not already emitted
					// by a closing quote above
					if ( column < element ) {
						if ( start == i ) {
							values.add( null );
						}
						else {
							values.add( string.substring( start, i ) );
						}
						column++;
					}
					start = i + 1;
					element++;
				}
				break;
			case ')':
				if ( !inQuote ) {
					// Emit the final (possibly empty) element and stop
					if ( column < element ) {
						if ( start == i ) {
							values.add( null );
						}
						else {
							values.add( string.substring( start, i ) );
						}
					}
					return i + 1;
				}
				break;
		}
	}
	throw new IllegalArgumentException( "Struct not properly formed: " + string.subSequence( start, end ) );
}
/**
 * Parses a struct literal using the embeddable mapping metadata, writing one
 * converted value per selectable into {@code values}.
 * <p>
 * PostgreSQL doubles quote characters once per nesting level, so a quote at
 * nesting depth {@code quoteLevel} appears as {@code 1 << quoteLevel}
 * consecutive {@code '"'} characters, and an escaped quote inside such a
 * value as twice that many. Nested structs are parsed recursively with
 * {@code quoteLevel + 1}.
 *
 * @param string the struct literal, starting with {@code '('} at {@code begin}
 * @param begin index of the opening parenthesis
 * @param quoteLevel current nesting depth (0 for the top-level struct)
 * @param values receives one converted value per JDBC selectable
 * @param returnEmbeddable whether nested structs should be instantiated as embeddables
 * @return the index just past the closing {@code ')'}
 */
private int deserializeStruct(
		String string,
		int begin,
		int quoteLevel,
		Object[] values,
		boolean returnEmbeddable,
		WrapperOptions options) throws SQLException {
	int column = 0;
	boolean inQuote = false;
	// Lazily created buffer that accumulates the unescaped prefix of a value
	// containing escaped quotes
	StringBuilder escapingSb = null;
	assert string.charAt( begin ) == '(';
	int start = begin + 1;
	for ( int i = start; i < string.length(); i++ ) {
		final char c = string.charAt( i );
		switch ( c ) {
			case '"':
				if ( inQuote ) {
					if ( repeatsChar( string, i, 1 << ( quoteLevel + 1 ), '"' ) ) {
						// Skip quote escaping as that will be unescaped later
						if ( escapingSb == null ) {
							escapingSb = new StringBuilder();
						}
						escapingSb.append( string, start, i );
						escapingSb.append( '"' );
						// Move forward to the last quote
						i += ( 1 << ( quoteLevel + 1 ) ) - 1;
						start = i + 1;
						continue;
					}
					assert repeatsChar( string, i, 1 << quoteLevel, '"' );
					// Closing quote: convert the quoted text according to the
					// column's JDBC type
					final JdbcMapping jdbcMapping = embeddableMappingType.getJdbcValueSelectable( column )
							.getJdbcMapping();
					switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
						case SqlTypes.DATE:
							values[column] = fromRawObject(
									jdbcMapping,
									parseDate(
											CharSequenceHelper.subSequence(
													string,
													start,
													i
											)
									),
									options
							);
							break;
						case SqlTypes.TIME:
						case SqlTypes.TIME_WITH_TIMEZONE:
							values[column] = fromRawObject(
									jdbcMapping,
									parseTime(
											CharSequenceHelper.subSequence(
													string,
													start,
													i
											)
									),
									options
							);
							break;
						case SqlTypes.TIMESTAMP:
							values[column] = fromRawObject(
									jdbcMapping,
									parseTimestamp(
											CharSequenceHelper.subSequence(
													string,
													start,
													i
											),
											jdbcMapping.getJdbcJavaType()
									),
									options
							);
							break;
						case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
						case SqlTypes.TIMESTAMP_UTC:
							values[column] = fromRawObject(
									jdbcMapping,
									parseTimestampWithTimeZone(
											CharSequenceHelper.subSequence(
													string,
													start,
													i
											),
											jdbcMapping.getJdbcJavaType()
									),
									options
							);
							break;
						case SqlTypes.BINARY:
						case SqlTypes.VARBINARY:
						case SqlTypes.LONGVARBINARY:
						case SqlTypes.LONG32VARBINARY:
							// Binary values are hex-encoded as \x... with the
							// backslashes doubled per nesting level
							final int backslashes = 1 << ( quoteLevel + 1 );
							assert repeatsChar( string, start, backslashes, '\\' );
							final int xCharPosition = start + backslashes;
							assert string.charAt( xCharPosition ) == 'x';
							values[column] = fromString(
									jdbcMapping,
									string,
									xCharPosition + 1,
									i
							);
							break;
						default:
							// Plain text: use the unescape buffer only if escaped
							// quotes were encountered for this value
							if ( escapingSb == null || escapingSb.length() == 0 ) {
								values[column] = fromString(
										jdbcMapping,
										string,
										start,
										i
								);
							}
							else {
								escapingSb.append( string, start, i );
								values[column] = fromString(
										jdbcMapping,
										escapingSb,
										0,
										escapingSb.length()
								);
								escapingSb.setLength( 0 );
							}
							break;
					}
					column++;
					inQuote = false;
					// move forward the index by 2 ^ quoteLevel to point to the next char after the quote
					i += 1 << quoteLevel;
					if ( string.charAt( i ) == ')' ) {
						// Return the end position if this is the last element
						assert column == values.length;
						return i + 1;
					}
					// at this point, we must see a comma to indicate the next element
					assert string.charAt( i ) == ',';
				}
				else {
					// This is a start quote, so move forward the index to the last quote
					final int expectedQuotes = Math.max( 1, 1 << quoteLevel );
					assert repeatsChar( string, i, expectedQuotes, '"' );
					i += expectedQuotes - 1;
					if ( string.charAt( i + 1 ) == '(' ) {
						// This could be a nested struct
						final JdbcMapping jdbcMapping = embeddableMappingType.getJdbcValueSelectable( column )
								.getJdbcMapping();
						if ( jdbcMapping.getJdbcType() instanceof PostgreSQLStructJdbcType ) {
							// Recurse with the nested struct's own type at the next quote level
							final PostgreSQLStructJdbcType structJdbcType;
							structJdbcType = (PostgreSQLStructJdbcType) jdbcMapping.getJdbcType();
							final Object[] subValues = new Object[structJdbcType.embeddableMappingType.getJdbcValueCount()];
							final int subEnd = structJdbcType.deserializeStruct(
									string,
									i + 1,
									quoteLevel + 1,
									subValues,
									returnEmbeddable,
									options
							);
							if ( returnEmbeddable ) {
								final Object[] attributeValues = StructHelper.getAttributeValues(
										structJdbcType.embeddableMappingType,
										subValues,
										options
								);
								final Object subValue = structJdbcType.embeddableMappingType.getRepresentationStrategy()
										.getInstantiator()
										.instantiate( () -> attributeValues, options.getSessionFactory() );
								values[column] = subValue;
							}
							else {
								values[column] = subValues;
							}
							column++;
							// The subEnd points to the first character after the ')',
							// so move forward the index to point to the next char after quotes
							assert repeatsChar( string, subEnd, expectedQuotes, '"' );
							i = subEnd + expectedQuotes;
							if ( string.charAt( i ) == ')' ) {
								// Return the end position if this is the last element
								assert column == values.length;
								return i + 1;
							}
							// at this point, we must see a comma to indicate the next element
							assert string.charAt( i ) == ',';
						}
						else {
							inQuote = true;
						}
					}
					else {
						inQuote = true;
					}
				}
				start = i + 1;
				break;
			case ',':
				if ( !inQuote ) {
					if ( start == i ) {
						// Empty element means SQL NULL
						values[column] = null;
					}
					else {
						final JdbcMapping jdbcMapping = embeddableMappingType.getJdbcValueSelectable( column ).getJdbcMapping();
						if ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() == SqlTypes.BOOLEAN ) {
							// PostgreSQL renders booleans as t/f
							values[column] = fromRawObject(
									jdbcMapping,
									string.charAt( start ) == 't',
									options
							);
						}
						else {
							values[column] = fromString(
									jdbcMapping,
									string,
									start,
									i
							);
						}
					}
					column++;
					start = i + 1;
				}
				break;
			case ')':
				if ( !inQuote ) {
					// column < values.length means the final element was not yet emitted
					if ( column < values.length ) {
						if ( start == i ) {
							values[column] = null;
						}
						else {
							values[column] = fromString(
									embeddableMappingType,
									column,
									string,
									start,
									i
							);
						}
					}
					return i + 1;
				}
				break;
		}
	}
	throw new IllegalArgumentException( "Struct not properly formed: " + string.substring( start ) );
}
/**
 * Returns whether {@code times} consecutive occurrences of {@code c} begin at
 * {@code start}, with at least one further character following the run
 * (callers always expect a terminator after the repeated sequence).
 */
private static boolean repeatsChar(String string, int start, int times, char c) {
	final int limit = start + times;
	if ( limit >= string.length() ) {
		// Run would touch or pass the end of the string
		return false;
	}
	for ( int index = start; index < limit; index++ ) {
		if ( string.charAt( index ) != c ) {
			return false;
		}
	}
	return true;
}
/**
 * Converts the text span of a column to its value using the JDBC mapping of
 * the selectable at {@code selectableIndex}.
 */
private static Object fromString(
		EmbeddableMappingType embeddableMappingType,
		int selectableIndex,
		String string,
		int start,
		int end) {
	return fromString(
			embeddableMappingType.getJdbcValueSelectable( selectableIndex ).getJdbcMapping(),
			string,
			start,
			end
	);
}
/**
 * Decodes the given character span through the mapping's JDBC Java type.
 */
private static Object fromString(JdbcMapping jdbcMapping, CharSequence charSequence, int start, int end) {
	return jdbcMapping.getJdbcJavaType().fromEncodedString(
			charSequence,
			start,
			end
	);
}
/**
 * Wraps an already-parsed raw value (e.g. a temporal or boolean) into the
 * mapping's JDBC Java type.
 */
private static Object fromRawObject(JdbcMapping jdbcMapping, Object raw, WrapperOptions options) {
	return jdbcMapping.getJdbcJavaType().wrap(
			raw,
			options
	);
}
/**
 * Parses a date column using the lenient {@link #LOCAL_DATE} formatter
 * (tolerates the trailing time part PostgresPlus/EDB may produce).
 */
private Object parseDate(CharSequence subSequence) {
	return LOCAL_DATE.parse( subSequence, LocalDate::from );
}
/**
 * Parses a time column in ISO local-time format.
 */
private Object parseTime(CharSequence subSequence) {
	return DateTimeFormatter.ISO_LOCAL_TIME.parse( subSequence, LocalTime::from );
}
/**
 * Parses a timestamp column into a {@link Timestamp}, preserving nanosecond
 * precision beyond what {@code Timestamp.valueOf} retains.
 * <p>
 * The {@code jdbcJavaType} parameter is currently unused; kept for signature
 * parity with {@link #parseTimestampWithTimeZone}.
 */
private Object parseTimestamp(CharSequence subSequence, JavaType<?> jdbcJavaType) {
	final TemporalAccessor temporalAccessor = LOCAL_DATE_TIME.parse( subSequence );
	final LocalDateTime localDateTime = LocalDateTime.from( temporalAccessor );
	final Timestamp timestamp = Timestamp.valueOf( localDateTime );
	// Carry over the full nano-of-second value from the parse
	timestamp.setNanos( temporalAccessor.get( ChronoField.NANO_OF_SECOND ) );
	return timestamp;
}
/**
 * Parses a timestamp-with-time-zone column. When an offset was parsed, returns
 * an {@link Instant} or {@link OffsetDateTime} depending on the target Java
 * type; otherwise falls back to a {@link LocalDateTime}.
 */
private Object parseTimestampWithTimeZone(CharSequence subSequence, JavaType<?> jdbcJavaType) {
	final TemporalAccessor temporalAccessor = LOCAL_DATE_TIME.parse( subSequence );
	if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
		if ( jdbcJavaType.getJavaTypeClass() == Instant.class ) {
			return Instant.from( temporalAccessor );
		}
		else {
			return OffsetDateTime.from( temporalAccessor );
		}
	}
	return LocalDateTime.from( temporalAccessor );
}
/**
 * Collapses struct value escaping: every {@code '\'} or {@code '"'} acts as
 * an escape for the character immediately following it, which is emitted
 * verbatim.
 */
private static String unescape(CharSequence string, int start, int end) {
	final StringBuilder result = new StringBuilder( end - start );
	int position = start;
	while ( position < end ) {
		final char current = string.charAt( position );
		if ( current == '\\' || current == '"' ) {
			// Emit the escaped character and skip the escape itself
			position++;
			result.append( string.charAt( position ) );
		}
		else {
			result.append( current );
		}
		position++;
	}
	return result.toString();
}
/**
 * Serializes the embeddable domain value to its PostgreSQL struct literal.
 */
@Override
public Object createJdbcValue(Object domainValue, WrapperOptions options) throws SQLException {
	assert embeddableMappingType != null;
	final StringBuilder sb = new StringBuilder();
	serializeStructTo( new PostgreSQLAppender( sb ), domainValue, options );
	return sb.toString();
}
/**
 * Parses a raw struct literal into one JDBC value per selectable,
 * instantiating nested embeddables along the way.
 */
@Override
public Object[] extractJdbcValues(Object rawJdbcValue, WrapperOptions options) throws SQLException {
	assert embeddableMappingType != null;
	final Object[] array = new Object[embeddableMappingType.getJdbcValueCount()];
	deserializeStruct( (String) rawJdbcValue, 0, 0, array, true, options );
	return array;
}
/**
 * Serializes the given value to its PostgreSQL struct literal, or returns
 * {@code null} for a {@code null} value.
 */
@Override
protected <X> String toString(X value, JavaType<X> javaType, WrapperOptions options) {
	if ( value != null ) {
		final StringBuilder buffer = new StringBuilder();
		serializeStructTo( new PostgreSQLAppender( buffer ), value, options );
		return buffer.toString();
	}
	return null;
}
/**
 * Writes {@code (v1,v2,...)} for the embeddable value to the appender.
 */
private void serializeStructTo(PostgreSQLAppender appender, Object value, WrapperOptions options) {
	final Object[] array = embeddableMappingType.getValues( value );
	serializeValuesTo( appender, options, embeddableMappingType, array, '(' );
	appender.append( ')' );
}
/**
 * Serializes the attribute values of an embeddable, threading the separator
 * character: the first call-site passes {@code '('} so the opening parenthesis
 * is emitted before the first value, after which {@code ','} is used.
 * Non-aggregated nested embeddables are flattened inline; aggregated ones are
 * quoted and serialized recursively through their own struct type.
 */
private void serializeValuesTo(
		PostgreSQLAppender appender,
		WrapperOptions options,
		EmbeddableMappingType embeddableMappingType,
		Object[] array,
		char separator) {
	final int end = embeddableMappingType.getNumberOfAttributeMappings();
	for ( int i = 0; i < end; i++ ) {
		final AttributeMapping attributeMapping = embeddableMappingType.getAttributeMapping( i );
		if ( attributeMapping instanceof BasicValuedMapping ) {
			appender.append( separator );
			separator = ',';
			// null values render as empty elements
			if ( array == null || array[i] == null ) {
				continue;
			}
			final JdbcMapping jdbcMapping = ( (BasicValuedMapping) attributeMapping ).getJdbcMapping();
			serializeBasicTo( appender, options, jdbcMapping, array[i] );
		}
		else if ( attributeMapping instanceof EmbeddedAttributeMapping ) {
			final EmbeddableMappingType mappingType = (EmbeddableMappingType) attributeMapping.getMappedType();
			final SelectableMapping aggregateMapping = mappingType.getAggregateMapping();
			if ( aggregateMapping == null ) {
				// Flattened embeddable: serialize its columns inline,
				// reusing the current separator state
				serializeValuesTo(
						appender,
						options,
						mappingType,
						array == null || array[i] == null ? null : mappingType.getValues( array[i] ),
						separator
				);
				separator = ',';
			}
			else {
				appender.append( separator );
				separator = ',';
				if ( array == null || array[i] == null ) {
					continue;
				}
				// Aggregated nested struct: quote it and delegate to its own struct type
				appender.quoteStart();
				( (PostgreSQLStructJdbcType) aggregateMapping.getJdbcMapping().getJdbcType() ).serializeStructTo(
						appender,
						array[i],
						options
				);
				appender.quoteEnd();
			}
		}
		else {
			throw new UnsupportedOperationException( "Unsupported attribute mapping: " + attributeMapping );
		}
	}
}
/**
 * Serializes a single basic value according to its JDBC type code: numerics
 * unquoted, character data quoted, temporals via {@link #appendTemporal},
 * binary data as backslash-escaped hex.
 */
private void serializeBasicTo(
		PostgreSQLAppender appender,
		WrapperOptions options,
		JdbcMapping jdbcMapping,
		Object array) {
	//noinspection unchecked
	final JavaType<Object> jdbcJavaType = (JavaType<Object>) jdbcMapping.getJdbcJavaType();
	final Object subValue = jdbcMapping.convertToRelationalValue( array );
	switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
		case SqlTypes.TINYINT:
		case SqlTypes.SMALLINT:
		case SqlTypes.INTEGER:
			if ( subValue instanceof Boolean ) {
				// BooleanJavaType has this as an implicit conversion
				appender.append( (Boolean) subValue ? '1' : '0' );
				break;
			}
			// intentional fall-through to the generic numeric handling
		case SqlTypes.BOOLEAN:
		case SqlTypes.BIT:
		case SqlTypes.BIGINT:
		case SqlTypes.FLOAT:
		case SqlTypes.REAL:
		case SqlTypes.DOUBLE:
		case SqlTypes.DECIMAL:
		case SqlTypes.NUMERIC:
			// Numerics are written without quoting
			jdbcJavaType.appendEncodedString(
					appender,
					jdbcJavaType.unwrap(
							subValue,
							jdbcJavaType.getJavaTypeClass(),
							options
					)
			);
			break;
		case SqlTypes.CHAR:
		case SqlTypes.NCHAR:
		case SqlTypes.VARCHAR:
		case SqlTypes.NVARCHAR:
			if ( subValue instanceof Boolean ) {
				// BooleanJavaType has this as an implicit conversion
				appender.append( (Boolean) subValue ? 'Y' : 'N' );
				break;
			}
			// intentional fall-through to the quoted character handling
		case SqlTypes.LONGVARCHAR:
		case SqlTypes.LONGNVARCHAR:
		case SqlTypes.LONG32VARCHAR:
		case SqlTypes.LONG32NVARCHAR:
			// Character data is quoted; the appender escapes embedded quotes
			appender.quoteStart();
			jdbcJavaType.appendEncodedString(
					appender,
					jdbcJavaType.unwrap(
							subValue,
							jdbcJavaType.getJavaTypeClass(),
							options
					)
			);
			appender.quoteEnd();
			break;
		case SqlTypes.DATE:
		case SqlTypes.TIME:
		case SqlTypes.TIME_WITH_TIMEZONE:
		case SqlTypes.TIMESTAMP:
		case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
		case SqlTypes.TIMESTAMP_UTC:
			appendTemporal( appender, jdbcMapping, subValue, options );
			break;
		case SqlTypes.BINARY:
		case SqlTypes.VARBINARY:
		case SqlTypes.LONGVARBINARY:
		case SqlTypes.LONG32VARBINARY:
			// Hex-encoded \x... form; backslashes are doubled per quote level
			final byte[] bytes = jdbcJavaType.unwrap(
					subValue,
					byte[].class,
					options
			);
			final int escapes = 1 << appender.quote;
			appender.ensureCanFit( escapes + 1 + ( bytes.length << 1 ) );
			for ( int i = 0; i < escapes; i++ ) {
				appender.append( '\\' );
			}
			appender.append( 'x' );
			PrimitiveByteArrayJavaType.INSTANCE.appendString(
					appender,
					bytes
			);
			break;
		case SqlTypes.UUID:
			appender.append( subValue.toString() );
			break;
		default:
			throw new UnsupportedOperationException( "Unsupported JdbcType nested in struct: " + jdbcMapping.getJdbcType() );
	}
}
/**
 * Appends a temporal value quoted with {@code '"'}, choosing the rendering by
 * the JDBC type code and the runtime class of the value, unwrapping through
 * the JDBC Java type when no direct rendering applies.
 */
private void appendTemporal(SqlAppender appender, JdbcMapping jdbcMapping, Object value, WrapperOptions options) {
	final TimeZone jdbcTimeZone = getJdbcTimeZone( options );
	//noinspection unchecked
	final JavaType<Object> javaType = (JavaType<Object>) jdbcMapping.getJdbcJavaType();
	appender.append( '"' );
	switch ( jdbcMapping.getJdbcType().getJdbcTypeCode() ) {
		case SqlTypes.DATE:
			if ( value instanceof java.util.Date ) {
				appendAsDate( appender, (java.util.Date) value );
			}
			else if ( value instanceof java.util.Calendar ) {
				appendAsDate( appender, (java.util.Calendar) value );
			}
			else if ( value instanceof TemporalAccessor ) {
				appendAsDate( appender, (TemporalAccessor) value );
			}
			else {
				appendAsDate(
						appender,
						javaType.unwrap( value, java.util.Date.class, options )
				);
			}
			break;
		case SqlTypes.TIME:
			if ( value instanceof java.util.Date ) {
				appendAsTime( appender, (java.util.Date) value, jdbcTimeZone );
			}
			else if ( value instanceof java.util.Calendar ) {
				appendAsTime( appender, (java.util.Calendar) value, jdbcTimeZone );
			}
			else if ( value instanceof TemporalAccessor ) {
				final TemporalAccessor temporalAccessor = (TemporalAccessor) value;
				// Offset-carrying times keep their offset; others render as local time
				if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
					appendAsTime( appender, temporalAccessor, true, jdbcTimeZone );
				}
				else {
					appendAsLocalTime( appender, temporalAccessor );
				}
			}
			else {
				appendAsTime(
						appender,
						javaType.unwrap( value, java.sql.Time.class, options ),
						jdbcTimeZone
				);
			}
			break;
		case SqlTypes.TIMESTAMP:
		case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
		case SqlTypes.TIMESTAMP_UTC:
			if ( value instanceof java.util.Date ) {
				appendAsTimestampWithMicros( appender, (java.util.Date) value, jdbcTimeZone );
			}
			else if ( value instanceof java.util.Calendar ) {
				// Calendar only carries millisecond precision
				appendAsTimestampWithMillis( appender, (java.util.Calendar) value, jdbcTimeZone );
			}
			else if ( value instanceof TemporalAccessor ) {
				final TemporalAccessor temporalAccessor = (TemporalAccessor) value;
				if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
					appendAsTimestampWithMicros( appender, temporalAccessor, true, jdbcTimeZone );
				}
				else {
					appendAsTimestampWithMicros( appender, temporalAccessor, false, jdbcTimeZone );
				}
			}
			else {
				appendAsTimestampWithMicros(
						appender,
						javaType.unwrap( value, java.util.Date.class, options ),
						jdbcTimeZone
				);
			}
			break;
		default:
			throw new IllegalArgumentException();
	}
	appender.append( '"' );
}
/**
 * Returns the configured JDBC time zone, falling back to the JVM default when
 * none is configured (or no options are given).
 */
private static TimeZone getJdbcTimeZone(WrapperOptions options) {
	if ( options != null ) {
		final TimeZone configured = options.getJdbcTimeZone();
		if ( configured != null ) {
			return configured;
		}
	}
	return TimeZone.getDefault();
}
/**
 * A {@link StringBuilderSqlAppender} that tracks struct quote nesting.
 * <p>
 * {@code quote} is {@code 1 << depth}: it starts at 1 (no quoting) and is
 * doubled/halved by {@link #quoteStart()}/{@link #quoteEnd()}. While inside
 * quotes, every appended {@code '"'} is written {@code quote} times, which
 * matches PostgreSQL's quote-doubling per nesting level.
 */
private static class PostgreSQLAppender extends StringBuilderSqlAppender {
	// 1 << current quote nesting depth; 1 means "not inside quotes"
	private int quote = 1;
	public PostgreSQLAppender(StringBuilder sb) {
		super( sb );
	}
	public void quoteStart() {
		append( '"' );
		quote = quote << 1;
	}
	public void quoteEnd() {
		quote = quote >> 1;
		append( '"' );
	}
	@Override
	public PostgreSQLAppender append(char fragment) {
		if ( quote != 1 ) {
			appendWithQuote( fragment );
		}
		else {
			sb.append( fragment );
		}
		return this;
	}
	@Override
	public PostgreSQLAppender append(CharSequence csq) {
		return append( csq, 0, csq.length() );
	}
	@Override
	public PostgreSQLAppender append(CharSequence csq, int start, int end) {
		if ( quote != 1 ) {
			// Inside quotes: each char may expand, so copy one at a time
			int len = end - start;
			sb.ensureCapacity( sb.length() + len );
			for ( int i = start; i < end; i++ ) {
				appendWithQuote( csq.charAt( i ) );
			}
		}
		else {
			sb.append( csq, start, end );
		}
		return this;
	}
	private void appendWithQuote(char fragment) {
		if ( fragment == '"' ) {
			// Escape by repeating the quote once per nesting level
			sb.ensureCapacity( sb.length() + quote );
			for ( int i = 0; i < quote; i++ ) {
				sb.append( '"' );
			}
		}
		else {
			sb.append( fragment );
		}
	}
	public void ensureCanFit(int lengthIncrease) {
		sb.ensureCapacity( sb.length() + lengthIncrease );
	}
}
}

View File

@ -5,6 +5,7 @@
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.sql.CallableStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
@ -20,6 +21,7 @@ import org.hibernate.query.sqm.CastType;
import org.hibernate.query.sqm.TemporalUnit;
import org.hibernate.query.spi.QueryEngine;
import static org.hibernate.query.sqm.TemporalUnit.DAY;
/**

View File

@ -74,6 +74,7 @@ import org.hibernate.type.descriptor.jdbc.XmlJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
import java.util.List;
@ -113,6 +114,10 @@ import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithM
public class SQLServerDialect extends AbstractTransactSQLDialect {
private final static DatabaseVersion MINIMUM_VERSION = DatabaseVersion.make( 10, 0 );
private static final int PARAM_LIST_SIZE_LIMIT = 2100;
// See microsoft.sql.Types.GEOMETRY
private static final int GEOMETRY_TYPE_CODE = -157;
// See microsoft.sql.Types.GEOGRAPHY
private static final int GEOGRAPHY_TYPE_CODE = -158;
private final StandardSequenceExporter exporter;
private final UniqueDelegate uniqueDelegate = new AlterTableUniqueIndexDelegate(this);
@ -223,6 +228,12 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
break;
}
break;
case GEOMETRY_TYPE_CODE:
jdbcTypeCode = GEOMETRY;
break;
case GEOGRAPHY_TYPE_CODE:
jdbcTypeCode = GEOGRAPHY;
break;
}
return super.resolveSqlTypeDescriptor( columnTypeName, jdbcTypeCode, precision, scale, jdbcTypeRegistry );
}
@ -848,6 +859,7 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
PrimitiveByteArrayJavaType.INSTANCE.appendString( appender, bytes );
}
@Override
public void appendUUIDLiteral(SqlAppender appender, java.util.UUID literal) {
appender.appendSql( "cast('" );
appender.appendSql( literal.toString() );

View File

@ -0,0 +1,188 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.sql.SQLException;
import org.hibernate.Internal;
import org.hibernate.metamodel.mapping.AttributeMapping;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.MappingType;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
/**
* A Helper for serializing and deserializing struct, based on an {@link EmbeddableMappingType}.
*/
@Internal
public class StructHelper {
	/**
	 * Converts one raw JDBC value per selectable into one value per attribute,
	 * instantiating non-aggregated nested embeddables from their flattened
	 * JDBC values. When the counts match, {@code rawJdbcValues} is reused as
	 * the result array (mutated in place).
	 */
	public static Object[] getAttributeValues(
			EmbeddableMappingType embeddableMappingType,
			Object[] rawJdbcValues,
			WrapperOptions options) throws SQLException {
		final int numberOfAttributeMappings = embeddableMappingType.getNumberOfAttributeMappings();
		final Object[] attributeValues;
		if ( numberOfAttributeMappings != rawJdbcValues.length ) {
			attributeValues = new Object[numberOfAttributeMappings];
		}
		else {
			// Same cardinality: reuse the raw array as the result
			attributeValues = rawJdbcValues;
		}
		int jdbcIndex = 0;
		for ( int i = 0; i < numberOfAttributeMappings; i++ ) {
			final AttributeMapping attributeMapping = embeddableMappingType.getAttributeMapping( i );
			// Each attribute consumes one or more JDBC values; advance by that count
			jdbcIndex += injectAttributeValue( attributeMapping, attributeValues, i, rawJdbcValues, jdbcIndex, options );
		}
		return attributeValues;
	}
	/**
	 * Writes the attribute value at {@code attributeIndex} from the raw JDBC
	 * values starting at {@code jdbcIndex}.
	 *
	 * @return the number of JDBC values consumed by this attribute
	 */
	private static int injectAttributeValue(
			AttributeMapping attributeMapping,
			Object[] attributeValues,
			int attributeIndex,
			Object[] rawJdbcValues,
			int jdbcIndex,
			WrapperOptions options) throws SQLException {
		final MappingType mappedType = attributeMapping.getMappedType();
		final int jdbcValueCount;
		final Object rawJdbcValue = rawJdbcValues[jdbcIndex];
		if ( mappedType instanceof EmbeddableMappingType ) {
			final EmbeddableMappingType embeddableMappingType = (EmbeddableMappingType) mappedType;
			if ( embeddableMappingType.getAggregateMapping() != null ) {
				// Aggregated embeddable occupies a single JDBC value (the struct itself)
				jdbcValueCount = 1;
				attributeValues[attributeIndex] = rawJdbcValue;
			}
			else {
				// Flattened embeddable: recurse over its slice of the raw values
				// and instantiate it
				jdbcValueCount = embeddableMappingType.getJdbcValueCount();
				final Object[] subJdbcValues = new Object[jdbcValueCount];
				System.arraycopy( rawJdbcValues, jdbcIndex, subJdbcValues, 0, subJdbcValues.length );
				final Object[] subValues = getAttributeValues( embeddableMappingType, subJdbcValues, options );
				attributeValues[attributeIndex] = embeddableMappingType.getRepresentationStrategy()
						.getInstantiator()
						.instantiate(
								() -> subValues,
								embeddableMappingType.findContainingEntityMapping()
										.getEntityPersister()
										.getFactory()
						);
			}
		}
		else {
			assert attributeMapping.getJdbcTypeCount() == 1;
			jdbcValueCount = 1;
			final JdbcMapping jdbcMapping = attributeMapping.getJdbcMappings().get( 0 );
			// Wrap into the JDBC Java type, then apply any value conversion
			final Object jdbcValue = jdbcMapping.getJdbcJavaType().wrap(
					rawJdbcValue,
					options
			);
			attributeValues[attributeIndex] = jdbcMapping.convertToDomainValue( jdbcValue );
		}
		return jdbcValueCount;
	}
	/**
	 * Converts one value per attribute into one JDBC value per selectable,
	 * serializing aggregated nested embeddables and flattening the others.
	 * When the counts match, {@code attributeValues} is reused as the result
	 * array (mutated in place).
	 */
	public static Object[] getJdbcValues(
			EmbeddableMappingType embeddableMappingType,
			Object[] attributeValues,
			WrapperOptions options) throws SQLException {
		final int jdbcValueCount = embeddableMappingType.getJdbcValueCount();
		final Object[] jdbcValues;
		if ( jdbcValueCount != attributeValues.length ) {
			jdbcValues = new Object[jdbcValueCount];
		}
		else {
			jdbcValues = attributeValues;
		}
		int jdbcIndex = 0;
		for ( int i = 0; i < attributeValues.length; i++ ) {
			final AttributeMapping attributeMapping = embeddableMappingType.getAttributeMapping( i );
			jdbcIndex += injectJdbcValue(
					attributeMapping,
					attributeValues,
					i,
					jdbcValues,
					jdbcIndex,
					options
			);
		}
		assert jdbcIndex == jdbcValueCount;
		return jdbcValues;
	}
	/**
	 * Writes the JDBC value(s) for the attribute at {@code attributeIndex}
	 * starting at {@code jdbcIndex}.
	 *
	 * @return the number of JDBC values produced by this attribute
	 */
	private static int injectJdbcValue(
			AttributeMapping attributeMapping,
			Object[] attributeValues,
			int attributeIndex,
			Object[] jdbcValues,
			int jdbcIndex,
			WrapperOptions options) throws SQLException {
		final MappingType mappedType = attributeMapping.getMappedType();
		final int jdbcValueCount;
		if ( mappedType instanceof EmbeddableMappingType ) {
			final EmbeddableMappingType embeddableMappingType = (EmbeddableMappingType) mappedType;
			if ( embeddableMappingType.getAggregateMapping() != null ) {
				// Aggregated embeddable: serialize through its aggregate JdbcType
				// into a single JDBC value
				final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) embeddableMappingType.getAggregateMapping()
						.getJdbcMapping()
						.getJdbcType();
				jdbcValueCount = 1;
				jdbcValues[jdbcIndex] = aggregateJdbcType.createJdbcValue(
						attributeValues[attributeIndex],
						options
				);
			}
			else {
				// Flattened embeddable: recurse into its attributes, writing
				// into the shared jdbcValues array at increasing offsets
				jdbcValueCount = embeddableMappingType.getJdbcValueCount();
				final int numberOfAttributeMappings = embeddableMappingType.getNumberOfAttributeMappings();
				final Object[] subValues = embeddableMappingType.getValues( attributeValues[attributeIndex] );
				int offset = 0;
				for ( int i = 0; i < numberOfAttributeMappings; i++ ) {
					offset += injectJdbcValue(
							embeddableMappingType.getAttributeMapping( i ),
							subValues,
							i,
							jdbcValues,
							jdbcIndex + offset,
							options
					);
				}
				assert offset == jdbcValueCount;
			}
		}
		else {
			assert attributeMapping.getJdbcTypeCount() == 1;
			jdbcValueCount = 1;
			final JdbcMapping jdbcMapping = attributeMapping.getJdbcMappings().get( 0 );
			final JavaType<Object> relationalJavaType;
			if ( jdbcMapping.getValueConverter() == null ) {
				//noinspection unchecked
				relationalJavaType = (JavaType<Object>) jdbcMapping.getJdbcJavaType();
			}
			else {
				//noinspection unchecked
				relationalJavaType = jdbcMapping.getValueConverter().getRelationalJavaType();
			}
			// Unwrap to the JdbcType's preferred Java class when it declares one
			final Class<?> preferredJavaTypeClass = jdbcMapping.getJdbcType().getPreferredJavaTypeClass( options );
			if ( preferredJavaTypeClass == null ) {
				jdbcValues[jdbcIndex] = relationalJavaType.wrap(
						jdbcMapping.convertToRelationalValue( attributeValues[attributeIndex] ),
						options
				);
			}
			else {
				jdbcValues[jdbcIndex] = relationalJavaType.unwrap(
						jdbcMapping.convertToRelationalValue( attributeValues[attributeIndex] ),
						preferredJavaTypeClass,
						options
				);
			}
		}
		return jdbcValueCount;
	}
}

View File

@ -17,11 +17,11 @@ import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.ast.tree.cte.CteStatement;
import org.hibernate.sql.ast.tree.expression.BinaryArithmeticExpression;
import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.expression.AggregateColumnWriteExpression;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.QueryLiteral;
@ -355,6 +355,33 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
}
}
@Override
public void visitAggregateColumnWriteExpression(AggregateColumnWriteExpression aggregateColumnWriteExpression) {
final String dmlTargetTableAlias = getDmlTargetTableAlias();
final ColumnReference columnReference = aggregateColumnWriteExpression.getColumnReference();
if ( dmlTargetTableAlias != null && dmlTargetTableAlias.equals( columnReference.getQualifier() ) ) {
// Sybase needs a table name prefix
// but not if this is a restricted union table reference subquery
final QuerySpec currentQuerySpec = (QuerySpec) getQueryPartStack().getCurrent();
final List<TableGroup> roots;
if ( currentQuerySpec != null && !currentQuerySpec.isRoot()
&& (roots = currentQuerySpec.getFromClause().getRoots()).size() == 1
&& roots.get( 0 ).getPrimaryTableReference() instanceof UnionTableReference ) {
aggregateColumnWriteExpression.appendWriteExpression( this, this );
}
else {
aggregateColumnWriteExpression.appendWriteExpression(
this,
this,
getCurrentDmlStatement().getTargetTable().getTableExpression()
);
}
}
else {
aggregateColumnWriteExpression.appendWriteExpression( this, this );
}
}
@Override
protected boolean needsRowsToSkip() {
return true;

View File

@ -0,0 +1,779 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.io.IOException;
import java.io.OutputStream;
import java.sql.SQLException;
import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import org.hibernate.Internal;
import org.hibernate.internal.util.CharSequenceHelper;
import org.hibernate.metamodel.mapping.AttributeMapping;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.internal.EmbeddedAttributeMapping;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.JdbcDateJavaType;
import org.hibernate.type.descriptor.java.JdbcTimeJavaType;
import org.hibernate.type.descriptor.java.JdbcTimestampJavaType;
import org.hibernate.type.descriptor.java.OffsetDateTimeJavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
/**
* A Helper for serializing and deserializing XML, based on an {@link EmbeddableMappingType}.
*/
@Internal
public class XmlHelper {
/**
 * The root tag under which values are placed as XML elements.
 */
public static final String ROOT_TAG = "e";
// Literal start/end markers of the document wrapper, i.e. "<e>" and "</e>"
private static final String START_TAG = "<" + ROOT_TAG + ">";
private static final String END_TAG = "</" + ROOT_TAG + ">";
/**
 * Decodes a value whose text may contain XML entities: unescapes the span
 * first, then converts it through the mapping's JDBC Java type.
 */
private static Object fromEscapedString(
		JdbcMapping jdbcMapping,
		String string,
		int start,
		int end) {
	final String unescaped = unescape( string, start, end );
	return fromString( jdbcMapping, unescaped, 0, unescaped.length() );
}
/**
 * Decodes a plain (entity-free) text span through the mapping's JDBC Java type.
 */
private static Object fromString(
		JdbcMapping jdbcMapping,
		String string,
		int start,
		int end) {
	return jdbcMapping.getJdbcJavaType().fromEncodedString(
			string,
			start,
			end
	);
}
/**
 * Wraps an already-decoded raw value (e.g. a temporal or byte[]) into the
 * mapping's JDBC Java type.
 */
private static Object fromRawObject(
		JdbcMapping jdbcMapping,
		Object raw,
		WrapperOptions options) {
	return jdbcMapping.getJdbcJavaType().wrap(
			raw,
			options
	);
}
/**
 * Decodes the XML entities {@code &lt;}, {@code &amp;} and {@code &gt;} in the
 * given span; any other {@code &}-sequence is rejected.
 * <p>
 * NOTE(review): the {@code '<'} branch re-escapes a raw {@code '<'} to
 * {@code "&lt;"} even though this method otherwise decodes entities — confirm
 * this asymmetry is intended for the content this is fed.
 */
private static String unescape(String string, int start, int end) {
	final StringBuilder sb = new StringBuilder( end - start );
	for ( int i = start; i < end; i++ ) {
		final char c = string.charAt( i );
		// Label lets the entity cases below break out of the outer switch
		OUTER: switch ( c ) {
			case '<':
				sb.append( "&lt;" );
				break;
			case '&':
				// It must be &amp; or &lt;
				if ( i + 3 < end ) {
					final char c1 = string.charAt( i + 1 );
					switch ( c1 ) {
						case 'l':
							if ( string.charAt( i + 2 ) == 't' && string.charAt( i + 3 ) == ';' ) {
								sb.append( '<' );
								i += 3;
							}
							break OUTER;
						case 'a':
							if ( i + 4 < end
									&& string.charAt( i + 2 ) == 'm'
									&& string.charAt( i + 3 ) == 'p'
									&& string.charAt( i + 4 ) == ';' ) {
								sb.append( '&' );
								i += 4;
							}
							break OUTER;
						case 'g':
							if ( string.charAt( i + 2 ) == 't' && string.charAt( i + 3 ) == ';' ) {
								sb.append( '>' );
								i += 3;
							}
							break OUTER;
					}
				}
				throw new IllegalArgumentException( "Illegal XML content: " + string.substring( start, end ) );
			default:
				sb.append( c );
				break;
		}
	}
	return sb.toString();
}
/**
 * Converts the XML text between {@code start} (inclusive) and {@code end} (exclusive)
 * into the value for the selectable at {@code selectableIndex}, dispatching on the
 * default SQL type code of the selectable's JDBC mapping.
 */
private static Object fromString(
		EmbeddableMappingType embeddableMappingType,
		String string,
		WrapperOptions options,
		int selectableIndex,
		int start,
		int end) {
	final JdbcMapping jdbcMapping = embeddableMappingType.getJdbcValueSelectable( selectableIndex ).getJdbcMapping();
	switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
		// Numeric and boolean literals never contain '<' or '&', so no unescaping is needed
		case SqlTypes.BOOLEAN:
		case SqlTypes.BIT:
		case SqlTypes.TINYINT:
		case SqlTypes.SMALLINT:
		case SqlTypes.INTEGER:
		case SqlTypes.BIGINT:
		case SqlTypes.FLOAT:
		case SqlTypes.REAL:
		case SqlTypes.DOUBLE:
		case SqlTypes.DECIMAL:
		case SqlTypes.NUMERIC:
			return fromString(
					jdbcMapping,
					string,
					start,
					end
			);
		// Temporal content is parsed with the fixed JDBC java types and then wrapped
		// into the mapping's java type
		case SqlTypes.DATE:
			return fromRawObject(
					jdbcMapping,
					JdbcDateJavaType.INSTANCE.fromEncodedString(
							string,
							start,
							end
					),
					options
			);
		case SqlTypes.TIME:
		case SqlTypes.TIME_WITH_TIMEZONE:
			return fromRawObject(
					jdbcMapping,
					JdbcTimeJavaType.INSTANCE.fromEncodedString(
							string,
							start,
							end
					),
					options
			);
		case SqlTypes.TIMESTAMP:
			return fromRawObject(
					jdbcMapping,
					JdbcTimestampJavaType.INSTANCE.fromEncodedString(
							string,
							start,
							end
					),
					options
			);
		case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
		case SqlTypes.TIMESTAMP_UTC:
			return fromRawObject(
					jdbcMapping,
					OffsetDateTimeJavaType.INSTANCE.fromEncodedString(
							string,
							start,
							end
					),
					options
			);
		// Binary data is serialized as base64 text (see XMLAppender#writeBase64)
		case SqlTypes.BINARY:
		case SqlTypes.VARBINARY:
		case SqlTypes.LONGVARBINARY:
		case SqlTypes.LONG32VARBINARY:
		case SqlTypes.UUID:
			return fromRawObject(
					jdbcMapping,
					Base64.getDecoder().decode( string.substring( start, end ) ),
					options
			);
		// Character-like content may contain XML entities and must be unescaped first
		default:
			return fromEscapedString(
					jdbcMapping,
					string,
					start,
					end
			);
	}
}
/**
 * Deserializes an XML document (wrapped in {@code <e>...</e>}) into either a raw
 * {@code Object[]} of JDBC values, or an instantiated embeddable when
 * {@code returnEmbeddable} is {@code true}. When {@code embeddableMappingType} is
 * {@code null}, contents are parsed generically without type information.
 */
public static <X> X fromString(
		EmbeddableMappingType embeddableMappingType,
		String string,
		boolean returnEmbeddable,
		WrapperOptions options) throws SQLException {
	if ( !string.startsWith( START_TAG ) || !string.endsWith( END_TAG ) ) {
		throw new IllegalArgumentException( "Illegal XML for struct: " + string );
	}
	final Object[] array;
	final int end;
	if ( embeddableMappingType != null ) {
		array = new Object[embeddableMappingType.getJdbcValueCount()];
		end = fromString( embeddableMappingType, string, returnEmbeddable, options, array, START_TAG.length() );
	}
	else {
		// Without a mapping type, we can only produce a generic value array
		assert !returnEmbeddable;
		final List<Object> values = new ArrayList<>( 8 );
		end = fromString( string, values, START_TAG.length() );
		array = values.toArray();
	}
	// Parsing must have stopped exactly at the closing root tag
	assert end + END_TAG.length() == string.length();
	if ( !returnEmbeddable ) {
		//noinspection unchecked
		return (X) array;
	}
	final Object[] attributeValues = StructHelper.getAttributeValues( embeddableMappingType, array, options );
	//noinspection unchecked
	return (X) embeddableMappingType.getRepresentationStrategy().getInstantiator().instantiate(
			() -> attributeValues,
			options.getSessionFactory()
	);
}
/**
 * Parses the XML elements under the current position into {@code values} without
 * any mapping knowledge: plain element content is collected as (unescaped) strings,
 * nested elements as {@code Object[]}, and shorthand tags ({@code <tag/>}) as
 * {@code null}.
 *
 * @return the index of the {@code '<'} of the enclosing element's closing tag
 * @throws IllegalArgumentException if the XML ends before the enclosing closing tag
 */
private static int fromString(
		String string,
		List<Object> values,
		int start) {
	// Parser state: tagNameStart/contentStart are -1 while not inside a tag/content;
	// tagName is non-null once the start tag of the current element has been consumed
	int tagNameStart = -1;
	int contentStart = -1;
	String tagName = null;
	for ( int i = start; i < string.length(); i++ ) {
		final char c = string.charAt( i );
		switch ( c ) {
			case '<':
				if ( tagNameStart == -1 ) {
					if ( string.charAt( i + 1 ) == '/' ) {
						// This is the parent closing tag, so we stop here
						assert tagName == null;
						assert contentStart == -1;
						return i;
					}
					// A start tag
					tagNameStart = i + 1;
				}
				else {
					assert contentStart != -1;
					if ( string.charAt( i + 1 ) == '/' ) {
						// This is a closing tag; the current element had plain content
						assert tagName != null;
						values.add( unescape( string, contentStart, i ) );
					}
					else {
						// Nested tag: recurse to collect the child values of this element
						final List<Object> subValues = new ArrayList<>( 8 );
						final int end = fromString( string, subValues, i );
						values.add( subValues.toArray() );
						// The end is the start angle bracket for the end tag
						assert string.charAt( end ) == '<';
						assert string.charAt( end + 1 ) == '/';
						assert string.regionMatches( end + 2, tagName, 0, tagName.length() );
						i = end;
					}
					// consume the whole closing tag ("</" + tagName; the '>' is skipped by i++)
					i += tagName.length() + 2;
					tagNameStart = -1;
					contentStart = -1;
					tagName = null;
				}
				break;
			case '>':
				if ( tagName == null ) {
					// The closing angle bracket of the start tag
					assert contentStart == -1;
					assert tagNameStart != -1;
					tagName = string.substring( tagNameStart, i );
					contentStart = i + 1;
				}
				else {
					// This must be a char in the content
					assert contentStart != -1;
				}
				break;
			case '/':
				if ( tagName == null ) {
					// A shorthand tag encodes null
					values.add( null );
					// skip the closing angle bracket
					i++;
					tagNameStart = -1;
					assert string.charAt( i ) == '>';
				}
				else {
					// This must be a char in the content
					assert contentStart != -1;
				}
				break;
		}
	}
	// Reached the end of input without seeing the enclosing closing tag
	throw new IllegalArgumentException( "XML not properly formed: " + string.substring( start ) );
}
/**
 * Parses the XML elements under the current position into {@code values}, using
 * the embeddable mapping type to resolve each tag name to a selectable index and
 * to convert the element content to the selectable's value type. Sub-aggregates
 * that are themselves XML are parsed recursively; other aggregate encodings are
 * delegated to their {@link AggregateJdbcType}.
 *
 * @return the index of the {@code '<'} of the enclosing element's closing tag
 * @throws IllegalArgumentException if the XML ends before the enclosing closing tag
 */
private static int fromString(
		EmbeddableMappingType embeddableMappingType,
		String string,
		boolean returnEmbeddable,
		WrapperOptions options,
		Object[] values,
		int start) throws SQLException {
	// Parser state: tagNameStart/contentStart are -1 while not inside a tag/content;
	// tagName is non-null once the start tag of the current element has been consumed
	int tagNameStart = -1;
	int contentStart = -1;
	String tagName = null;
	for ( int i = start; i < string.length(); i++ ) {
		final char c = string.charAt( i );
		switch ( c ) {
			case '<':
				if ( tagNameStart == -1 ) {
					if ( string.charAt( i + 1 ) == '/' ) {
						// This is the parent closing tag, so we stop here
						assert tagName == null;
						assert contentStart == -1;
						return i;
					}
					// A start tag
					tagNameStart = i + 1;
				}
				else {
					assert contentStart != -1;
					if ( string.charAt( i + 1 ) == '/' ) {
						// This is a closing tag; convert the plain content to the
						// selectable's value type
						assert tagName != null;
						final int selectableMapping = getSelectableMapping( embeddableMappingType, tagName );
						values[selectableMapping] = fromString(
								embeddableMappingType,
								string,
								options,
								selectableMapping,
								contentStart,
								i
						);
					}
					else {
						// Nested tag: the current element must map to an aggregate selectable
						final int selectableIndex = getSelectableMapping( embeddableMappingType, tagName );
						final SelectableMapping selectable = embeddableMappingType.getJdbcValueSelectable(
								selectableIndex
						);
						if ( !( selectable.getJdbcMapping().getJdbcType() instanceof AggregateJdbcType ) ) {
							throw new IllegalArgumentException(
									String.format(
											"XML starts sub-object for a non-aggregate type at index %d. Selectable [%s] is of type [%s]",
											i,
											selectable.getSelectableName(),
											selectable.getJdbcMapping().getJdbcType().getClass().getName()
									)
							);
						}
						final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) selectable.getJdbcMapping().getJdbcType();
						final EmbeddableMappingType subMappingType = aggregateJdbcType.getEmbeddableMappingType();
						final Object[] subValues;
						final int end;
						if ( aggregateJdbcType.getJdbcTypeCode() == SqlTypes.SQLXML || aggregateJdbcType.getDefaultSqlTypeCode() == SqlTypes.SQLXML ) {
							// If we stay in XML land, we can recurse instead
							subValues = new Object[subMappingType.getJdbcValueCount()];
							end = fromString(
									subMappingType,
									string,
									returnEmbeddable,
									options,
									subValues,
									i
							);
						}
						else {
							// Determine the end of the XML element
							// NOTE(review): string.charAt( i ) is '<' on entry to this branch,
							// so this loop cannot advance and end == i here — confirm intent
							while ( string.charAt( i ) != '<' ) {
								i++;
							}
							end = i;
							// NOTE(review): this passes the method-level `start`, not the
							// current element's content start — looks like it should be
							// contentStart; verify against a non-XML sub-aggregate round trip
							subValues = aggregateJdbcType.extractJdbcValues(
									CharSequenceHelper.subSequence(
											string,
											start,
											end
									),
									options
							);
						}
						if ( returnEmbeddable ) {
							// Materialize the sub-embeddable from its raw JDBC values
							final Object[] attributeValues = StructHelper.getAttributeValues(
									subMappingType,
									subValues,
									options
							);
							final Object subValue = subMappingType.getRepresentationStrategy()
									.getInstantiator()
									.instantiate( () -> attributeValues, options.getSessionFactory() );
							values[selectableIndex] = subValue;
						}
						else {
							values[selectableIndex] = subValues;
						}
						// The end is the start angle bracket for the end tag
						assert string.charAt( end ) == '<';
						assert string.charAt( end + 1 ) == '/';
						assert string.regionMatches( end + 2, tagName, 0, tagName.length() );
						i = end;
					}
					// consume the whole closing tag ("</" + tagName; the '>' is skipped by i++)
					i += tagName.length() + 2;
					tagNameStart = -1;
					contentStart = -1;
					tagName = null;
				}
				break;
			case '>':
				if ( tagName == null ) {
					// The closing angle bracket of the start tag
					assert contentStart == -1;
					assert tagNameStart != -1;
					tagName = string.substring( tagNameStart, i );
					contentStart = i + 1;
				}
				else {
					// This must be a char in the content
					assert contentStart != -1;
				}
				break;
			case '/':
				if ( tagName == null ) {
					// A shorthand tag encodes null,
					// but we don't have to do anything because null is the default.
					// Also, skip the closing angle bracket
					i++;
					tagNameStart = -1;
					assert string.charAt( i ) == '>';
				}
				else {
					// This must be a char in the content
					assert contentStart != -1;
				}
				break;
		}
	}
	// Reached the end of input without seeing the enclosing closing tag
	throw new IllegalArgumentException( "XML not properly formed: " + string.substring( start ) );
}
/**
 * Serializes the given embeddable value to an XML string wrapped in the
 * {@code <e>...</e>} root element.
 */
public static String toString(
		EmbeddableMappingType embeddableMappingType,
		Object value,
		WrapperOptions options) {
	final StringBuilder sb = new StringBuilder();
	sb.append( START_TAG );
	toString( embeddableMappingType, value, options, new XMLAppender( sb ) );
	return sb.append( END_TAG ).toString();
}
/**
 * Serializes the attribute values of {@code value} as XML elements to the appender.
 * Null attribute values are skipped i.e. no element is rendered for them.
 */
private static void toString(
		EmbeddableMappingType embeddableMappingType,
		Object value,
		WrapperOptions options,
		XMLAppender sb) {
	final Object[] attributeValues = embeddableMappingType.getValues( value );
	for ( int i = 0; i < attributeValues.length; i++ ) {
		final Object attributeValue = attributeValues[i];
		if ( attributeValue == null ) {
			// An absent element encodes null
			continue;
		}
		final AttributeMapping attributeMapping = embeddableMappingType.getAttributeMapping( i );
		if ( attributeMapping instanceof SelectableMapping ) {
			final SelectableMapping selectable = (SelectableMapping) attributeMapping;
			final String tagName = selectable.getSelectableName();
			appendOpeningTag( sb, tagName );
			serializeValueTo( sb, selectable, attributeValue, options );
			appendClosingTag( sb, tagName );
		}
		else if ( attributeMapping instanceof EmbeddedAttributeMapping ) {
			final EmbeddableMappingType mappingType = (EmbeddableMappingType) attributeMapping.getMappedType();
			final SelectableMapping aggregateMapping = mappingType.getAggregateMapping();
			if ( aggregateMapping == null ) {
				// Flattened embeddable: render its attributes inline
				toString( mappingType, attributeValue, options, sb );
			}
			else {
				// Aggregate embeddable: wrap its attributes in an element of its own
				final String tagName = aggregateMapping.getSelectableName();
				appendOpeningTag( sb, tagName );
				toString( mappingType, attributeValue, options, sb );
				appendClosingTag( sb, tagName );
			}
		}
		else {
			throw new UnsupportedOperationException( "Unsupported attribute mapping: " + attributeMapping );
		}
	}
}

// Writes "<tagName>"
private static void appendOpeningTag(XMLAppender sb, String tagName) {
	sb.append( '<' );
	sb.append( tagName );
	sb.append( '>' );
}

// Writes "</tagName>"
private static void appendClosingTag(XMLAppender sb, String tagName) {
	sb.append( '<' );
	sb.append( '/' );
	sb.append( tagName );
	sb.append( '>' );
}
/**
 * Serializes a single selectable's value as XML element content, dispatching on the
 * default SQL type code of its JDBC mapping. Character-like content is entity-escaped,
 * binary data is base64-encoded (matching the decoding in the reading counterpart).
 */
private static void serializeValueTo(XMLAppender appender, SelectableMapping selectable, Object value, WrapperOptions options) {
	final JdbcMapping jdbcMapping = selectable.getJdbcMapping();
	//noinspection unchecked
	final JavaType<Object> jdbcJavaType = (JavaType<Object>) jdbcMapping.getJdbcJavaType();
	// Apply a possible value converter before rendering the relational representation
	final Object relationalValue = jdbcMapping.convertToRelationalValue( value );
	switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
		case SqlTypes.TINYINT:
		case SqlTypes.SMALLINT:
		case SqlTypes.INTEGER:
			if ( relationalValue instanceof Boolean ) {
				// BooleanJavaType has this as an implicit conversion
				appender.append( (Boolean) relationalValue ? '1' : '0' );
				break;
			}
			// intentional fall-through for non-boolean values
		case SqlTypes.BOOLEAN:
		case SqlTypes.BIT:
		case SqlTypes.BIGINT:
		case SqlTypes.FLOAT:
		case SqlTypes.REAL:
		case SqlTypes.DOUBLE:
		case SqlTypes.DECIMAL:
		case SqlTypes.NUMERIC:
			// Numeric content needs no escaping
			jdbcJavaType.appendEncodedString(
					appender,
					jdbcJavaType.unwrap(
							relationalValue,
							jdbcJavaType.getJavaTypeClass(),
							options
					)
			);
			break;
		case SqlTypes.CHAR:
		case SqlTypes.NCHAR:
		case SqlTypes.VARCHAR:
		case SqlTypes.NVARCHAR:
			if ( relationalValue instanceof Boolean ) {
				// BooleanJavaType has this as an implicit conversion
				appender.append( (Boolean) relationalValue ? 'Y' : 'N' );
				break;
			}
			// intentional fall-through for non-boolean values
		case SqlTypes.LONGVARCHAR:
		case SqlTypes.LONGNVARCHAR:
		case SqlTypes.LONG32VARCHAR:
		case SqlTypes.LONG32NVARCHAR:
			// Character content may contain '<' or '&' and must be entity-escaped
			appender.startEscaping();
			jdbcJavaType.appendEncodedString(
					appender,
					jdbcJavaType.unwrap(
							relationalValue,
							jdbcJavaType.getJavaTypeClass(),
							options
					)
			);
			appender.endEscaping();
			break;
		case SqlTypes.DATE:
			// Unwrap the converted relational value (not the domain value)
			// so that value converters are honored, like in all other cases
			JdbcDateJavaType.INSTANCE.appendEncodedString(
					appender,
					jdbcJavaType.unwrap( relationalValue, java.sql.Date.class, options )
			);
			break;
		case SqlTypes.TIME:
		case SqlTypes.TIME_WITH_TIMEZONE:
			JdbcTimeJavaType.INSTANCE.appendEncodedString(
					appender,
					jdbcJavaType.unwrap( relationalValue, java.sql.Time.class, options )
			);
			break;
		case SqlTypes.TIMESTAMP:
			JdbcTimestampJavaType.INSTANCE.appendEncodedString(
					appender,
					jdbcJavaType.unwrap( relationalValue, java.sql.Timestamp.class, options )
			);
			break;
		case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
		case SqlTypes.TIMESTAMP_UTC:
			DateTimeFormatter.ISO_OFFSET_DATE_TIME.formatTo( jdbcJavaType.unwrap( relationalValue, OffsetDateTime.class, options ), appender );
			break;
		case SqlTypes.BINARY:
		case SqlTypes.VARBINARY:
		case SqlTypes.LONGVARBINARY:
		case SqlTypes.LONG32VARBINARY:
		case SqlTypes.UUID:
			// Base64 text never contains '<' or '&', so no escaping is needed
			appender.writeBase64( jdbcJavaType.unwrap( relationalValue, byte[].class, options ) );
			break;
		default:
			throw new UnsupportedOperationException( "Unsupported JdbcType nested in struct: " + jdbcMapping.getJdbcType() );
	}
}
/**
 * Resolves the selectable index for the given selectable name.
 *
 * @throws IllegalArgumentException if the embeddable type has no such selectable
 */
private static int getSelectableMapping(
		EmbeddableMappingType embeddableMappingType,
		String name) {
	final int selectableIndex = embeddableMappingType.getSelectableIndex( name );
	if ( selectableIndex == -1 ) {
		// This is the XML helper, not the JSON one; the message previously said "JSON processing"
		throw new IllegalArgumentException(
				String.format(
						"Could not find selectable [%s] in embeddable type [%s] for XML processing.",
						name,
						embeddableMappingType.getMappedJavaType().getJavaTypeClass().getName()
				)
		);
	}
	return selectableIndex;
}
/**
 * A {@link SqlAppender} that renders into a {@link StringBuilder} and supports
 * XML entity escaping of character content as well as base64 encoding of binary
 * data. The {@link OutputStream} methods exist solely so the appender can serve
 * as the sink for {@link Base64.Encoder#wrap}.
 */
private static class XMLAppender extends OutputStream implements SqlAppender {

	private final StringBuilder sb;
	// When true, append(char/CharSequence) replaces '<' and '&' with character entities
	private boolean escape;

	public XMLAppender(StringBuilder sb) {
		this.sb = sb;
	}

	@Override
	public void appendSql(String fragment) {
		append( fragment );
	}

	@Override
	public void appendSql(char fragment) {
		append( fragment );
	}

	@Override
	public void appendSql(int value) {
		sb.append( value );
	}

	@Override
	public void appendSql(long value) {
		sb.append( value );
	}

	@Override
	public void appendSql(boolean value) {
		sb.append( value );
	}

	@Override
	public String toString() {
		return sb.toString();
	}

	public void startEscaping() {
		assert !escape;
		escape = true;
	}

	public void endEscaping() {
		assert escape;
		escape = false;
	}

	@Override
	public XMLAppender append(char fragment) {
		if ( escape ) {
			appendEscaped( fragment );
		}
		else {
			sb.append( fragment );
		}
		return this;
	}

	@Override
	public XMLAppender append(CharSequence csq) {
		return append( csq, 0, csq.length() );
	}

	@Override
	public XMLAppender append(CharSequence csq, int start, int end) {
		if ( escape ) {
			int len = end - start;
			sb.ensureCapacity( sb.length() + len );
			for ( int i = start; i < end; i++ ) {
				appendEscaped( csq.charAt( i ) );
			}
		}
		else {
			sb.append( csq, start, end );
		}
		return this;
	}

	private void appendEscaped(char fragment) {
		switch ( fragment ) {
			case '<':
				sb.append( "&lt;" );
				break;
			case '&':
				sb.append( "&amp;" );
				break;
			default:
				sb.append( fragment );
				break;
		}
	}

	@Override
	public void write(int v) {
		// The Base64 encoder emits ASCII characters as bytes; append them verbatim
		// so that the content can be decoded with Base64.getDecoder() when reading.
		// The previous implementation rendered hexadecimal digits here, which the
		// base64-decoding read side could not round-trip.
		sb.append( (char) ( v & 0xFF ) );
	}

	@Override
	public void write(byte[] bytes) {
		write( bytes, 0, bytes.length );
	}

	@Override
	public void write(byte[] bytes, int off, int len) {
		sb.ensureCapacity( sb.length() + len );
		for ( int i = 0; i < len; i++ ) {
			sb.append( (char) ( bytes[off + i] & 0xFF ) );
		}
	}

	/**
	 * Appends the base64 encoding of the given bytes.
	 */
	public void writeBase64(byte[] bytes) {
		try {
			// Wrap per invocation and close the encoder so the final (possibly padded)
			// base64 quantum is flushed. The previous implementation cached the encoding
			// stream without ever closing it, which withheld the trailing bytes of any
			// payload whose length is not a multiple of 3 and leaked encoder state
			// between values. Closing is safe: this appender inherits the no-op
			// OutputStream#close(), so only the encoder itself is finalized.
			final OutputStream base64OutputStream = Base64.getEncoder().wrap( this );
			base64OutputStream.write( bytes );
			base64OutputStream.close();
		}
		catch (IOException e) {
			// Should never happen, we only write into a StringBuilder
			throw new RuntimeException( e );
		}
	}
}
}

View File

@ -0,0 +1,154 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect.aggregate;
import java.util.List;
import org.hibernate.Incubating;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * A set of operations providing support for aggregate column types
 * in a certain {@link Dialect SQL dialect}.
 *
 * @since 6.2
 */
@Incubating
public interface AggregateSupport {

	/**
	 * Replaces the given {@code placeholder} in the given {@code template}
	 * by the custom read expression to use for {@code column}.
	 *
	 * @param template The custom read expression template of the column
	 * @param placeholder The placeholder to replace with the actual read expression
	 * @param aggregateParentReadExpression The expression to the aggregate column, which contains the column
	 * @param column The column within the aggregate type, for which to return the read expression
	 * @param aggregateColumnType The type information for the aggregate column
	 * @param columnType The type information for the column within the aggregate type
	 * @return the template with the placeholder replaced by the actual read expression
	 */
	String aggregateComponentCustomReadExpression(
			String template,
			String placeholder,
			String aggregateParentReadExpression,
			String column,
			ColumnTypeInformation aggregateColumnType,
			ColumnTypeInformation columnType);

	/**
	 * Returns the assignment expression to use for {@code column},
	 * which is part of the aggregate type reachable through
	 * {@code aggregateParentAssignmentExpression}.
	 *
	 * @param aggregateParentAssignmentExpression The expression to the aggregate column, which contains the column
	 * @param column The column within the aggregate type, for which to return the assignment expression
	 * @param aggregateColumnType The type information for the aggregate column
	 * @param columnType The type information for the column within the aggregate type
	 */
	String aggregateComponentAssignmentExpression(
			String aggregateParentAssignmentExpression,
			String column,
			ColumnTypeInformation aggregateColumnType,
			ColumnTypeInformation columnType);

	/**
	 * Returns the custom write expression to use for an aggregate column
	 * of the given column type, containing the given aggregated columns.
	 *
	 * @param aggregateColumnType The type information for the aggregate column
	 * @param aggregatedColumns The columns of the aggregate type
	 */
	String aggregateCustomWriteExpression(ColumnTypeInformation aggregateColumnType, List<Column> aggregatedColumns);

	/**
	 * Whether {@link #aggregateCustomWriteExpressionRenderer(SelectableMapping, SelectableMapping[], TypeConfiguration)} is needed
	 * when assigning an expression to individual aggregated columns in an update statement.
	 *
	 * @param aggregateSqlTypeCode The {@link org.hibernate.type.SqlTypes} type code of the aggregate column
	 */
	boolean requiresAggregateCustomWriteExpressionRenderer(int aggregateSqlTypeCode);

	/**
	 * Whether to prefer selecting the aggregate column as a whole instead of individual parts.
	 *
	 * @param aggregateSqlTypeCode The {@link org.hibernate.type.SqlTypes} type code of the aggregate column
	 */
	boolean preferSelectAggregateMapping(int aggregateSqlTypeCode);

	/**
	 * Whether to prefer binding the aggregate column as a whole instead of individual parts.
	 *
	 * @param aggregateSqlTypeCode The {@link org.hibernate.type.SqlTypes} type code of the aggregate column
	 */
	boolean preferBindAggregateMapping(int aggregateSqlTypeCode);

	/**
	 * Creates the renderer for the custom write expression that updates a selected
	 * set of aggregated columns within an aggregate column.
	 *
	 * @param aggregateColumn The mapping of the aggregate column
	 * @param columnsToUpdate The mappings of the columns that should be updated
	 * @param typeConfiguration The type configuration
	 */
	WriteExpressionRenderer aggregateCustomWriteExpressionRenderer(
			SelectableMapping aggregateColumn,
			SelectableMapping[] columnsToUpdate,
			TypeConfiguration typeConfiguration);

	/**
	 * Contract for rendering the custom write expression that updates a selected set of aggregated columns
	 * within an aggregate column to the value expressions as given by the {@code aggregateColumnWriteExpression}.
	 */
	interface WriteExpressionRenderer {
		/**
		 * Renders the qualified custom write expression to the {@link SqlAppender} with the value expressions for each
		 * selectable as returned by {@link AggregateColumnWriteExpression#getValueExpression(SelectableMapping)}.
		 */
		void render(
				SqlAppender sqlAppender,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression aggregateColumnWriteExpression,
				String qualifier);
	}

	/**
	 * The actual write expression for an aggregate column
	 * which gives access to the value expressions for the respective selectable mapping.
	 */
	interface AggregateColumnWriteExpression {
		/**
		 * Returns the value expression to assign to the given selectable mapping,
		 * or throws an {@link IllegalArgumentException} when an invalid selectable mapping is passed.
		 */
		Expression getValueExpression(SelectableMapping selectableMapping);
	}

	/**
	 * Allows to generate auxiliary database objects for an aggregate type.
	 *
	 * @param namespace The namespace to create the objects in
	 * @param aggregatePath The path of the aggregate column
	 * @param aggregateColumnType The type information for the aggregate column
	 * @param aggregatedColumns The columns of the aggregate type
	 */
	List<AuxiliaryDatabaseObject> aggregateAuxiliaryDatabaseObjects(
			Namespace namespace,
			String aggregatePath,
			ColumnTypeInformation aggregateColumnType,
			List<Column> aggregatedColumns);

	/**
	 * Returns the {@link org.hibernate.type.SqlTypes} type code to use for the given column type code,
	 * when aggregated within a column of the given aggregate column type code.
	 * Allows changing types when a database does not allow the use of certain types within an aggregate type,
	 * like DB2 doesn't allow the use of {@code boolean} within an object/struct type.
	 *
	 * @param aggregateColumnSqlTypeCode The {@link org.hibernate.type.SqlTypes} type code of the aggregate column
	 * @param columnSqlTypeCode The {@link org.hibernate.type.SqlTypes} type code of the column
	 */
	int aggregateComponentSqlTypeCode(int aggregateColumnSqlTypeCode, int columnSqlTypeCode);

	/**
	 * Returns whether the database supports the use of a check constraint on tables,
	 * to implement not-null and other constraints of an aggregate type.
	 */
	boolean supportsComponentCheckConstraints();
}

View File

@ -0,0 +1,93 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect.aggregate;
import java.util.Collections;
import java.util.List;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.spi.TypeConfiguration;
/**
 * Fallback {@link AggregateSupport} for {@link org.hibernate.dialect.Dialect dialects}
 * without native aggregate column support: schema-related hooks are no-ops and the
 * runtime expression hooks throw {@link UnsupportedOperationException}.
 */
public class AggregateSupportImpl implements AggregateSupport {

	// Stateless, so a single shared instance suffices
	public static final AggregateSupport INSTANCE = new AggregateSupportImpl();

	@Override
	public String aggregateComponentCustomReadExpression(
			String template,
			String placeholder,
			String aggregateParentReadExpression,
			String column,
			ColumnTypeInformation aggregateColumnType,
			ColumnTypeInformation columnType) {
		throw new UnsupportedOperationException( "Dialect does not support aggregateComponentCustomReadExpression: " + getClass().getName() );
	}

	@Override
	public String aggregateComponentAssignmentExpression(
			String aggregateParentAssignmentExpression,
			String column,
			ColumnTypeInformation aggregateColumnType,
			ColumnTypeInformation columnType) {
		throw new UnsupportedOperationException( "Dialect does not support aggregateComponentAssignmentExpression: " + getClass().getName() );
	}

	@Override
	public String aggregateCustomWriteExpression(
			ColumnTypeInformation aggregateColumnType,
			List<Column> aggregatedColumns) {
		// null signals that no custom write expression is needed
		return null;
	}

	@Override
	public boolean requiresAggregateCustomWriteExpressionRenderer(int aggregateSqlTypeCode) {
		throw new UnsupportedOperationException( "Dialect does not support requiresAggregateCustomWriteExpressionRenderer: " + getClass().getName() );
	}

	@Override
	public boolean preferSelectAggregateMapping(int aggregateSqlTypeCode) {
		// By default, assume the driver supports this and prefer selecting the aggregate column
		return true;
	}

	@Override
	public boolean preferBindAggregateMapping(int aggregateSqlTypeCode) {
		// By default, assume the driver supports this and prefer binding the aggregate column
		return true;
	}

	@Override
	public WriteExpressionRenderer aggregateCustomWriteExpressionRenderer(
			SelectableMapping aggregateColumn,
			SelectableMapping[] columnsToUpdate,
			TypeConfiguration typeConfiguration) {
		throw new UnsupportedOperationException( "Dialect does not support aggregateCustomWriteExpressionRenderer: " + getClass().getName() );
	}

	@Override
	public List<AuxiliaryDatabaseObject> aggregateAuxiliaryDatabaseObjects(
			Namespace namespace,
			String aggregatePath,
			ColumnTypeInformation aggregateColumnType,
			List<Column> aggregatedColumns) {
		// No auxiliary objects (e.g. UDT definitions) are needed by default
		return Collections.emptyList();
	}

	@Override
	public int aggregateComponentSqlTypeCode(int aggregateColumnSqlTypeCode, int columnSqlTypeCode) {
		// By default, columns keep their type within an aggregate
		return columnSqlTypeCode;
	}

	@Override
	public boolean supportsComponentCheckConstraints() {
		return true;
	}
}

View File

@ -0,0 +1,472 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect.aggregate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.NamedAuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.dialect.DB2Dialect;
import org.hibernate.dialect.DB2StructJdbcType;
import org.hibernate.dialect.XmlHelper;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Column;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.descriptor.sql.DdlType;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.type.SqlTypes.BOOLEAN;
import static org.hibernate.type.SqlTypes.SMALLINT;
import static org.hibernate.type.SqlTypes.STRUCT;
public class DB2AggregateSupport extends AggregateSupportImpl {
public static final AggregateSupport INSTANCE = new DB2AggregateSupport();
/**
 * {@inheritDoc}
 * <p>
 * DB2 navigates into a struct-typed column with the {@code ..} operator.
 */
@Override
public String aggregateComponentCustomReadExpression(
		String template,
		String placeholder,
		String aggregateParentReadExpression,
		String column,
		ColumnTypeInformation aggregateColumnType,
		ColumnTypeInformation columnType) {
	if ( aggregateColumnType.getTypeCode() == STRUCT ) {
		return template.replace( placeholder, aggregateParentReadExpression + ".." + column );
	}
	throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
}
/**
 * {@inheritDoc}
 * <p>
 * DB2 assigns struct attributes through the {@code ..} operator.
 */
@Override
public String aggregateComponentAssignmentExpression(
		String aggregateParentAssignmentExpression,
		String column,
		ColumnTypeInformation aggregateColumnType,
		ColumnTypeInformation columnType) {
	if ( aggregateColumnType.getTypeCode() == STRUCT ) {
		return aggregateParentAssignmentExpression + ".." + column;
	}
	throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
}
/**
 * {@inheritDoc}
 * <p>
 * For struct columns, renders a constructor-style expression that builds the UDT
 * value and assigns every aggregated column via the {@code ..} operator.
 */
@Override
public String aggregateCustomWriteExpression(
		ColumnTypeInformation aggregateColumnType,
		List<Column> aggregatedColumns) {
	if ( aggregateColumnType.getTypeCode() == STRUCT ) {
		final StringBuilder sb = new StringBuilder();
		appendStructCustomWriteExpression( aggregateColumnType, aggregatedColumns, sb );
		return sb.toString();
	}
	throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
}
/**
 * Renders {@code TYPE()..col1(cast(? as ...))..col2(...)} for the given UDT,
 * recursing into nested struct columns.
 */
private static void appendStructCustomWriteExpression(
		ColumnTypeInformation aggregateColumnType,
		List<Column> aggregatedColumns,
		StringBuilder sb) {
	// Start from an empty instance of the UDT and assign each attribute via ".."
	sb.append( aggregateColumnType.getTypeName() ).append( "()" );
	for ( Column udtColumn : aggregatedColumns ) {
		sb.append( ".." ).append( udtColumn.getName() ).append( '(' );
		if ( udtColumn.getSqlTypeCode() != STRUCT ) {
			// Plain column: bind a parameter cast to the column's SQL type
			sb.append( "cast(? as " ).append( udtColumn.getSqlType() ).append( ')' );
		}
		else {
			// Nested struct: recurse with the nested aggregate's own columns
			final AggregateColumn aggregateColumn = (AggregateColumn) udtColumn;
			appendStructCustomWriteExpression(
					aggregateColumn,
					aggregateColumn.getComponent().getAggregatedColumns(),
					sb
			);
		}
		sb.append( ')' );
	}
}
/**
 * {@inheritDoc}
 * <p>
 * DB2 doesn't support {@code boolean} within struct types, so such columns are
 * stored as {@code smallint} instead.
 */
@Override
public int aggregateComponentSqlTypeCode(int aggregateColumnSqlTypeCode, int columnSqlTypeCode) {
	return aggregateColumnSqlTypeCode == STRUCT && columnSqlTypeCode == BOOLEAN
			? SMALLINT
			: columnSqlTypeCode;
}
@Override
public boolean requiresAggregateCustomWriteExpressionRenderer(int aggregateSqlTypeCode) {
	// Struct columns can only be updated through the custom ".." assignment rendering
	return aggregateSqlTypeCode == STRUCT;
}
/**
 * {@inheritDoc}
 * <p>
 * Only struct aggregate columns are supported by this dialect.
 */
@Override
public WriteExpressionRenderer aggregateCustomWriteExpressionRenderer(
		SelectableMapping aggregateColumn,
		SelectableMapping[] columnsToUpdate,
		TypeConfiguration typeConfiguration) {
	final int aggregateSqlTypeCode = aggregateColumn.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode();
	if ( aggregateSqlTypeCode == STRUCT ) {
		return structAggregateColumnWriter( aggregateColumn, columnsToUpdate, typeConfiguration );
	}
	throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateSqlTypeCode );
}
/**
 * Creates the renderer producing DB2 ".." assignment expressions for updating
 * a selected set of columns within a struct column.
 */
private WriteExpressionRenderer structAggregateColumnWriter(
		SelectableMapping aggregateColumn,
		SelectableMapping[] columns,
		TypeConfiguration typeConfiguration) {
	return new RootStructWriteExpression( aggregateColumn, columns, typeConfiguration );
}
/**
 * Determines the SQL type name to cast the bind parameter of the given column to.
 * An explicit column definition wins; otherwise the cast type name is derived from
 * the DDL type registry based on the column's default SQL type code.
 */
private static String determineTypeName(SelectableMapping column, TypeConfiguration typeConfiguration) {
	if ( column.getColumnDefinition() != null ) {
		return column.getColumnDefinition();
	}
	final DdlType ddlType = typeConfiguration.getDdlTypeRegistry().getDescriptor(
			column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode()
	);
	return ddlType.getCastTypeName(
			column.getJdbcMapping().getJdbcType(),
			column.getJdbcMapping().getJavaTypeDescriptor(),
			column.getLength(),
			column.getPrecision(),
			column.getScale()
	);
}
/**
 * A node of the write-expression tree that is built for a struct column update.
 */
interface AggregateWriteExpression {
	/**
	 * Appends the DB2 expression assigning this node's columns, where {@code path}
	 * is the qualified expression navigating to the current struct value.
	 */
	void append(
			SqlAppender sb,
			String path,
			SqlAstTranslator<?> translator,
			AggregateColumnWriteExpression expression);
}
/**
 * Write expression for a struct-typed column which renders DB2 {@code ..}
 * attribute assignments for its (possibly nested) sub-expressions.
 */
private static class AggregateStructWriteExpression implements AggregateWriteExpression {
	// Sub-expressions keyed by column name; insertion order is kept for deterministic rendering
	private final LinkedHashMap<String, AggregateWriteExpression> subExpressions = new LinkedHashMap<>();
	protected final EmbeddableMappingType embeddableMappingType;
	protected final String structTypeName;
	protected final boolean nullable;

	public AggregateStructWriteExpression(SelectableMapping selectableMapping) {
		final DB2StructJdbcType structJdbcType = (DB2StructJdbcType) selectableMapping.getJdbcMapping().getJdbcType();
		this.embeddableMappingType = structJdbcType.getEmbeddableMappingType();
		this.structTypeName = structJdbcType.getStructTypeName();
		this.nullable = selectableMapping.isNullable();
	}

	/**
	 * Builds the nested expression tree for the given columns: intermediate
	 * selectable-path parts create nested struct expressions, the final part
	 * becomes a basic assignment expression.
	 */
	protected void initializeSubExpressions(SelectableMapping[] columns, TypeConfiguration typeConfiguration) {
		for ( SelectableMapping column : columns ) {
			final SelectablePath selectablePath = column.getSelectablePath();
			final SelectablePath[] parts = selectablePath.getParts();
			final String typeName = determineTypeName( column, typeConfiguration );
			AggregateStructWriteExpression currentAggregate = this;
			EmbeddableMappingType currentMappingType = embeddableMappingType;
			// NOTE(review): the loop skips parts[0], which presumably denotes the
			// aggregate column itself — confirm against SelectablePath semantics
			for ( int i = 1; i < parts.length - 1; i++ ) {
				final SelectableMapping selectableMapping = currentMappingType.getJdbcValueSelectable(
						currentMappingType.getSelectableIndex( parts[i].getSelectableName() )
				);
				currentAggregate = (AggregateStructWriteExpression) currentAggregate.subExpressions.computeIfAbsent(
						parts[i].getSelectableName(),
						k -> new AggregateStructWriteExpression( selectableMapping )
				);
				currentMappingType = currentAggregate.embeddableMappingType;
			}
			final String customWriteExpression = column.getWriteExpression();
			currentAggregate.subExpressions.put(
					parts[parts.length - 1].getSelectableName(),
					new BasicStructWriteExpression(
							column,
							typeName,
							customWriteExpression
					)
			);
		}
	}

	@Override
	public void append(
			SqlAppender sb,
			String path,
			SqlAstTranslator<?> translator,
			AggregateColumnWriteExpression expression) {
		if ( nullable ) {
			// A null struct has no attributes to assign to, so substitute an empty instance
			sb.append( "coalesce(" );
			sb.append( path );
			sb.append( "," );
			sb.append( structTypeName );
			sb.append( "())" );
		}
		else {
			sb.append( path );
		}
		// Render "..column(value)" for every sub-expression against the current path
		for ( Map.Entry<String, AggregateWriteExpression> entry : subExpressions.entrySet() ) {
			final String column = entry.getKey();
			final AggregateWriteExpression value = entry.getValue();
			sb.append( ".." );
			sb.append( column );
			sb.append( '(' );
			value.append( sb, path + ".." + column, translator, expression );
			sb.append( ')' );
		}
	}
}
/**
 * Entry point for rendering the custom write expression of a top-level
 * struct-typed aggregate column. Resolves the (optionally qualified) column
 * path and delegates to the inherited struct rendering.
 */
private static class RootStructWriteExpression extends AggregateStructWriteExpression
		implements WriteExpressionRenderer {
	// Unqualified name of the aggregate column itself
	private final String rootName;

	RootStructWriteExpression(
			SelectableMapping aggregateColumn,
			SelectableMapping[] columns,
			TypeConfiguration typeConfiguration) {
		super( aggregateColumn );
		rootName = aggregateColumn.getSelectableName();
		initializeSubExpressions( columns, typeConfiguration );
	}

	@Override
	public void render(
			SqlAppender sqlAppender,
			SqlAstTranslator<?> translator,
			AggregateColumnWriteExpression aggregateColumnWriteExpression,
			String qualifier) {
		// Prefix the column with the table qualifier when one is provided
		final String path = qualifier == null || qualifier.isBlank()
				? rootName
				: qualifier + "." + rootName;
		append( sqlAppender, path, translator, aggregateColumnWriteExpression );
	}
}
/**
 * Leaf write expression for a single basic-typed attribute of a DB2 struct.
 * Renders {@code cast(<start><value><end> as <typeName>)}, where start/end come
 * from splitting the column's custom write expression template around its
 * single {@code ?} placeholder.
 */
private static class BasicStructWriteExpression implements AggregateWriteExpression {
	private final SelectableMapping selectableMapping;
	// Target DDL type for the cast wrapped around the rendered value
	private final String typeName;
	private final String customWriteExpressionStart;
	private final String customWriteExpressionEnd;

	BasicStructWriteExpression(SelectableMapping selectableMapping, String typeName, String customWriteExpression) {
		this.selectableMapping = selectableMapping;
		this.typeName = typeName;
		if ( customWriteExpression.equals( "?" ) ) {
			// Plain placeholder: nothing wraps the rendered value
			this.customWriteExpressionStart = "";
			this.customWriteExpressionEnd = "";
		}
		else {
			// Negative limit keeps a trailing empty part, so a template ending in "?"
			// still produces two parts instead of one (which would otherwise cause an
			// ArrayIndexOutOfBoundsException when assertions are disabled)
			final String[] parts = customWriteExpression.split( "\\?", -1 );
			assert parts.length == 2 : "Custom write expression must contain exactly one '?': " + customWriteExpression;
			this.customWriteExpressionStart = parts[0];
			this.customWriteExpressionEnd = parts[1];
		}
	}

	@Override
	public void append(
			SqlAppender sb,
			String path,
			SqlAstTranslator<?> translator,
			AggregateColumnWriteExpression expression) {
		// cast(<start><value><end> as <typeName>)
		sb.append( "cast(" );
		sb.append( customWriteExpressionStart );
		translator.render( expression.getValueExpression( selectableMapping ), SqlAstNodeRenderingMode.DEFAULT );
		sb.append( customWriteExpressionEnd );
		sb.append( " as " );
		sb.append( typeName );
		sb.append( ')' );
	}
}
@Override
public boolean preferSelectAggregateMapping(int aggregateSqlTypeCode) {
	// The JDBC driver cannot select java.sql.Struct values, so for struct
	// aggregates the individual parts must be selected instead
	if ( aggregateSqlTypeCode == STRUCT ) {
		return false;
	}
	return true;
}
@Override
public boolean preferBindAggregateMapping(int aggregateSqlTypeCode) {
	// Struct values are bound part-by-part through a special custom write
	// expression because the JDBC driver's struct binding support is poor
	if ( aggregateSqlTypeCode == STRUCT ) {
		return false;
	}
	return true;
}
/**
 * Creates the auxiliary database objects needed for a struct-typed aggregate column:
 * a serializer function (struct to XML), a deserializer function (XML to struct),
 * and a transform tying both to the struct type. These enable using struct types
 * in native queries and functions.
 */
@Override
public List<AuxiliaryDatabaseObject> aggregateAuxiliaryDatabaseObjects(
		Namespace namespace,
		String aggregatePath,
		ColumnTypeInformation aggregateColumnType,
		List<Column> aggregatedColumns) {
	// Only struct aggregates need these objects
	if ( aggregateColumnType.getTypeCode() != STRUCT ) {
		return Collections.emptyList();
	}
	final String columnType = aggregateColumnType.getTypeName();
	// The serialize and deserialize functions, as well as the transform are for supporting struct types in native queries and functions
	var list = new ArrayList<AuxiliaryDatabaseObject>( 3 );
	var serializerSb = new StringBuilder();
	var deserializerSb = new StringBuilder();
	// Serializer: wraps the struct's fields in an XML document rooted at XmlHelper.ROOT_TAG
	serializerSb.append( "create function " ).append( columnType ).append( "_serializer(v " ).append( columnType ).append( ") returns xml language sql " )
			.append( "return xmlelement(name \"").append( XmlHelper.ROOT_TAG ).append( "\"" );
	appendSerializer( aggregatedColumns, serializerSb, "v.." );
	serializerSb.append( ')' );
	// Deserializer: constructs a struct instance from the columns of an xmltable over the XML document
	deserializerSb.append( "create function " ).append( columnType ).append( "_deserializer(v xml) returns " ).append( columnType ).append( " language sql " )
			.append( "return select " ).append( columnType ).append( "()" );
	appendDeserializerConstructor( aggregatedColumns, deserializerSb, "" );
	deserializerSb.append( " from xmltable('$" ).append( XmlHelper.ROOT_TAG ).append( "' passing v as \"" )
			.append( XmlHelper.ROOT_TAG ).append( "\" columns" );
	appendDeserializerColumns( aggregatedColumns, deserializerSb, ' ', "" );
	deserializerSb.append( ") as t" );
	list.add(
			new NamedAuxiliaryDatabaseObject(
					"DB2 " + columnType + " serializer",
					namespace,
					serializerSb.toString(),
					"drop function " + columnType + "_serializer",
					Set.of( DB2Dialect.class.getName() )
			)
	);
	list.add(
			new NamedAuxiliaryDatabaseObject(
					"DB2 " + columnType + " deserializer",
					namespace,
					deserializerSb.toString(),
					"drop function " + columnType + "_deserializer",
					Set.of( DB2Dialect.class.getName() )
			)
	);
	// Transform registers the two functions as the SQL<->program conversion for the struct type
	list.add(
			new NamedAuxiliaryDatabaseObject(
					"DB2 " + columnType + " transform",
					namespace,
					"create transform for " + columnType + " db2_program (from sql with function " + columnType + "_serializer, to sql with function " + columnType + "_deserializer)",
					"drop transform db2_program for " + columnType,
					Set.of( DB2Dialect.class.getName() )
			)
	);
	return list;
}
/**
 * Recursively appends one {@code xmlelement(...)} per aggregated column to the
 * serializer function body. Multiple columns are combined with {@code xmlconcat};
 * nested struct columns recurse with an extended {@code ..} dereference prefix.
 *
 * @param prefix the accumulated struct dereference path, e.g. {@code "v.."}
 */
private static void appendSerializer(List<Column> aggregatedColumns, StringBuilder serializerSb, String prefix) {
	char sep;
	if ( aggregatedColumns.size() > 1 ) {
		// More than one element requires xmlconcat; '(' doubles as the opening paren
		serializerSb.append( ",xmlconcat" );
		sep = '(';
	}
	else {
		sep = ',';
	}
	for ( Column udtColumn : aggregatedColumns ) {
		serializerSb.append( sep );
		serializerSb.append( "xmlelement(name \"" ).append( udtColumn.getName() ).append( "\"" );
		if ( udtColumn.getSqlTypeCode() == STRUCT ) {
			// Nested struct: recurse, extending the dereference path
			final AggregateColumn aggregateColumn = (AggregateColumn) udtColumn;
			appendSerializer(
					aggregateColumn.getComponent().getAggregatedColumns(),
					serializerSb,
					prefix + udtColumn.getName() + ".."
			);
		}
		else if ( needsVarcharForBitDataCast( udtColumn.getSqlType() ) ) {
			// xmlelement can't handle binary types directly; cast to "varchar(n) for bit data"
			serializerSb.append( ",cast(" ).append( prefix ).append( udtColumn.getName() ).append( " as varchar(" )
					.append( udtColumn.getColumnSize( null, null ).getLength() ).append( ") for bit data)" );
		}
		else {
			serializerSb.append( ',' ).append( prefix ).append( udtColumn.getName() );
		}
		serializerSb.append( ')' );
		sep = ',';
	}
	if ( aggregatedColumns.size() > 1 ) {
		// Close the xmlconcat opened above
		serializerSb.append( ')' );
	}
}
/**
 * Recursively appends the struct attribute-mutator chain of the deserializer:
 * for each column, {@code ..name(<value>)}, where the value comes from the
 * xmltable alias {@code t}. Nested structs emit a nested constructor call and
 * recurse with a column-name prefix matching {@link #appendDeserializerColumns}.
 *
 * @param prefix the flattened column-name prefix used for nested struct members
 */
private static void appendDeserializerConstructor(
		List<Column> aggregatedColumns,
		StringBuilder deserializerSb,
		String prefix) {
	for ( Column udtColumn : aggregatedColumns ) {
		// Open the attribute mutator: ..name(
		deserializerSb.append( ".." ).append( udtColumn.getName() ).append( '(' );
		if ( udtColumn.getSqlTypeCode() == STRUCT ) {
			// Nested struct: start from an empty instance and apply its mutators recursively
			final AggregateColumn aggregateColumn = (AggregateColumn) udtColumn;
			deserializerSb.append( udtColumn.getSqlType() ).append( "()" );
			appendDeserializerConstructor(
					aggregateColumn.getComponent().getAggregatedColumns(),
					deserializerSb,
					udtColumn.getName() + "_"
			);
			deserializerSb.append( ')' );
		}
		else if ( needsVarcharForBitDataCast( udtColumn.getSqlType() ) ) {
			// The xmltable column carries "varchar(n) for bit data"; cast back to the real type.
			// The "))" closes both the cast and the mutator call.
			deserializerSb.append( "cast(t." ).append( prefix ).append( udtColumn.getName() ).append( " as " )
					.append( udtColumn.getSqlType() ).append( "))" );
		}
		else {
			deserializerSb.append( "t." ).append( prefix ).append( udtColumn.getName() ).append( ')' );
		}
	}
}
/**
 * Recursively appends the xmltable column list of the deserializer. Nested struct
 * members are flattened using a {@code name_} prefix (matching
 * {@link #appendDeserializerConstructor}); each leaf column gets an XPath
 * rooted at {@code XmlHelper.ROOT_TAG}.
 *
 * @param sep separator before the next column; a space for the first, then commas
 */
private static void appendDeserializerColumns(
		List<Column> aggregatedColumns,
		StringBuilder deserializerSb,
		char sep,
		String prefix) {
	for ( Column udtColumn : aggregatedColumns ) {
		if ( udtColumn.getSqlTypeCode() == STRUCT ) {
			// Flatten nested struct members into prefixed leaf columns
			final AggregateColumn aggregateColumn = (AggregateColumn) udtColumn;
			appendDeserializerColumns(
					aggregateColumn.getComponent().getAggregatedColumns(),
					deserializerSb,
					sep,
					udtColumn.getName() + "_"
			);
		}
		else {
			deserializerSb.append( sep );
			deserializerSb.append( prefix ).append( udtColumn.getName() ).append( ' ' );
			if ( needsVarcharForBitDataCast( udtColumn.getSqlType() ) ) {
				// xmltable can't produce binary types directly; read as "varchar(n) for bit data"
				deserializerSb.append( "varchar(" )
						.append( udtColumn.getColumnSize( null, null ).getLength() ).append( ") for bit data" );
			}
			else {
				deserializerSb.append( udtColumn.getSqlType() );
			}
			deserializerSb.append( " path '/" ).append( XmlHelper.ROOT_TAG ).append( '/' ).append( udtColumn.getName() ).append( '\'' );
		}
		sep = ',';
	}
}
/**
 * Whether the given DDL column type has to be cast to
 * {@code varchar(n) for bit data} when passed through xmlelement/xmltable,
 * which don't seem to support the "varbinary", "binary" or "char for bit data"
 * types directly.
 */
private static boolean needsVarcharForBitDataCast(String columnType) {
	final String normalized = columnType.toLowerCase( Locale.ROOT ).trim();
	if ( normalized.contains( "binary" ) ) {
		return true;
	}
	return normalized.startsWith( "char" ) && normalized.endsWith( " bit data" );
}
}

View File

@ -0,0 +1,496 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect.aggregate;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.type.SqlTypes.BIGINT;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.BLOB;
import static org.hibernate.type.SqlTypes.BOOLEAN;
import static org.hibernate.type.SqlTypes.CLOB;
import static org.hibernate.type.SqlTypes.DATE;
import static org.hibernate.type.SqlTypes.INTEGER;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.NCLOB;
import static org.hibernate.type.SqlTypes.SMALLINT;
import static org.hibernate.type.SqlTypes.STRUCT;
import static org.hibernate.type.SqlTypes.TIME;
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_UTC;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.TINYINT;
import static org.hibernate.type.SqlTypes.VARBINARY;
/**
 * Aggregate support for Oracle, covering JSON and struct aggregate columns.
 * The concrete JSON capabilities depend on the server version, captured by
 * the {@link JsonSupport} level chosen in {@link #valueOf(Dialect)}.
 */
public class OracleAggregateSupport extends AggregateSupportImpl {

	// One shared instance per version range; the boolean flags component check constraint support
	private static final AggregateSupport V23_INSTANCE = new OracleAggregateSupport( true, JsonSupport.OSON );
	private static final AggregateSupport V21_INSTANCE = new OracleAggregateSupport( false, JsonSupport.OSON );
	private static final AggregateSupport V19_INSTANCE = new OracleAggregateSupport( false, JsonSupport.MERGEPATCH );
	private static final AggregateSupport V18_INSTANCE = new OracleAggregateSupport( false, JsonSupport.QUERY_AND_PATH );
	private static final AggregateSupport V12_INSTANCE = new OracleAggregateSupport( false, JsonSupport.QUERY );
	private static final AggregateSupport LEGACY_INSTANCE = new OracleAggregateSupport( false, JsonSupport.NONE );

	private final boolean checkConstraintSupport;
	private final JsonSupport jsonSupport;

	private OracleAggregateSupport(boolean checkConstraintSupport, JsonSupport jsonSupport) {
		this.checkConstraintSupport = checkConstraintSupport;
		this.jsonSupport = jsonSupport;
	}

	/**
	 * Selects the appropriate shared instance for the dialect's database version.
	 */
	public static AggregateSupport valueOf(Dialect dialect) {
		final DatabaseVersion version = dialect.getVersion();
		switch ( version.getMajor() ) {
			case 12:
			case 13:
			case 14:
			case 15:
			case 16:
			case 17:
				return V12_INSTANCE;
			case 18:
				return V18_INSTANCE;
			case 19:
			case 20:
				return V19_INSTANCE;
			case 21:
			case 22:
				return V21_INSTANCE;
		}
		// Anything not matched above is either 23+ or pre-12
		return version.isSameOrAfter( 23 )
				? OracleAggregateSupport.V23_INSTANCE
				: OracleAggregateSupport.LEGACY_INSTANCE;
	}

	/**
	 * Produces the read expression for one component of an aggregate column by
	 * substituting a type-appropriate extraction expression for {@code placeholder}
	 * in {@code template}. JSON extraction syntax depends on the JsonSupport level;
	 * struct components use plain attribute dereference.
	 */
	@Override
	public String aggregateComponentCustomReadExpression(
			String template,
			String placeholder,
			String aggregateParentReadExpression,
			String column,
			ColumnTypeInformation aggregateColumnType,
			ColumnTypeInformation columnType) {
		switch ( aggregateColumnType.getTypeCode() ) {
			case JSON:
				switch ( jsonSupport ) {
					case OSON:
					case MERGEPATCH:
						// Dot-notation item methods (.number(), .string(), ...) are available
						switch ( columnType.getTypeCode() ) {
							case BOOLEAN:
								if ( columnType.getTypeName().toLowerCase( Locale.ROOT ).trim().startsWith( "number" ) ) {
									return template.replace(
											placeholder,
											"decode(" + aggregateParentReadExpression + "." + column + ".boolean(),'true',1,'false',0,null)"
									);
								}
								// Intentional fall-through: non-"number" booleans are read via .number()
							case TINYINT:
							case SMALLINT:
							case INTEGER:
							case BIGINT:
								return template.replace(
										placeholder,
										"" + aggregateParentReadExpression + "." + column + ".number()"
								);
							case DATE:
								return template.replace(
										placeholder,
										aggregateParentReadExpression + "." + column + ".date()"
								);
							case TIME:
								return template.replace(
										placeholder,
										"to_timestamp(" + aggregateParentReadExpression + "." + column + ".string(),'hh24:mi:ss')"
								);
							case TIMESTAMP:
								return template.replace(
										placeholder,
										// Don't use .timestamp() directly because that is limited to precision 6
										"to_timestamp(" + aggregateParentReadExpression + "." + column + ".string(),'YYYY-MM-DD\"T\"hh24:mi:ss.FF9')"
								);
							case TIMESTAMP_WITH_TIMEZONE:
							case TIMESTAMP_UTC:
								return template.replace(
										placeholder,
										// Don't use .timestamp() directly because that is limited to precision 6
										"to_timestamp_tz(" + aggregateParentReadExpression + "." + column + ".string(),'YYYY-MM-DD\"T\"hh24:mi:ss.FF9TZH:TZM')"
								);
							case BINARY:
							case VARBINARY:
							case LONG32VARBINARY:
								// We encode binary data as hex, so we have to decode here
								return template.replace(
										placeholder,
										"hextoraw(" + aggregateParentReadExpression + "." + column + ".string())"
								);
							case CLOB:
							case NCLOB:
							case BLOB:
								// LOBs are extracted through json_table with an explicit column type
								return template.replace(
										placeholder,
										"(select * from json_table(" + aggregateParentReadExpression + ",'$' columns (" + column + " " + columnType.getTypeName() + " path '$." + column + "')))"
								);
							case JSON:
								return template.replace(
										placeholder,
										aggregateParentReadExpression + "." + column
								);
							default:
								return template.replace(
										placeholder,
										"cast(" + aggregateParentReadExpression + "." + column + ".string() as " + columnType.getTypeName() + ')'
								);
						}
					case QUERY_AND_PATH:
					case QUERY:
						// Older servers: extract through json_value with explicit paths
						switch ( columnType.getTypeCode() ) {
							case BOOLEAN:
								if ( columnType.getTypeName().toLowerCase( Locale.ROOT ).trim().startsWith( "number" ) ) {
									return template.replace(
											placeholder,
											"decode(json_value(" + aggregateParentReadExpression + ",'$." + column + "'),'true',1,'false',0,null)"
									);
								}
								// Intentional fall-through: non-"number" booleans use the numeric returning clause
							case TINYINT:
							case SMALLINT:
							case INTEGER:
							case BIGINT:
								return template.replace(
										placeholder,
										"json_value(" + aggregateParentReadExpression + ",'$." + column + "' returning " + columnType.getTypeName() + ')'
								);
							case DATE:
								return template.replace(
										placeholder,
										"to_date(json_value(" + aggregateParentReadExpression + ",'$." + column + "'),'YYYY-MM-DD')"
								);
							case TIME:
								return template.replace(
										placeholder,
										"to_timestamp(json_value(" + aggregateParentReadExpression + ",'$." + column + "'),'hh24:mi:ss')"
								);
							case TIMESTAMP:
								return template.replace(
										placeholder,
										"to_timestamp(json_value(" + aggregateParentReadExpression + ",'$." + column + "'),'YYYY-MM-DD\"T\"hh24:mi:ss.FF9')"
								);
							case TIMESTAMP_WITH_TIMEZONE:
							case TIMESTAMP_UTC:
								return template.replace(
										placeholder,
										"to_timestamp_tz(json_value(" + aggregateParentReadExpression + ",'$." + column + "'),'YYYY-MM-DD\"T\"hh24:mi:ss.FF9TZH:TZM')"
								);
							case BINARY:
							case VARBINARY:
							case LONG32VARBINARY:
								// We encode binary data as hex, so we have to decode here
								return template.replace(
										placeholder,
										"hextoraw(json_value(" + aggregateParentReadExpression + ",'$." + column + "'))"
								);
							case CLOB:
							case NCLOB:
							case BLOB:
								// LOBs are extracted through json_table with an explicit column type
								return template.replace(
										placeholder,
										"(select * from json_table(" + aggregateParentReadExpression + ",'$' columns (" + column + " " + columnType.getTypeName() + " path '$." + column + "')))"
								);
							case JSON:
								return template.replace(
										placeholder,
										"json_value(" + aggregateParentReadExpression + ",'$." + column + "')"
								);
							default:
								return template.replace(
										placeholder,
										"cast(json_value(" + aggregateParentReadExpression + ",'$." + column + "') as " + columnType.getTypeName() + ')'
								);
						}
					case NONE:
						throw new UnsupportedOperationException( "The Oracle version doesn't support JSON aggregates!" );
				}
				// Every JsonSupport branch above returns or throws, so control never reaches here
			case STRUCT:
				return template.replace( placeholder, aggregateParentReadExpression + "." + column );
		}
		throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
	}

	/**
	 * Produces the assignment target for one component of an aggregate column.
	 */
	@Override
	public String aggregateComponentAssignmentExpression(
			String aggregateParentAssignmentExpression,
			String column,
			ColumnTypeInformation aggregateColumnType,
			ColumnTypeInformation columnType) {
		switch ( aggregateColumnType.getTypeCode() ) {
			case JSON:
				// For JSON we always have to replace the whole object
				return aggregateParentAssignmentExpression;
			case STRUCT:
				return aggregateParentAssignmentExpression + "." + column;
		}
		throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
	}

	/**
	 * Adapts a component's custom write expression so its value serializes
	 * correctly into a JSON document for the active JsonSupport level.
	 */
	private String jsonCustomWriteExpression(String customWriteExpression, int sqlTypeCode, String typeName) {
		switch ( jsonSupport ) {
			case OSON:
				// return customWriteExpression;
				// Intentional fall-through to the MERGEPATCH handling
			case MERGEPATCH:
				switch ( sqlTypeCode ) {
					case CLOB:
						return "to_clob(" + customWriteExpression + ")";
					case BOOLEAN:
						if ( typeName.toLowerCase( Locale.ROOT ).trim().startsWith( "number" ) ) {
							return "decode(" + customWriteExpression + ",1,'true',0,'false',null)";
						}
						// Intentional fall-through: other boolean mappings need no adaptation
					default:
						return customWriteExpression;
				}
		}
		throw new IllegalStateException( "JSON not supported!" );
	}

	@Override
	public boolean requiresAggregateCustomWriteExpressionRenderer(int aggregateSqlTypeCode) {
		// Only JSON aggregates need the custom merge-based writer below
		return aggregateSqlTypeCode == JSON;
	}

	@Override
	public WriteExpressionRenderer aggregateCustomWriteExpressionRenderer(
			SelectableMapping aggregateColumn,
			SelectableMapping[] columnsToUpdate,
			TypeConfiguration typeConfiguration) {
		final int aggregateSqlTypeCode = aggregateColumn.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode();
		switch ( aggregateSqlTypeCode ) {
			case JSON:
				return jsonAggregateColumnWriter( aggregateColumn, columnsToUpdate );
		}
		throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateSqlTypeCode );
	}

	@Override
	public boolean supportsComponentCheckConstraints() {
		return checkConstraintSupport;
	}

	/**
	 * Determines the DDL type name backing a JSON aggregate column; falls back to a
	 * version-appropriate default when no explicit column definition is present.
	 */
	private String determineJsonTypeName(SelectableMapping aggregateColumn) {
		final String columnDefinition = aggregateColumn.getColumnDefinition();
		if ( columnDefinition == null ) {
			assert aggregateColumn.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode() == JSON;
			switch ( jsonSupport ) {
				case OSON:
					return "json";
				case MERGEPATCH:
				case QUERY_AND_PATH:
				case QUERY:
					return "blob";
				case NONE:
					return "clob";
			}
		}
		return columnDefinition;
	}

	/**
	 * Levels of JSON functionality across Oracle versions, from the native
	 * OSON type down to no JSON support at all.
	 */
	enum JsonSupport {
		OSON,
		MERGEPATCH,
		QUERY_AND_PATH,
		QUERY,
		NONE;
	}

	private WriteExpressionRenderer jsonAggregateColumnWriter(
			SelectableMapping aggregateColumn,
			SelectableMapping[] columns) {
		return new RootJsonWriteExpression( aggregateColumn, columns, this );
	}

	/**
	 * Contract for rendering one piece of the JSON patch document used to
	 * write into a JSON aggregate column.
	 */
	interface JsonWriteExpression {
		void append(
				SqlAppender sb,
				String path,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression expression);
	}

	/**
	 * Write expression for a (possibly nested) JSON object, rendered as a
	 * json_object(...) call over its ordered sub-expressions.
	 */
	private static class AggregateJsonWriteExpression implements JsonWriteExpression {
		// Insertion-ordered so rendering is deterministic
		private final LinkedHashMap<String, JsonWriteExpression> subExpressions = new LinkedHashMap<>();
		protected final EmbeddableMappingType embeddableMappingType;
		// DDL type to use in the "returning" clause of json_object
		protected final String ddlTypeName;

		public AggregateJsonWriteExpression(
				SelectableMapping selectableMapping,
				OracleAggregateSupport aggregateSupport) {
			this.embeddableMappingType = ( (AggregateJdbcType) selectableMapping.getJdbcMapping().getJdbcType() )
					.getEmbeddableMappingType();
			this.ddlTypeName = aggregateSupport.determineJsonTypeName( selectableMapping );
		}

		/**
		 * Builds the expression tree from the flat column list: walks each column's
		 * selectable path (skipping the root and the leaf), creating nested aggregate
		 * nodes as needed, then registers a BasicJsonWriteExpression for the leaf.
		 */
		protected void initializeSubExpressions(SelectableMapping[] columns, OracleAggregateSupport aggregateSupport) {
			for ( SelectableMapping column : columns ) {
				final SelectablePath selectablePath = column.getSelectablePath();
				final SelectablePath[] parts = selectablePath.getParts();
				AggregateJsonWriteExpression currentAggregate = this;
				EmbeddableMappingType currentMappingType = embeddableMappingType;
				// Start at 1: parts[0] is the aggregate column itself; stop before the leaf
				for ( int i = 1; i < parts.length - 1; i++ ) {
					final SelectableMapping selectableMapping = currentMappingType.getJdbcValueSelectable(
							currentMappingType.getSelectableIndex( parts[i].getSelectableName() )
					);
					currentAggregate = (AggregateJsonWriteExpression) currentAggregate.subExpressions.computeIfAbsent(
							parts[i].getSelectableName(),
							k -> new AggregateJsonWriteExpression( selectableMapping, aggregateSupport )
					);
					currentMappingType = currentAggregate.embeddableMappingType;
				}
				final int sqlTypeCode = column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode();
				final String customWriteExpression = column.getWriteExpression();
				currentAggregate.subExpressions.put(
						parts[parts.length - 1].getSelectableName(),
						new BasicJsonWriteExpression(
								column,
								aggregateSupport.jsonCustomWriteExpression(
										customWriteExpression,
										sqlTypeCode,
										column.getColumnDefinition()
								)
						)
				);
			}
		}

		@Override
		public void append(
				SqlAppender sb,
				String path,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression expression) {
			sb.append( "json_object" );
			// '(' doubles as the opening paren on the first iteration
			char separator = '(';
			for ( Map.Entry<String, JsonWriteExpression> entry : subExpressions.entrySet() ) {
				final String column = entry.getKey();
				final JsonWriteExpression value = entry.getValue();
				final String subPath = path + "->'" + column + "'";
				sb.append( separator );
				if ( value instanceof AggregateJsonWriteExpression ) {
					// Nested objects need the 'key': prefix written here;
					// basic expressions write their own key
					sb.append( '\'' );
					sb.append( column );
					sb.append( "':" );
					value.append( sb, subPath, translator, expression );
				}
				else {
					value.append( sb, subPath, translator, expression );
				}
				separator = ',';
			}
			sb.append( " returning " );
			sb.append( ddlTypeName );
			sb.append( ')' );
		}
	}

	/**
	 * Entry point for rendering the custom write expression of a top-level JSON
	 * aggregate column: wraps the patch object in json_mergepatch against the
	 * current column value.
	 */
	private static class RootJsonWriteExpression extends AggregateJsonWriteExpression
			implements WriteExpressionRenderer {
		private final boolean nullable;
		private final String path;

		RootJsonWriteExpression(
				SelectableMapping aggregateColumn,
				SelectableMapping[] columns,
				OracleAggregateSupport aggregateSupport) {
			super( aggregateColumn, aggregateSupport );
			this.nullable = aggregateColumn.isNullable();
			this.path = aggregateColumn.getSelectionExpression();
			initializeSubExpressions( columns, aggregateSupport );
		}

		@Override
		public void render(
				SqlAppender sqlAppender,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression aggregateColumnWriteExpression,
				String qualifier) {
			// Prefix the column with the table qualifier when one is provided
			final String basePath;
			if ( qualifier == null || qualifier.isBlank() ) {
				basePath = path;
			}
			else {
				basePath = qualifier + "." + path;
			}
			sqlAppender.append( "json_mergepatch(" );
			if ( nullable ) {
				// Merge into an empty object when the current value is null
				sqlAppender.append( "coalesce(" );
				sqlAppender.append( basePath );
				sqlAppender.append( ",cast('{}' as " );
				sqlAppender.append( ddlTypeName );
				sqlAppender.append( "))" );
			}
			else {
				sqlAppender.append( basePath );
			}
			sqlAppender.append( ',' );
			append( sqlAppender, basePath, translator, aggregateColumnWriteExpression );
			sqlAppender.append( " returning " );
			sqlAppender.append( ddlTypeName );
			sqlAppender.append( ')' );
		}
	}

	/**
	 * Leaf write expression for a single basic-typed attribute: renders
	 * {@code 'name':<start><value><end>} inside the enclosing json_object,
	 * with start/end taken from the custom write expression template.
	 */
	private static class BasicJsonWriteExpression implements JsonWriteExpression {
		private final SelectableMapping selectableMapping;
		private final String customWriteExpressionStart;
		private final String customWriteExpressionEnd;

		BasicJsonWriteExpression(SelectableMapping selectableMapping, String customWriteExpression) {
			this.selectableMapping = selectableMapping;
			if ( customWriteExpression.equals( "?" ) ) {
				// Plain placeholder: nothing wraps the rendered value
				this.customWriteExpressionStart = "";
				this.customWriteExpressionEnd = "";
			}
			else {
				// Split the template around its single '?' placeholder
				final String[] parts = customWriteExpression.split( "\\?" );
				assert parts.length == 2;
				this.customWriteExpressionStart = parts[0];
				this.customWriteExpressionEnd = parts[1];
			}
		}

		@Override
		public void append(
				SqlAppender sb,
				String path,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression expression) {
			sb.append( '\'' );
			sb.append( selectableMapping.getSelectableName() );
			sb.append( "':" );
			sb.append( customWriteExpressionStart );
			// We use NO_UNTYPED here so that expressions which require type inference are casted explicitly,
			// since we don't know how the custom write expression looks like where this is embedded,
			// so we have to be pessimistic and avoid ambiguities
			translator.render( expression.getValueExpression( selectableMapping ), SqlAstNodeRenderingMode.NO_UNTYPED );
			sb.append( customWriteExpressionEnd );
		}
	}
}

View File

@ -0,0 +1,269 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect.aggregate;
import java.util.LinkedHashMap;
import java.util.Map;
import org.hibernate.dialect.Dialect;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.tool.schema.extract.spi.ColumnTypeInformation;
import org.hibernate.type.spi.TypeConfiguration;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.STRUCT;
import static org.hibernate.type.SqlTypes.VARBINARY;
/**
 * Aggregate support for PostgreSQL, covering jsonb and struct (composite type)
 * aggregate columns. JSON aggregates are written through a concatenation of
 * jsonb_build_object patches; struct components use composite-type field access.
 */
public class PostgreSQLAggregateSupport extends AggregateSupportImpl {

	private static final AggregateSupport INSTANCE = new PostgreSQLAggregateSupport();

	// Capabilities don't vary by version here, so a single shared instance suffices
	public static AggregateSupport valueOf(Dialect dialect) {
		return PostgreSQLAggregateSupport.INSTANCE;
	}

	/**
	 * Produces the read expression for one component of an aggregate column by
	 * substituting a type-appropriate extraction expression for {@code placeholder}
	 * in {@code template}.
	 */
	@Override
	public String aggregateComponentCustomReadExpression(
			String template,
			String placeholder,
			String aggregateParentReadExpression,
			String column,
			ColumnTypeInformation aggregateColumnType,
			ColumnTypeInformation columnType) {
		switch ( aggregateColumnType.getTypeCode() ) {
			case JSON:
				switch ( columnType.getTypeCode() ) {
					case JSON:
						// -> keeps the value as jsonb
						return template.replace(
								placeholder,
								aggregateParentReadExpression + "->'" + column + "'"
						);
					case BINARY:
					case VARBINARY:
					case LONG32VARBINARY:
						// We encode binary data as hex, so we have to decode here
						return template.replace(
								placeholder,
								"decode(" + aggregateParentReadExpression + "->>'" + column + "','hex')"
						);
					default:
						// ->> extracts as text, then cast to the component's type
						return template.replace(
								placeholder,
								"cast(" + aggregateParentReadExpression + "->>'" + column + "' as " + columnType.getTypeName() + ')'
						);
				}
			case STRUCT:
				// Composite-type field access requires the row expression to be parenthesized
				return template.replace( placeholder, '(' + aggregateParentReadExpression + ")." + column );
		}
		throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
	}

	/**
	 * Adapts a component's custom write expression so its value serializes
	 * correctly into a jsonb document.
	 */
	private static String jsonCustomWriteExpression(String customWriteExpression, int sqlTypeCode) {
		switch ( sqlTypeCode ) {
			case BINARY:
			case VARBINARY:
			case LONG32VARBINARY:
				// We encode binary data as hex
				return "to_jsonb(encode(" + customWriteExpression + ",'hex'))";
			default:
				return "to_jsonb(" + customWriteExpression + ")";
		}
	}

	/**
	 * Produces the assignment target for one component of an aggregate column.
	 */
	@Override
	public String aggregateComponentAssignmentExpression(
			String aggregateParentAssignmentExpression,
			String column,
			ColumnTypeInformation aggregateColumnType,
			ColumnTypeInformation columnType) {
		switch ( aggregateColumnType.getTypeCode() ) {
			case JSON:
				// For JSON we always have to replace the whole object
				return aggregateParentAssignmentExpression;
			case STRUCT:
				return aggregateParentAssignmentExpression + "." + column;
		}
		throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateColumnType.getTypeCode() );
	}

	@Override
	public boolean requiresAggregateCustomWriteExpressionRenderer(int aggregateSqlTypeCode) {
		// Only JSON aggregates need the custom concatenation-based writer below
		switch ( aggregateSqlTypeCode ) {
			case JSON:
				return true;
		}
		return false;
	}

	@Override
	public boolean preferSelectAggregateMapping(int aggregateSqlTypeCode) {
		// The JDBC driver does not support selecting java.sql.Struct, so return false to select individual parts
		return aggregateSqlTypeCode != STRUCT;
	}

	@Override
	public WriteExpressionRenderer aggregateCustomWriteExpressionRenderer(
			SelectableMapping aggregateColumn,
			SelectableMapping[] columnsToUpdate,
			TypeConfiguration typeConfiguration) {
		final int aggregateSqlTypeCode = aggregateColumn.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode();
		switch ( aggregateSqlTypeCode ) {
			case JSON:
				return jsonAggregateColumnWriter( aggregateColumn, columnsToUpdate );
		}
		throw new IllegalArgumentException( "Unsupported aggregate SQL type: " + aggregateSqlTypeCode );
	}

	private WriteExpressionRenderer jsonAggregateColumnWriter(
			SelectableMapping aggregateColumn,
			SelectableMapping[] columns) {
		return new RootJsonWriteExpression( aggregateColumn, columns );
	}

	/**
	 * Contract for rendering one piece of the jsonb patch used to write into
	 * a JSON aggregate column.
	 */
	interface JsonWriteExpression {
		void append(
				SqlAppender sb,
				String path,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression expression);
	}

	/**
	 * Write expression for a (possibly nested) JSON object, rendered as
	 * {@code ||jsonb_build_object(...)} concatenated onto the current value.
	 */
	private static class AggregateJsonWriteExpression implements JsonWriteExpression {
		// Insertion-ordered so rendering is deterministic
		private final LinkedHashMap<String, JsonWriteExpression> subExpressions = new LinkedHashMap<>();

		/**
		 * Builds the expression tree from the flat column list: walks each column's
		 * selectable path (skipping the root at index 0 and the leaf), creating nested
		 * aggregate nodes as needed, then registers a BasicJsonWriteExpression for the leaf.
		 */
		protected void initializeSubExpressions(SelectableMapping[] columns) {
			for ( SelectableMapping column : columns ) {
				final SelectablePath selectablePath = column.getSelectablePath();
				final SelectablePath[] parts = selectablePath.getParts();
				AggregateJsonWriteExpression currentAggregate = this;
				for ( int i = 1; i < parts.length - 1; i++ ) {
					currentAggregate = (AggregateJsonWriteExpression) currentAggregate.subExpressions.computeIfAbsent(
							parts[i].getSelectableName(),
							k -> new AggregateJsonWriteExpression()
					);
				}
				final int sqlTypeCode = column.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode();
				final String customWriteExpression = column.getWriteExpression();
				currentAggregate.subExpressions.put(
						parts[parts.length - 1].getSelectableName(),
						new BasicJsonWriteExpression(
								column,
								jsonCustomWriteExpression( customWriteExpression, sqlTypeCode )
						)
				);
			}
		}

		@Override
		public void append(
				SqlAppender sb,
				String path,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression expression) {
			// The || concatenation merges this object's keys over the base written by the caller
			sb.append( "||jsonb_build_object" );
			// '(' doubles as the opening paren on the first iteration
			char separator = '(';
			for ( Map.Entry<String, JsonWriteExpression> entry : subExpressions.entrySet() ) {
				final String column = entry.getKey();
				final JsonWriteExpression value = entry.getValue();
				final String subPath = path + "->'" + column + "'";
				sb.append( separator );
				if ( value instanceof AggregateJsonWriteExpression ) {
					// Nested object: start from the current sub-document (or an empty one)
					// and let the recursion concatenate its patch onto it
					sb.append( '\'' );
					sb.append( column );
					sb.append( "',coalesce(" );
					sb.append( subPath );
					sb.append( ",'{}')" );
					value.append( sb, subPath, translator, expression );
				}
				else {
					value.append( sb, subPath, translator, expression );
				}
				separator = ',';
			}
			sb.append( ')' );
		}
	}

	/**
	 * Entry point for rendering the custom write expression of a top-level JSON
	 * aggregate column: emits the (possibly coalesced) base value followed by
	 * the concatenated jsonb_build_object patch.
	 */
	private static class RootJsonWriteExpression extends AggregateJsonWriteExpression
			implements WriteExpressionRenderer {
		private final boolean nullable;
		private final String path;

		RootJsonWriteExpression(SelectableMapping aggregateColumn, SelectableMapping[] columns) {
			this.nullable = aggregateColumn.isNullable();
			this.path = aggregateColumn.getSelectionExpression();
			initializeSubExpressions( columns );
		}

		@Override
		public void render(
				SqlAppender sqlAppender,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression aggregateColumnWriteExpression,
				String qualifier) {
			// Prefix the column with the table qualifier when one is provided
			final String basePath;
			if ( qualifier == null || qualifier.isBlank() ) {
				basePath = path;
			}
			else {
				basePath = qualifier + "." + path;
			}
			if ( nullable ) {
				// Concatenate onto an empty object when the current value is null
				sqlAppender.append( "coalesce(" );
				sqlAppender.append( basePath );
				sqlAppender.append( ",'{}')" );
			}
			else {
				sqlAppender.append( basePath );
			}
			append( sqlAppender, basePath, translator, aggregateColumnWriteExpression );
		}
	}

	/**
	 * Leaf write expression for a single basic-typed attribute: renders
	 * {@code 'name',<start><value><end>} as one key/value pair of the
	 * enclosing jsonb_build_object, with start/end taken from the custom
	 * write expression template.
	 */
	private static class BasicJsonWriteExpression implements JsonWriteExpression {
		private final SelectableMapping selectableMapping;
		private final String customWriteExpressionStart;
		private final String customWriteExpressionEnd;

		BasicJsonWriteExpression(SelectableMapping selectableMapping, String customWriteExpression) {
			this.selectableMapping = selectableMapping;
			if ( customWriteExpression.equals( "?" ) ) {
				// Plain placeholder: nothing wraps the rendered value
				this.customWriteExpressionStart = "";
				this.customWriteExpressionEnd = "";
			}
			else {
				// Split the template around its single '?' placeholder
				final String[] parts = customWriteExpression.split( "\\?" );
				assert parts.length == 2;
				this.customWriteExpressionStart = parts[0];
				this.customWriteExpressionEnd = parts[1];
			}
		}

		@Override
		public void append(
				SqlAppender sb,
				String path,
				SqlAstTranslator<?> translator,
				AggregateColumnWriteExpression expression) {
			sb.append( '\'' );
			sb.append( selectableMapping.getSelectableName() );
			sb.append( "'," );
			sb.append( customWriteExpressionStart );
			// We use NO_UNTYPED here so that expressions which require type inference are casted explicitly,
			// since we don't know how the custom write expression looks like where this is embedded,
			// so we have to be pessimistic and avoid ambiguities
			translator.render( expression.getValueExpression( selectableMapping ), SqlAstNodeRenderingMode.NO_UNTYPED );
			sb.append( customWriteExpressionEnd );
		}
	}
}

View File

@ -15,7 +15,10 @@ public final class CharSequenceHelper {
}
public static CharSequence subSequence(CharSequence sequence, int start, int end) {
if ( sequence instanceof SubSequence ) {
if ( start == 0 && end == sequence.length() ) {
return sequence;
}
else if ( sequence instanceof SubSequence ) {
return sequence.subSequence( start, end );
}
else {
@ -102,4 +105,23 @@ public final class CharSequenceHelper {
return indexOf( charSequence.toString(), target, fromIndex, endIndex );
}
}
public static boolean regionMatchesIgnoreCase(
CharSequence lhs,
int lhsStart,
CharSequence rhs,
int rhsStart,
int length) {
if ( lhsStart + length <= lhs.length() && rhsStart + length <= rhs.length() ) {
for ( int i = 0; i < length; i++ ) {
final char c1 = lhs.charAt( lhsStart + i );
final char c2 = rhs.charAt( rhsStart + i );
if ( c1 != c2 && Character.toLowerCase( c1 ) != Character.toLowerCase( c2 ) ) {
return false;
}
}
return true;
}
return false;
}
}

View File

@ -123,6 +123,15 @@ public class LoaderSqlAstCreationState
return fetchProcessor.visitFetches( fetchParent, this );
}
@Override
public List<Fetch> visitNestedFetches(FetchParent fetchParent) {
final FetchParent nestingFetchParent = processingState.getNestingFetchParent();
processingState.setNestingFetchParent( fetchParent );
final List<Fetch> fetches = fetchProcessor.visitFetches( fetchParent, this );
processingState.setNestingFetchParent( nestingFetchParent );
return fetches;
}
@Override
public boolean isResolvingCircularFetch() {
return resolvingCircularFetch;

View File

@ -0,0 +1,124 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.mapping;
import org.hibernate.dialect.Dialect;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.sql.Template;
/**
* An aggregate column is a column of type {@link org.hibernate.type.SqlTypes#STRUCT},
* {@link org.hibernate.type.SqlTypes#JSON} or {@link org.hibernate.type.SqlTypes#SQLXML}
* that aggregates a component into a single column.
*/
public class AggregateColumn extends Column {
	// The component that is aggregated into this single column
	private final Component component;
	/**
	 * Copy constructor: clones the state of the given column and
	 * associates the aggregated component with it.
	 */
	public AggregateColumn(Column column, Component component) {
		setLength( column.getLength() );
		setPrecision( column.getPrecision() );
		setScale( column.getScale() );
		setValue( column.getValue() );
		setTypeIndex( column.getTypeIndex() );
		setName( column.getQuotedName() );
		setNullable( column.isNullable() );
		setUnique( column.isUnique() );
		setSqlType( column.getSqlType() );
		setSqlTypeCode( column.getSqlTypeCode() );
		uniqueInteger = column.uniqueInteger; //usually useless
		setCheckConstraint( column.getCheckConstraint() );
		setComment( column.getComment() );
		setDefaultValue( column.getDefaultValue() );
		setGeneratedAs( column.getGeneratedAs() );
		setAssignmentExpression( column.getAssignmentExpression() );
		setCustomRead( column.getCustomRead() );
		setCustomWrite( column.getCustomWrite() );
		this.component = component;
	}
	public Component getComponent() {
		return component;
	}
	/**
	 * The path of this aggregate within possibly nested aggregates,
	 * built from the quoted names of the enclosing aggregate columns.
	 */
	public SelectablePath getSelectablePath() {
		return getSelectablePath( component );
	}
	// Walks up the chain of parent aggregate columns to build the full path
	private static SelectablePath getSelectablePath(Component component) {
		final AggregateColumn aggregateColumn = component.getAggregateColumn();
		final AggregateColumn parentAggregateColumn = component.getParentAggregateColumn();
		final String simpleAggregateName = aggregateColumn.getQuotedName();
		if ( parentAggregateColumn == null ) {
			return new SelectablePath( simpleAggregateName );
		}
		else {
			return getSelectablePath( parentAggregateColumn.getComponent() ).append( simpleAggregateName );
		}
	}
	/**
	 * The read expression template for this aggregate column;
	 * nested aggregate access is rendered through the dialect's aggregate support.
	 */
	public String getAggregateReadExpressionTemplate(Dialect dialect) {
		return getAggregateReadExpressionTemplate( dialect, component );
	}
	private static String getAggregateReadExpressionTemplate(Dialect dialect, Component component) {
		final AggregateColumn aggregateColumn = component.getAggregateColumn();
		final AggregateColumn parentAggregateColumn = component.getParentAggregateColumn();
		final String simpleAggregateName = aggregateColumn.getQuotedName( dialect );
		final String aggregateSelectableExpression;
		if ( parentAggregateColumn == null ) {
			// Top level: the aggregate is addressed directly on the table alias placeholder
			aggregateSelectableExpression = Template.TEMPLATE + "." + simpleAggregateName;
		}
		else {
			// Nested: let the dialect render access to this aggregate within its parent
			aggregateSelectableExpression = dialect.getAggregateSupport().aggregateComponentCustomReadExpression(
					"",
					"",
					getAggregateReadExpressionTemplate(
							dialect,
							parentAggregateColumn.getComponent()
					),
					simpleAggregateName,
					parentAggregateColumn, aggregateColumn
			);
		}
		return aggregateSelectableExpression;
	}
	/**
	 * The assignment expression template for this aggregate column;
	 * nested aggregate assignment is rendered through the dialect's aggregate support.
	 */
	public String getAggregateAssignmentExpressionTemplate(Dialect dialect) {
		return getAggregateAssignmentExpressionTemplate( dialect, component );
	}
	private static String getAggregateAssignmentExpressionTemplate(Dialect dialect, Component component) {
		final AggregateColumn aggregateColumn = component.getAggregateColumn();
		final AggregateColumn parentAggregateColumn = component.getParentAggregateColumn();
		final String simpleAggregateName = aggregateColumn.getQuotedName( dialect );
		final String aggregateSelectableExpression;
		if ( parentAggregateColumn == null ) {
			// Top level: assign directly against the table alias placeholder
			aggregateSelectableExpression = Template.TEMPLATE + "." + simpleAggregateName;
		}
		else {
			aggregateSelectableExpression = dialect.getAggregateSupport().aggregateComponentAssignmentExpression(
					getAggregateAssignmentExpressionTemplate(
							dialect,
							parentAggregateColumn.getComponent()
					),
					simpleAggregateName,
					parentAggregateColumn,
					aggregateColumn
			);
		}
		return aggregateSelectableExpression;
	}
	/**
	 * Shallow copy, the value is not copied
	 */
	@Override
	public AggregateColumn clone() {
		return new AggregateColumn( this, component );
	}
}

View File

@ -39,6 +39,7 @@ import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.model.convert.spi.BasicValueConverter;
import org.hibernate.resource.beans.spi.BeanInstanceProducer;
import org.hibernate.resource.beans.spi.ManagedBean;
@ -93,6 +94,7 @@ public class BasicValue extends SimpleValue implements JdbcTypeIndicators, Resol
private String ownerName;
private String propertyName;
private AggregateColumn aggregateColumn;
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Resolved state - available after `#resolve`
@ -347,6 +349,21 @@ public class BasicValue extends SimpleValue implements JdbcTypeIndicators, Resol
}
}
public AggregateColumn getAggregateColumn() {
return aggregateColumn;
}
public void setAggregateColumn(AggregateColumn aggregateColumn) {
this.aggregateColumn = aggregateColumn;
}
public SelectablePath createSelectablePath(String selectableName) {
if ( aggregateColumn != null ) {
return aggregateColumn.getSelectablePath().append( selectableName );
}
return new SelectablePath( selectableName );
}
protected Resolution<?> buildResolution() {
Properties typeParameters = getTypeParameters();
if ( typeParameters != null
@ -450,15 +467,16 @@ public class BasicValue extends SimpleValue implements JdbcTypeIndicators, Resol
? explicitJdbcTypeAccess.apply( typeConfiguration )
: null;
final BasicJavaType<?> basicJavaType = javaType == null && jdbcType != null
final JavaType<?> basicJavaType = javaType == null && jdbcType != null
? jdbcType.getJdbcRecommendedJavaTypeMapping( null, null, typeConfiguration )
: (BasicJavaType<?>) javaType;
: javaType;
if ( basicJavaType == null ) {
throw new MappingException( "Unable to determine JavaType to use : " + this );
}
final TypeDefinition autoAppliedTypeDef =
getBuildingContext().getTypeDefinitionRegistry().resolveAutoApplied( basicJavaType );
final TypeDefinition autoAppliedTypeDef = basicJavaType instanceof BasicJavaType<?>
? getBuildingContext().getTypeDefinitionRegistry().resolveAutoApplied( (BasicJavaType<?>) basicJavaType )
: null;
if ( autoAppliedTypeDef != null
&& ( !basicJavaType.getJavaTypeClass().isEnum() || enumerationStyle == null ) ) {
log.debug( "BasicValue resolution matched auto-applied type-definition" );
@ -681,27 +699,35 @@ public class BasicValue extends SimpleValue implements JdbcTypeIndicators, Resol
@Override
public int getPreferredSqlTypeCodeForBoolean() {
return getBuildingContext().getPreferredSqlTypeCodeForBoolean();
return resolveJdbcTypeCode( getBuildingContext().getPreferredSqlTypeCodeForBoolean() );
}
@Override
public int getPreferredSqlTypeCodeForDuration() {
return getBuildingContext().getPreferredSqlTypeCodeForDuration();
return resolveJdbcTypeCode( getBuildingContext().getPreferredSqlTypeCodeForDuration() );
}
@Override
public int getPreferredSqlTypeCodeForUuid() {
return getBuildingContext().getPreferredSqlTypeCodeForUuid();
return resolveJdbcTypeCode( getBuildingContext().getPreferredSqlTypeCodeForUuid() );
}
@Override
public int getPreferredSqlTypeCodeForInstant() {
return getBuildingContext().getPreferredSqlTypeCodeForInstant();
return resolveJdbcTypeCode( getBuildingContext().getPreferredSqlTypeCodeForInstant() );
}
@Override
public int getPreferredSqlTypeCodeForArray() {
return getBuildingContext().getPreferredSqlTypeCodeForArray();
return resolveJdbcTypeCode( getBuildingContext().getPreferredSqlTypeCodeForArray() );
}
@Override
public int resolveJdbcTypeCode(int jdbcTypeCode) {
return aggregateColumn == null
? jdbcTypeCode
: getMetadata().getDatabase().getDialect().getAggregateSupport()
.aggregateComponentSqlTypeCode( aggregateColumn.getSqlTypeCode(), jdbcTypeCode );
}
@Override

View File

@ -58,6 +58,7 @@ public class Column implements Selectable, Serializable, Cloneable, ColumnTypeIn
private String comment;
private String defaultValue;
private String generatedAs;
private String assignmentExpression;
private String customWrite;
private String customRead;
private Size columnSize;
@ -205,25 +206,28 @@ public class Column implements Selectable, Serializable, Cloneable, ColumnTypeIn
}
public int getSqlTypeCode(Mapping mapping) throws MappingException {
Type type = getValue().getType();
try {
int sqlTypeCode = type.getSqlTypeCodes( mapping )[getTypeIndex()];
if ( getSqlTypeCode() != null && getSqlTypeCode() != sqlTypeCode ) {
throw new MappingException( "SQLType code's does not match. mapped as " + sqlTypeCode + " but is " + getSqlTypeCode() );
if ( sqlTypeCode == null ) {
Type type = getValue().getType();
try {
int sqlTypeCode = type.getSqlTypeCodes( mapping )[getTypeIndex()];
if ( getSqlTypeCode() != null && getSqlTypeCode() != sqlTypeCode ) {
throw new MappingException( "SQLType code's does not match. mapped as " + sqlTypeCode + " but is " + getSqlTypeCode() );
}
return this.sqlTypeCode = sqlTypeCode;
}
catch (Exception e) {
throw new MappingException(
"Could not determine type for column " +
name +
" of type " +
type.getClass().getName() +
": " +
e.getClass().getName(),
e
);
}
return sqlTypeCode;
}
catch (Exception e) {
throw new MappingException(
"Could not determine type for column " +
name +
" of type " +
type.getClass().getName() +
": " +
e.getClass().getName(),
e
);
}
return sqlTypeCode;
}
private String getSqlTypeName(TypeConfiguration typeConfiguration, Dialect dialect, Mapping mapping) {
@ -495,12 +499,12 @@ public class Column implements Selectable, Serializable, Cloneable, ColumnTypeIn
@Override
public String getText(Dialect d) {
return getQuotedName( d );
return assignmentExpression != null ? assignmentExpression : getQuotedName( d );
}
@Override
public String getText() {
return getName();
return assignmentExpression != null ? assignmentExpression : getName();
}
@Override
@ -553,6 +557,14 @@ public class Column implements Selectable, Serializable, Cloneable, ColumnTypeIn
this.generatedAs = generatedAs;
}
public String getAssignmentExpression() {
return assignmentExpression;
}
public void setAssignmentExpression(String assignmentExpression) {
this.assignmentExpression = assignmentExpression;
}
public String getCustomWrite() {
return customWrite;
}
@ -599,6 +611,7 @@ public class Column implements Selectable, Serializable, Cloneable, ColumnTypeIn
copy.setComment( comment );
copy.setDefaultValue( defaultValue );
copy.setGeneratedAs( generatedAs );
copy.setAssignmentExpression( assignmentExpression );
copy.setCustomRead( customRead );
copy.setCustomWrite( customWrite );
return copy;

View File

@ -9,10 +9,12 @@ package org.hibernate.mapping;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.hibernate.MappingException;
@ -25,6 +27,7 @@ import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.id.CompositeNestedGeneratedValueGenerator;
import org.hibernate.id.IdentifierGenerator;
@ -71,6 +74,9 @@ public class Component extends SimpleValue implements MetaAttributable, Sortable
// cache the status of the type
private volatile Type type;
private AggregateColumn aggregateColumn;
private AggregateColumn parentAggregateColumn;
private String structName;
// lazily computed based on 'properties' field: invalidate by setting to null when properties are modified
private transient List<Selectable> cachedSelectables;
// lazily computed based on 'properties' field: invalidate by setting to null when properties are modified
@ -152,11 +158,7 @@ public class Component extends SimpleValue implements MetaAttributable, Sortable
@Override
public int getColumnSpan() {
int span = 0;
for ( Property property : getProperties() ) {
span += property.getColumnSpan();
}
return span;
return getSelectables().size();
}
@Override @Deprecated @SuppressWarnings("deprecation")
@ -197,6 +199,121 @@ public class Component extends SimpleValue implements MetaAttributable, Sortable
return embedded;
}
public AggregateColumn getAggregateColumn() {
return aggregateColumn;
}
	public void setAggregateColumn(AggregateColumn aggregateColumn) {
		this.aggregateColumn = aggregateColumn;
		// Propagate the aggregate to all (possibly nested) sub-values,
		// since they need to know about it during type resolution
		notifyPropertiesAboutAggregateColumn( aggregateColumn, this );
	}
	/**
	 * Returns the columns of this component, treating each aggregated
	 * sub-component as a single column (its aggregate column).
	 */
	public List<Column> getAggregatedColumns() {
		final List<Column> aggregatedColumns = new ArrayList<>( getPropertySpan() );
		collectAggregatedColumns( aggregatedColumns, this );
		return aggregatedColumns;
	}
private void collectAggregatedColumns(List<Column> aggregatedColumns, Component component) {
for ( Property property : component.getProperties() ) {
final Value value = property.getValue();
if ( value instanceof Component ) {
final Component subComponent = (Component) value;
final AggregateColumn subAggregate = subComponent.getAggregateColumn();
if ( subAggregate != null ) {
aggregatedColumns.add( subAggregate );
}
else {
collectAggregatedColumns( aggregatedColumns, subComponent );
}
}
else {
aggregatedColumns.addAll( value.getColumns() );
}
}
}
private void notifyPropertiesAboutAggregateColumn(AggregateColumn aggregateColumn, Component component) {
for ( Property property : component.getProperties() ) {
// Let the BasicValue of every sub-column know about the aggregate,
// which is needed in type resolution
final Value value = property.getValue();
if ( value instanceof BasicValue ) {
assert ( (BasicValue) value ).getResolution() == null;
( (BasicValue) value ).setAggregateColumn( aggregateColumn );
}
else if ( value instanceof Component ) {
final Component subComponent = (Component) value;
if ( subComponent.getAggregateColumn() == null ) {
subComponent.notifyPropertiesAboutAggregateColumn( aggregateColumn, subComponent );
}
else {
( (Component) value ).setParentAggregateColumn( aggregateColumn );
}
}
}
}
public AggregateColumn getParentAggregateColumn() {
return parentAggregateColumn;
}
public void setParentAggregateColumn(AggregateColumn parentAggregateColumn) {
this.parentAggregateColumn = parentAggregateColumn;
}
public String getStructName() {
return structName;
}
public void setStructName(String structName) {
this.structName = structName;
}
	// Checks this component's own properties for duplicated columns,
	// starting from a fresh (empty) set of seen column names
	protected void checkColumnDuplication() throws MappingException {
		checkPropertyColumnDuplication( new HashSet<>(), getProperties() );
	}
protected void checkColumnDuplication(java.util.Set<String> distinctColumns, Value value)
throws MappingException {
if ( value != null ) {
for ( Selectable columnOrFormula : value.getSelectables() ) {
if ( !columnOrFormula.isFormula() ) {
final Column col = (Column) columnOrFormula;
if ( !distinctColumns.add( col.getName() ) ) {
throw new MappingException(
"Column '" + col.getName()
+ "' is duplicated in mapping for component '" + getRoleName()
+ "' (use '@Column(insertable=false, updatable=false)' when mapping multiple properties to the same column)"
);
}
}
}
}
}
protected void checkPropertyColumnDuplication(Set<String> distinctColumns, List<Property> properties)
throws MappingException {
for ( Property prop : properties ) {
Value value = prop.getValue();
if ( value instanceof Component ) {
Component component = (Component) value;
AggregateColumn aggregateColumn = component.getAggregateColumn();
if ( aggregateColumn == null ) {
checkPropertyColumnDuplication( distinctColumns, component.getProperties() );
}
else {
component.checkColumnDuplication();
checkColumnDuplication( distinctColumns, aggregateColumn.getValue() );
}
}
else {
if ( prop.isUpdateable() || prop.isInsertable() ) {
checkColumnDuplication( distinctColumns, value);
}
}
}
}
public String getComponentClassName() {
return componentClassName;
}
@ -309,6 +426,9 @@ public class Component extends SimpleValue implements MetaAttributable, Sortable
&& Objects.equals( properties, other.properties )
&& Objects.equals( componentClassName, other.componentClassName )
&& embedded == other.embedded
&& Objects.equals( aggregateColumn, other.aggregateColumn )
&& Objects.equals( parentAggregateColumn, other.parentAggregateColumn )
&& Objects.equals( structName, other.structName )
&& Objects.equals( parentProperty, other.parentProperty )
&& Objects.equals( metaAttributes, other.metaAttributes );
}

View File

@ -1046,7 +1046,14 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
Value value = prop.getValue();
if ( value instanceof Component ) {
Component component = (Component) value;
checkPropertyColumnDuplication( distinctColumns, component.getProperties() );
AggregateColumn aggregateColumn = component.getAggregateColumn();
if ( aggregateColumn == null ) {
checkPropertyColumnDuplication( distinctColumns, component.getProperties() );
}
else {
component.checkColumnDuplication();
checkColumnDuplication( distinctColumns, aggregateColumn.getValue() );
}
}
else {
if ( prop.isUpdateable() || prop.isInsertable() ) {

View File

@ -0,0 +1,258 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.mapping;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.hibernate.HibernateException;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.ContributableDatabaseObject;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.boot.model.relational.QualifiedTypeName;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.tool.schema.extract.spi.ColumnInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.jboss.logging.Logger;
/**
* A mapping model object representing a relational database {@linkplain org.hibernate.annotations.Struct UDT}.
*/
public class UserDefinedType implements Serializable, ContributableDatabaseObject {
	// The contributor (e.g. ORM itself) that registered this database object
	private final String contributor;
	private Identifier catalog;
	private Identifier schema;
	private Identifier name;
	// Columns keyed by their canonical name, preserving insertion order
	private final Map<String, Column> columns = new LinkedHashMap<>();
	private String comment;
	public UserDefinedType(
			String contributor,
			Namespace namespace,
			Identifier physicalTypeName) {
		this.contributor = contributor;
		this.catalog = namespace.getPhysicalName().getCatalog();
		this.schema = namespace.getPhysicalName().getSchema();
		this.name = physicalTypeName;
	}
	@Override
	public String getContributor() {
		return contributor;
	}
	// Renders the fully qualified (catalog.schema.name) type name for SQL generation
	public String getQualifiedName(SqlStringGenerationContext context) {
		return context.format( new QualifiedTypeName( catalog, schema, name ) );
	}
	public void setName(String name) {
		this.name = Identifier.toIdentifier( name );
	}
	public String getName() {
		return name == null ? null : name.getText();
	}
	public Identifier getNameIdentifier() {
		return name;
	}
	public String getQuotedName() {
		return name == null ? null : name.toString();
	}
	public String getQuotedName(Dialect dialect) {
		return name == null ? null : name.render( dialect );
	}
	public QualifiedTableName getQualifiedTableName() {
		return name == null ? null : new QualifiedTableName( catalog, schema, name );
	}
	public boolean isQuoted() {
		return name.isQuoted();
	}
	public void setQuoted(boolean quoted) {
		if ( quoted == name.isQuoted() ) {
			return;
		}
		// Identifier is immutable, so replace it to change the quoting
		this.name = new Identifier( name.getText(), quoted );
	}
	public void setSchema(String schema) {
		this.schema = Identifier.toIdentifier( schema );
	}
	public String getSchema() {
		return schema == null ? null : schema.getText();
	}
	public String getQuotedSchema() {
		return schema == null ? null : schema.toString();
	}
	public String getQuotedSchema(Dialect dialect) {
		return schema == null ? null : schema.render( dialect );
	}
	public boolean isSchemaQuoted() {
		return schema != null && schema.isQuoted();
	}
	public void setCatalog(String catalog) {
		this.catalog = Identifier.toIdentifier( catalog );
	}
	public String getCatalog() {
		return catalog == null ? null : catalog.getText();
	}
	public String getQuotedCatalog() {
		return catalog == null ? null : catalog.render();
	}
	public String getQuotedCatalog(Dialect dialect) {
		return catalog == null ? null : catalog.render( dialect );
	}
	public boolean isCatalogQuoted() {
		return catalog != null && catalog.isQuoted();
	}
	/**
	 * Return the column which is identified by column provided as argument.
	 *
	 * @param column column with at least a name.
	 * @return the underlying column or null if not inside this table.
	 * Note: the instance *can* be different than the input parameter,
	 * but the name will be the same.
	 */
	public Column getColumn(Column column) {
		if ( column == null ) {
			return null;
		}
		else {
			final Column existing = columns.get( column.getCanonicalName() );
			return column.equals( existing ) ? existing : null;
		}
	}
	public Column getColumn(Identifier name) {
		if ( name == null ) {
			return null;
		}
		return columns.get( name.getCanonicalName() );
	}
	// Positional access; the loop runs n-1 times, so positions are effectively
	// 1-based (presumably matching the legacy Table#getColumn(int) convention
	// — TODO confirm). Note that n = 0 and n = 1 both return the first column.
	public Column getColumn(int n) {
		final Iterator<Column> iter = columns.values().iterator();
		for ( int i = 0; i < n - 1; i++ ) {
			iter.next();
		}
		return iter.next();
	}
	public void addColumn(Column column) {
		final Column old = getColumn( column );
		if ( old == null ) {
			columns.put( column.getCanonicalName(), column );
			// Record the column's position within this type
			column.uniqueInteger = columns.size();
		}
		else {
			// Column already present: reuse its recorded position
			column.uniqueInteger = old.uniqueInteger;
		}
	}
	public int getColumnSpan() {
		return columns.size();
	}
	public Collection<Column> getColumns() {
		return columns.values();
	}
	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + (catalog == null ? 0 : catalog.hashCode());
		result = prime * result + (name == null ? 0 : name.hashCode());
		result = prime * result + (schema == null ? 0 : schema.hashCode());
		return result;
	}
	@Override
	public boolean equals(Object object) {
		return object instanceof UserDefinedType && equals( (UserDefinedType) object);
	}
	// Equality is based solely on the qualified name (catalog, schema, name)
	public boolean equals(UserDefinedType table) {
		if ( null == table ) {
			return false;
		}
		else if ( this == table ) {
			return true;
		}
		else {
			return Identifier.areEqual( name, table.name )
					&& Identifier.areEqual( schema, table.schema )
					&& Identifier.areEqual( catalog, table.catalog );
		}
	}
	public boolean containsColumn(Column column) {
		return columns.containsValue( column );
	}
	public String toString() {
		final StringBuilder buf = new StringBuilder()
				.append( getClass().getSimpleName() )
				.append( '(' );
		if ( getCatalog() != null ) {
			buf.append( getCatalog() ).append( "." );
		}
		if ( getSchema() != null ) {
			buf.append( getSchema() ).append( "." );
		}
		buf.append( getName() ).append( ')' );
		return buf.toString();
	}
	public String getComment() {
		return comment;
	}
	public void setComment(String comment) {
		this.comment = comment;
	}
	@Override
	public String getExportIdentifier() {
		// Identifier used by the schema tooling to uniquely refer to this object
		final StringBuilder qualifiedName = new StringBuilder();
		if ( catalog != null ) {
			qualifiedName.append( catalog.render() ).append( '.' );
		}
		if ( schema != null ) {
			qualifiedName.append( schema.render() ).append( '.' );
		}
		return qualifiedName.append( name.render() ).toString();
	}
}

View File

@ -9,6 +9,7 @@ package org.hibernate.metamodel.mapping;
import java.util.List;
import java.util.function.BiConsumer;
import org.hibernate.internal.util.MutableInteger;
import org.hibernate.mapping.IndexedConsumer;
import org.hibernate.metamodel.mapping.internal.EmbeddedAttributeMapping;
import org.hibernate.metamodel.mapping.internal.MappingModelCreationProcess;
@ -37,6 +38,143 @@ public interface EmbeddableMappingType extends ManagedMappingType, SelectableMap
boolean isCreateEmptyCompositesEnabled();
	/**
	 * The selectable mapping of the aggregate column, or {@code null}
	 * if this embeddable is not aggregated into a single column.
	 */
	default SelectableMapping getAggregateMapping() {
		return null;
	}
default boolean shouldSelectAggregateMapping() {
return getAggregateMapping() != null;
}
default boolean shouldMutateAggregateMapping() {
// For insert and update we always want to mutate the whole aggregate
return getAggregateMapping() != null;
}
default boolean shouldBindAggregateMapping() {
return getAggregateMapping() != null;
}
@Override
default boolean anyRequiresAggregateColumnWriter() {
return requiresAggregateColumnWriter() || ManagedMappingType.super.anyRequiresAggregateColumnWriter();
}
	/**
	 * Whether the aggregate column of this embeddable needs a dialect-specific
	 * custom write expression renderer. Always {@code false} when there is no
	 * aggregate mapping.
	 */
	default boolean requiresAggregateColumnWriter() {
		final SelectableMapping aggregateMapping = getAggregateMapping();
		if ( aggregateMapping == null ) {
			return false;
		}
		// Cache this maybe?
		final int aggregateSqlTypeCode = aggregateMapping.getJdbcMapping().getJdbcType().getDefaultSqlTypeCode();
		return findContainingEntityMapping().getEntityPersister().getFactory()
				.getJdbcServices()
				.getDialect()
				.getAggregateSupport()
				.requiresAggregateCustomWriteExpressionRenderer( aggregateSqlTypeCode );
	}
/**
* Different from {@link #getJdbcTypeCount()} as this will treat an aggregate as a single element.
*/
	default int getJdbcValueCount() {
		final int numberOfAttributeMappings = getNumberOfAttributeMappings();
		int count = 0;
		for ( int i = 0; i < numberOfAttributeMappings; i++ ) {
			final AttributeMapping attributeMapping = getAttributeMapping( i );
			final MappingType mappedType = attributeMapping.getMappedType();
			// An aggregated embeddable counts as a single JDBC value (its aggregate column)
			if ( mappedType instanceof EmbeddableMappingType
					&& ( (EmbeddableMappingType) mappedType ).getAggregateMapping() != null ) {
				count++;
			}
			else {
				count += attributeMapping.getJdbcTypeCount();
			}
		}
		return count;
	}
	/**
	 * Returns the selectable at the given index, counting an aggregated
	 * embeddable as a single value, or {@code null} when the index is out of range.
	 */
	default SelectableMapping getJdbcValueSelectable(int columnIndex) {
		final int numberOfAttributeMappings = getNumberOfAttributeMappings();
		int count = 0;
		for ( int i = 0; i < numberOfAttributeMappings; i++ ) {
			final AttributeMapping attributeMapping = getAttributeMapping( i );
			final MappingType mappedType = attributeMapping.getMappedType();
			if ( mappedType instanceof EmbeddableMappingType ) {
				final EmbeddableMappingType embeddableMappingType = (EmbeddableMappingType) mappedType;
				final SelectableMapping aggregateMapping = embeddableMappingType.getAggregateMapping();
				if ( aggregateMapping == null ) {
					// Non-aggregated embeddable: search recursively with a shifted index
					final SelectableMapping subSelectable = embeddableMappingType.getJdbcValueSelectable( columnIndex - count );
					if ( subSelectable != null ) {
						return subSelectable;
					}
					count += embeddableMappingType.getJdbcValueCount();
				}
				else {
					// Aggregated embeddable: occupies exactly one position
					if ( count == columnIndex ) {
						return aggregateMapping;
					}
					count++;
				}
			}
			else {
				// NOTE(review): a multi-column attribute only matches at its first
				// position here; an index pointing into its interior yields null —
				// confirm this is the intended contract
				if ( count == columnIndex ) {
					return (SelectableMapping) attributeMapping;
				}
				count += attributeMapping.getJdbcTypeCount();
			}
		}
		return null;
	}
	@Override
	default int getSelectableIndex(String selectableName) {
		final int numberOfAttributeMappings = getNumberOfAttributeMappings();
		int offset = 0;
		for ( int i = 0; i < numberOfAttributeMappings; i++ ) {
			final AttributeMapping attributeMapping = getAttributeMapping( i );
			final MappingType mappedType = attributeMapping.getMappedType();
			if ( mappedType instanceof EmbeddableMappingType ) {
				final EmbeddableMappingType embeddableMappingType = (EmbeddableMappingType) mappedType;
				final SelectableMapping aggregateMapping = embeddableMappingType.getAggregateMapping();
				if ( aggregateMapping != null ) {
					// Aggregated embeddable occupies a single position
					if ( aggregateMapping.getSelectableName().equals( selectableName ) ) {
						return offset;
					}
					offset++;
				}
				else {
					// Flattened embeddable: search recursively, shifting by its JDBC span
					final int selectableIndex = embeddableMappingType.getSelectableIndex( selectableName );
					if ( selectableIndex != -1 ) {
						return offset + selectableIndex;
					}
					offset += embeddableMappingType.getJdbcTypeCount();
				}
			}
			else if ( attributeMapping instanceof SelectableMapping ) {
				if ( ( (SelectableMapping) attributeMapping ).getSelectableName().equals( selectableName ) ) {
					return offset;
				}
				offset++;
			}
			else {
				// Fall back to visiting all selectables of the attribute,
				// remembering the relative position of a name match
				final MutableInteger position = new MutableInteger( -1 );
				final int jdbcTypeCount = attributeMapping.forEachSelectable(
						(selectionIndex, selectableMapping) -> {
							if ( selectableMapping.getSelectableName().equals( selectableName ) ) {
								position.set( selectionIndex );
							}
						}
				);
				if ( position.get() != -1 ) {
					return offset + position.get();
				}
				offset += jdbcTypeCount;
			}
		}
		return -1;
	}
EmbeddableMappingType createInverseMappingType(
EmbeddedAttributeMapping valueMapping,
TableGroupProducer declaringTableGroupProducer,
@ -51,6 +189,32 @@ public interface EmbeddableMappingType extends ManagedMappingType, SelectableMap
@Override
int forEachSelectable(int offset, SelectableConsumer consumer);
default void forEachInsertable(int offset, SelectableConsumer consumer) {
forEachSelectable(
offset,
(selectionIndex, selectableMapping) -> {
if ( ! selectableMapping.isInsertable() || selectableMapping.isFormula() ) {
return;
}
consumer.accept( selectionIndex, selectableMapping );
}
);
}
default void forEachUpdatable(int offset, SelectableConsumer consumer) {
forEachSelectable(
offset,
(selectionIndex, selectableMapping) -> {
if ( ! selectableMapping.isUpdateable() || selectableMapping.isFormula() ) {
return;
}
consumer.accept( selectionIndex, selectableMapping );
}
);
}
@Override
int getJdbcTypeCount();

View File

@ -119,6 +119,11 @@ public interface JdbcMapping extends MappingType, JdbcMappingContainer {
return valueConverter == null ? value : valueConverter.toRelationalValue( value );
}
/**
 * Converts the given relational (JDBC-level) value to its domain-model
 * representation using the configured value converter, if any; returns the
 * value unchanged when no converter is present.
 * Counterpart of {@code convertToRelationalValue}.
 */
default Object convertToDomainValue(Object value) {
// NOTE(review): raw BasicValueConverter use mirrors convertToRelationalValue above
BasicValueConverter valueConverter = getValueConverter();
return valueConverter == null ? value : valueConverter.toDomainValue( value );
}
@Override
default int getJdbcTypeCount() {

View File

@ -73,4 +73,16 @@ public interface ManagedMappingType extends MappingType, FetchableContainer {
getAttributeMapping( position ).setValue( instance, value );
}
/**
 * Returns whether any embeddable-typed attribute of this managed type
 * (checked recursively through nested embeddables) requires a custom
 * aggregate column writer expression.
 */
default boolean anyRequiresAggregateColumnWriter() {
	final int attributeCount = getNumberOfAttributeMappings();
	for ( int position = 0; position < attributeCount; position++ ) {
		final MappingType mappedType = getAttributeMapping( position ).getMappedType();
		if ( mappedType instanceof EmbeddableMappingType
				&& ( (EmbeddableMappingType) mappedType ).requiresAggregateColumnWriter() ) {
			return true;
		}
	}
	return false;
}
}

View File

@ -25,6 +25,12 @@ public interface SelectableMapping extends SqlTypedMapping {
* The selection's expression. This is the column name or formula
*/
String getSelectionExpression();
/**
 * The name of this selectable. By default this is simply the selection
 * expression, i.e. the column name or formula text.
 */
default String getSelectableName() {
return getSelectionExpression();
}
/**
 * The path of this selectable; defaults to a root path consisting of
 * just the selectable name.
 */
default SelectablePath getSelectablePath() {
return new SelectablePath( getSelectableName() );
}
/**
* The selection's read expression accounting for formula treatment as well

View File

@ -0,0 +1,121 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.metamodel.mapping;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Objects;
import org.hibernate.Incubating;
/**
* The path for a selectable.
*
* @author Christian Beikov
*/
@Incubating
public class SelectablePath implements Serializable {
	// Parent element of this path, or null for the root element.
	private final SelectablePath parent;
	// Simple (unqualified) name of this path element.
	private final String name;
	// Zero-based depth of this element within the path.
	private final int index;

	/**
	 * Creates a root path consisting of the single given element name.
	 */
	public SelectablePath(String root) {
		this.parent = null;
		// Interned to save memory, since the same names tend to recur across mappings
		this.name = root.intern();
		this.index = 0;
	}

	private SelectablePath(SelectablePath parent, String name) {
		this.parent = parent;
		this.name = name;
		this.index = parent.index + 1;
	}

	/**
	 * Returns all elements of this path, ordered from the root down to this element.
	 */
	public SelectablePath[] getParts() {
		final SelectablePath[] parts = new SelectablePath[index + 1];
		SelectablePath current = this;
		for ( int i = index; i >= 0; i-- ) {
			parts[i] = current;
			current = current.parent;
		}
		return parts;
	}

	/**
	 * Returns the elements between {@code basePath} (exclusive) and this element
	 * (inclusive), ordered from the element just below {@code basePath} down to
	 * this one. If {@code basePath} does not occur on this path's ancestor chain,
	 * the returned array is left filled with nulls.
	 */
	public SelectablePath[] relativize(SelectablePath basePath) {
		final SelectablePath[] relativeParts = new SelectablePath[index - basePath.index];
		// First verify that basePath actually occurs on this path's ancestor chain
		SelectablePath ancestor = this;
		while ( ancestor != null && !ancestor.equals( basePath ) ) {
			ancestor = ancestor.parent;
		}
		if ( ancestor != null ) {
			// Fill from the tail upwards so the result is ordered root-to-leaf
			SelectablePath part = this;
			for ( int i = relativeParts.length - 1; i >= 0; i-- ) {
				relativeParts[i] = part;
				part = part.parent;
			}
		}
		return relativeParts;
	}

	public String getSelectableName() {
		return name;
	}

	public SelectablePath getParent() {
		return parent;
	}

	/**
	 * Creates a child path of this path with the given element name.
	 */
	public SelectablePath append(String selectableName) {
		return new SelectablePath( this, selectableName );
	}

	@Override
	public String toString() {
		final StringBuilder sb = new StringBuilder( name.length() * index );
		final SelectablePath[] parts = getParts();
		for ( int i = 0; i < parts.length; i++ ) {
			if ( i != 0 ) {
				sb.append( '.' );
			}
			sb.append( parts[i].name );
		}
		return sb.toString();
	}

	@Override
	public boolean equals(Object o) {
		if ( this == o ) {
			return true;
		}
		if ( o == null || getClass() != o.getClass() ) {
			return false;
		}
		final SelectablePath other = (SelectablePath) o;
		return name.equals( other.name ) && Objects.equals( parent, other.parent );
	}

	@Override
	public int hashCode() {
		return 31 * ( parent == null ? 0 : parent.hashCode() ) + name.hashCode();
	}
}

View File

@ -41,4 +41,28 @@ public interface ValuedModelPart extends ModelPart, ValueMapping, SelectableMapp
/**
 * Visits all selectables of this model part, starting at offset 0.
 * Delegates to the default {@link ModelPart} implementation.
 */
default int forEachSelectable(SelectableConsumer consumer) {
return ModelPart.super.forEachSelectable( consumer );
}
/**
 * Visits only the insertable, non-formula selectables of this model part,
 * starting at offset 0.
 */
default void forEachInsertable(SelectableConsumer consumer) {
	ModelPart.super.forEachSelectable(
			(selectionIndex, selectableMapping) -> {
				// Only forward columns that can actually be written on insert
				if ( selectableMapping.isInsertable() && !selectableMapping.isFormula() ) {
					consumer.accept( selectionIndex, selectableMapping );
				}
			}
	);
}
/**
 * Visits only the updatable, non-formula selectables of this model part,
 * starting at offset 0.
 */
default void forEachUpdatable(SelectableConsumer consumer) {
	ModelPart.super.forEachSelectable(
			(selectionIndex, selectableMapping) -> {
				// Only forward columns that can actually be written on update
				if ( selectableMapping.isUpdateable() && !selectableMapping.isFormula() ) {
					consumer.accept( selectionIndex, selectableMapping );
				}
			}
	);
}
}

View File

@ -35,6 +35,7 @@ import org.hibernate.metamodel.mapping.ForeignKeyDescriptor;
import org.hibernate.metamodel.mapping.ManagedMappingType;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectableMappings;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.model.domain.NavigableRole;
import org.hibernate.metamodel.spi.EmbeddableRepresentationStrategy;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
@ -84,12 +85,12 @@ public abstract class AbstractEmbeddableMapping implements EmbeddableMappingType
}
final Object[] results = new Object[getNumberOfAttributeMappings()];
forEachAttributeMapping( (position, attribute) -> {
final Getter getter = attribute.getAttributeMetadata()
for ( int i = 0; i < results.length; i++ ) {
final Getter getter = getAttributeMapping( i ).getAttributeMetadata()
.getPropertyAccess()
.getGetter();
results[position] = getter.get( compositeInstance );
} );
results[i] = getter.get( compositeInstance );
}
return results;
}
@ -100,9 +101,9 @@ public abstract class AbstractEmbeddableMapping implements EmbeddableMappingType
optimizer.getAccessOptimizer().setPropertyValues( component, values );
}
else {
forEachAttributeMapping( (position, attribute) -> {
attribute.getPropertyAccess().getSetter().set( component, values[position] );
} );
for ( int i = 0; i < values.length; i++ ) {
getAttributeMapping( i ).getPropertyAccess().getSetter().set( component, values[i] );
}
}
}
@ -193,7 +194,7 @@ public abstract class AbstractEmbeddableMapping implements EmbeddableMappingType
for ( int i = 0; i < subMappings.length; i++ ) {
subMappings[i] = selectableMappings.getSelectable( currentIndex++ );
}
attributeMapping = (AttributeMapping) MappingModelCreationHelper.createInverseModelPart(
attributeMapping = MappingModelCreationHelper.createInverseModelPart(
(EmbeddableValuedModelPart) attributeMapping,
declaringType,
declaringTableGroupProducer,
@ -271,18 +272,20 @@ public abstract class AbstractEmbeddableMapping implements EmbeddableMappingType
containingTableExpression = rootTableExpression;
columnExpression = rootTableKeyColumnNames[ columnPosition ];
}
final SelectablePath selectablePath;
final String columnDefinition;
final Long length;
final Integer precision;
final Integer scale;
final boolean nullable;
if ( selectable instanceof Column ) {
Column column = (Column) selectable;
final Column column = (Column) selectable;
columnDefinition = column.getSqlType();
length = column.getLength();
precision = column.getPrecision();
scale = column.getScale();
nullable = column.isNullable();
selectablePath = basicValue.createSelectablePath( column.getQuotedName( dialect ) );
}
else {
columnDefinition = null;
@ -290,6 +293,7 @@ public abstract class AbstractEmbeddableMapping implements EmbeddableMappingType
precision = null;
scale = null;
nullable = true;
selectablePath = basicValue.createSelectablePath( bootPropertyDescriptor.getName() );
}
attributeMapping = MappingModelCreationHelper.buildBasicAttributeMapping(
@ -301,6 +305,7 @@ public abstract class AbstractEmbeddableMapping implements EmbeddableMappingType
(BasicType<?>) subtype,
containingTableExpression,
columnExpression,
selectablePath,
selectable.isFormula(),
selectable.getCustomReadExpression(),
selectable.getCustomWriteExpression(),

View File

@ -21,6 +21,7 @@ import org.hibernate.metamodel.mapping.ManagedMappingType;
import org.hibernate.metamodel.mapping.MappingType;
import org.hibernate.metamodel.mapping.SelectableConsumer;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.mapping.SingularAttributeMapping;
import org.hibernate.metamodel.model.domain.NavigableRole;
import org.hibernate.property.access.spi.PropertyAccess;
@ -50,6 +51,7 @@ public class BasicAttributeMapping
private final String tableExpression;
private final String mappedColumnExpression;
private final SelectablePath selectablePath;
private final boolean isFormula;
private final String customReadExpression;
private final String customWriteExpression;
@ -74,6 +76,7 @@ public class BasicAttributeMapping
FetchStyle mappedFetchStyle,
String tableExpression,
String mappedColumnExpression,
SelectablePath selectablePath,
boolean isFormula,
String customReadExpression,
String customWriteExpression,
@ -99,6 +102,12 @@ public class BasicAttributeMapping
this.navigableRole = navigableRole;
this.tableExpression = tableExpression;
this.mappedColumnExpression = mappedColumnExpression;
if ( selectablePath == null ) {
this.selectablePath = new SelectablePath( mappedColumnExpression );
}
else {
this.selectablePath = selectablePath;
}
this.isFormula = isFormula;
this.columnDefinition = columnDefinition;
this.length = length;
@ -153,6 +162,7 @@ public class BasicAttributeMapping
FetchStyle.JOIN,
selectableMapping.getContainingTableExpression(),
selectableMapping.getSelectionExpression(),
selectableMapping.getSelectablePath(),
selectableMapping.isFormula(),
selectableMapping.getCustomReadExpression(),
selectableMapping.getCustomWriteExpression(),
@ -189,6 +199,16 @@ public class BasicAttributeMapping
return mappedColumnExpression;
}
/**
 * The unqualified selectable name, taken from the last element of the selectable path.
 */
@Override
public String getSelectableName() {
return selectablePath.getSelectableName();
}
/**
 * The full selectable path of this attribute's column
 * (defaulted from the mapped column expression when none was given at construction).
 */
@Override
public SelectablePath getSelectablePath() {
return selectablePath;
}
@Override
public boolean isFormula() {
return isFormula;

View File

@ -17,6 +17,7 @@ import org.hibernate.NotYetImplementedFor6Exception;
import org.hibernate.SharedSessionContract;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.aggregate.AggregateSupport;
import org.hibernate.engine.FetchTiming;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
@ -25,6 +26,7 @@ import org.hibernate.engine.spi.CascadeStyle;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.mapping.AggregateColumn;
import org.hibernate.mapping.Any;
import org.hibernate.mapping.BasicValue;
import org.hibernate.mapping.Column;
@ -43,6 +45,7 @@ import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.metamodel.mapping.SelectableConsumer;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectableMappings;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.model.domain.NavigableRole;
import org.hibernate.metamodel.spi.EmbeddableRepresentationStrategy;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
@ -60,9 +63,11 @@ import org.hibernate.sql.results.graph.Fetchable;
import org.hibernate.sql.results.graph.embeddable.internal.EmbeddableResultImpl;
import org.hibernate.type.AnyType;
import org.hibernate.type.BasicType;
import org.hibernate.type.BasicTypeRegistry;
import org.hibernate.type.CollectionType;
import org.hibernate.type.CompositeType;
import org.hibernate.type.EntityType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.java.ImmutableMutabilityPlan;
import org.hibernate.type.descriptor.java.JavaType;
@ -84,7 +89,17 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
boolean[] updateability,
Function<EmbeddableMappingType, EmbeddableValuedModelPart> embeddedPartBuilder,
MappingModelCreationProcess creationProcess) {
return from( bootDescriptor, compositeType, null, null, insertability, updateability, embeddedPartBuilder, creationProcess );
return from(
bootDescriptor,
compositeType,
null,
null,
null,
insertability,
updateability,
embeddedPartBuilder,
creationProcess
);
}
public static EmbeddableMappingTypeImpl from(
@ -92,6 +107,7 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
CompositeType compositeType,
String rootTableExpression,
String[] rootTableKeyColumnNames,
Property componentProperty,
boolean[] insertability,
boolean[] updateability,
Function<EmbeddableMappingType,EmbeddableValuedModelPart> embeddedPartBuilder,
@ -100,6 +116,7 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
final EmbeddableMappingTypeImpl mappingType = new EmbeddableMappingTypeImpl(
bootDescriptor,
componentProperty,
embeddedPartBuilder,
creationContext
);
@ -134,9 +151,14 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
private final EmbeddableValuedModelPart valueMapping;
private final boolean createEmptyCompositesEnabled;
private final SelectableMapping aggregateMapping;
private final boolean aggregateMappingRequiresColumnWriter;
private final boolean preferSelectAggregateMapping;
private final boolean preferBindAggregateMapping;
private EmbeddableMappingTypeImpl(
Component bootDescriptor,
Property componentProperty,
Function<EmbeddableMappingType, EmbeddableValuedModelPart> embeddedPartBuilder,
RuntimeModelCreationContext creationContext) {
super( creationContext );
@ -156,7 +178,78 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
cs.getSettings(),
false
);
final AggregateColumn aggregateColumn = bootDescriptor.getAggregateColumn();
if ( aggregateColumn != null ) {
final Dialect dialect = creationContext.getBootstrapContext()
.getServiceRegistry()
.getService( JdbcServices.class )
.getDialect();
final boolean insertable;
final boolean updatable;
if ( componentProperty == null ) {
insertable = true;
updatable = true;
}
else {
insertable = componentProperty.isInsertable();
updatable = componentProperty.isUpdateable();
}
this.aggregateMapping = SelectableMappingImpl.from(
bootDescriptor.getOwner().getTable().getName(),
aggregateColumn,
bootDescriptor.getParentAggregateColumn() != null
? bootDescriptor.getParentAggregateColumn().getSelectablePath()
: null,
resolveJdbcMapping( bootDescriptor, creationContext ),
creationContext.getTypeConfiguration(),
insertable,
updatable,
dialect,
null
);
final AggregateSupport aggregateSupport = dialect.getAggregateSupport();
final int sqlTypeCode = aggregateColumn.getSqlTypeCode();
this.aggregateMappingRequiresColumnWriter = aggregateSupport
.requiresAggregateCustomWriteExpressionRenderer( sqlTypeCode );
this.preferSelectAggregateMapping = aggregateSupport.preferSelectAggregateMapping( sqlTypeCode );
this.preferBindAggregateMapping = aggregateSupport.preferBindAggregateMapping( sqlTypeCode );
}
else {
this.aggregateMapping = null;
this.aggregateMappingRequiresColumnWriter = false;
this.preferSelectAggregateMapping = false;
this.preferBindAggregateMapping = false;
}
}
/**
 * Resolves the {@code JdbcMapping} for this embeddable's aggregate column by
 * resolving an aggregate JDBC type descriptor (e.g. struct/json) for the column's
 * SQL type code, and registers the resulting basic type under the component's
 * struct name (and java class name) when a struct name is declared.
 */
private JdbcMapping resolveJdbcMapping(Component bootDescriptor, RuntimeModelCreationContext creationContext) {
// The following is a bit "hacky" because ideally, this should happen in InferredBasicValueResolver#from,
// but since we don't have access to the EmbeddableMappingType there yet, we do it here.
// A possible alternative design would be to change AggregateJdbcType#resolveAggregateDescriptor
// to accept a CompositeType instead of EmbeddableMappingType, and I even tried that,
// but it doesn't work out unfortunately, because the type would have to be created too early,
// when the values of the component properties aren't fully initialized yet.
// Both designs would do this as part of the "finishInitialization" phase,
// so there is IMO no real win to do it differently
final TypeConfiguration typeConfiguration = creationContext.getTypeConfiguration();
final BasicTypeRegistry basicTypeRegistry = typeConfiguration.getBasicTypeRegistry();
final Column aggregateColumn = bootDescriptor.getAggregateColumn();
// Only STRUCT aggregates carry a type name in the SQL type; other codes pass null
final BasicType<?> basicType = basicTypeRegistry.resolve(
getMappedJavaType(),
typeConfiguration.getJdbcTypeRegistry().resolveAggregateDescriptor(
aggregateColumn.getSqlTypeCode(),
aggregateColumn.getSqlTypeCode() == SqlTypes.STRUCT
? aggregateColumn.getSqlType()
: null,
this
)
);
// Register the resolved type under its struct name and java class name
if ( bootDescriptor.getStructName() != null ) {
basicTypeRegistry.register( basicType, bootDescriptor.getStructName() );
basicTypeRegistry.register( basicType, getMappedJavaType().getJavaTypeClass().getName() );
}
return basicType;
}
public EmbeddableMappingTypeImpl(
@ -171,6 +264,10 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
this.representationStrategy = inverseMappingType.getRepresentationStrategy();
this.valueMapping = valueMapping;
this.createEmptyCompositesEnabled = inverseMappingType.isCreateEmptyCompositesEnabled();
this.aggregateMapping = null;
this.aggregateMappingRequiresColumnWriter = false;
this.preferSelectAggregateMapping = false;
this.preferBindAggregateMapping = false;
this.selectableMappings = selectableMappings;
creationProcess.registerInitializationCallback(
"EmbeddableMappingType(" + inverseMappingType.getNavigableRole().getFullPath() + ".{inverse})#finishInitialization",
@ -270,8 +367,9 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
columnExpression = selectable.getText( dialect );
}
if ( selectable instanceof Column ) {
final Column column = (Column) selectable;
containingTableExpression = MappingModelCreationHelper.getTableIdentifierExpression(
( (Column) selectable ).getValue().getTable(),
column.getValue().getTable(),
creationProcess
);
}
@ -283,18 +381,20 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
containingTableExpression = rootTableExpression;
columnExpression = rootTableKeyColumnNames[columnPosition];
}
final SelectablePath selectablePath;
final String columnDefinition;
final Long length;
final Integer precision;
final Integer scale;
final boolean nullable;
if ( selectable instanceof Column ) {
Column column = (Column) selectable;
final Column column = (Column) selectable;
columnDefinition = column.getSqlType();
length = column.getLength();
precision = column.getPrecision();
scale = column.getScale();
nullable = column.isNullable();
selectablePath = basicValue.createSelectablePath( column.getQuotedName( dialect ) );
}
else {
columnDefinition = null;
@ -302,6 +402,7 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
precision = null;
scale = null;
nullable = true;
selectablePath = basicValue.createSelectablePath( bootPropertyDescriptor.getName() );
}
attributeMapping = MappingModelCreationHelper.buildBasicAttributeMapping(
bootPropertyDescriptor.getName(),
@ -312,6 +413,7 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
(BasicType<?>) subtype,
containingTableExpression,
columnExpression,
selectablePath,
selectable.isFormula(),
selectable.getCustomReadExpression(),
selectable.getCustomWriteExpression(),
@ -606,7 +708,10 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
@Override
public void decompose(Object domainValue, JdbcValueConsumer valueConsumer, SharedSessionContractImplementor session) {
if ( domainValue instanceof Object[] ) {
if ( shouldBindAggregateMapping() ) {
valueConsumer.consume( domainValue, aggregateMapping );
}
else if ( domainValue instanceof Object[] ) {
final Object[] values = (Object[]) domainValue;
assert values.length == attributeMappings.size();
@ -688,6 +793,42 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
return getSelectableMappings().forEachSelectable( offset, consumer );
}
/**
 * Visits the insertable selectables of this embeddable. When the aggregate
 * column itself is the mutation target, only the aggregate mapping is offered
 * (if insertable); otherwise each individual insertable sub-selectable is visited.
 */
@Override
public void forEachInsertable(int offset, SelectableConsumer consumer) {
	if ( shouldMutateAggregateMapping() ) {
		if ( aggregateMapping.isInsertable() ) {
			consumer.accept( offset, aggregateMapping );
		}
		return;
	}
	final int count = selectableMappings.getJdbcTypeCount();
	for ( int position = 0; position < count; position++ ) {
		final SelectableMapping selectableMapping = selectableMappings.getSelectable( position );
		if ( selectableMapping.isInsertable() ) {
			consumer.accept( offset + position, selectableMapping );
		}
	}
}
/**
 * Visits the updatable selectables of this embeddable. When the aggregate
 * column itself is the mutation target, only the aggregate mapping is offered
 * (if updatable); otherwise each individual updatable sub-selectable is visited.
 */
@Override
public void forEachUpdatable(int offset, SelectableConsumer consumer) {
	if ( shouldMutateAggregateMapping() ) {
		if ( aggregateMapping.isUpdateable() ) {
			consumer.accept( offset, aggregateMapping );
		}
		return;
	}
	final int count = selectableMappings.getJdbcTypeCount();
	for ( int position = 0; position < count; position++ ) {
		final SelectableMapping selectableMapping = selectableMappings.getSelectable( position );
		if ( selectableMapping.isUpdateable() ) {
			consumer.accept( offset + position, selectableMapping );
		}
	}
}
private SelectableMappings getSelectableMappings() {
if (selectableMappings == null) {
// This is expected to happen when processing a
@ -758,7 +899,28 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
visitAttributeMappings( consumer );
}
/**
 * Whether empty composite (embeddable) instances should be created instead of null.
 */
@Override
public boolean isCreateEmptyCompositesEnabled() {
return createEmptyCompositesEnabled;
}
/**
 * The selectable for the aggregate (e.g. struct/json) column,
 * or null if this embeddable is not mapped to an aggregate column.
 */
@Override
public SelectableMapping getAggregateMapping() {
return aggregateMapping;
}
/**
 * Whether writing the aggregate column requires a dialect-specific
 * custom write expression renderer (per {@code AggregateSupport}).
 */
@Override
public boolean requiresAggregateColumnWriter() {
return aggregateMappingRequiresColumnWriter;
}
/**
 * Whether reads should select the aggregate column as a whole
 * rather than its individual sub-columns (dialect preference).
 */
@Override
public boolean shouldSelectAggregateMapping() {
return preferSelectAggregateMapping;
}
/**
 * Whether values should be bound through the aggregate column as a whole
 * rather than through its individual sub-columns (dialect preference).
 */
@Override
public boolean shouldBindAggregateMapping() {
return preferBindAggregateMapping;
}
}

View File

@ -6,6 +6,7 @@
*/
package org.hibernate.metamodel.mapping.internal;
import java.util.Collections;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@ -23,6 +24,7 @@ import org.hibernate.metamodel.mapping.ManagedMappingType;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.metamodel.mapping.PropertyBasedMapping;
import org.hibernate.metamodel.mapping.SelectableConsumer;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectableMappings;
import org.hibernate.metamodel.model.domain.NavigableRole;
import org.hibernate.property.access.internal.PropertyAccessStrategyBasicImpl;
@ -52,8 +54,10 @@ import org.hibernate.sql.results.graph.Fetch;
import org.hibernate.sql.results.graph.FetchParent;
import org.hibernate.sql.results.graph.Fetchable;
import org.hibernate.sql.results.graph.embeddable.EmbeddableValuedFetchable;
import org.hibernate.sql.results.graph.embeddable.internal.AggregateEmbeddableResultImpl;
import org.hibernate.sql.results.graph.embeddable.internal.EmbeddableFetchImpl;
import org.hibernate.sql.results.graph.embeddable.internal.EmbeddableResultImpl;
import org.hibernate.sql.results.graph.embeddable.internal.AggregateEmbeddableFetchImpl;
/**
* @author Steve Ebersole
@ -185,6 +189,16 @@ public class EmbeddedAttributeMapping
return getEmbeddableTypeDescriptor().forEachSelectable( offset, consumer );
}
/**
 * Visits the insertable selectables of the embeddable type, starting at offset 0.
 */
@Override
public void forEachInsertable(SelectableConsumer consumer) {
getEmbeddableTypeDescriptor().forEachInsertable( 0, consumer );
}
/**
 * Visits the updatable selectables of the embeddable type, starting at offset 0.
 */
@Override
public void forEachUpdatable(SelectableConsumer consumer) {
getEmbeddableTypeDescriptor().forEachUpdatable( 0, consumer );
}
@Override
public void breakDownJdbcValues(Object domainValue, JdbcValueConsumer valueConsumer, SharedSessionContractImplementor session) {
getEmbeddableTypeDescriptor().breakDownJdbcValues( domainValue, valueConsumer, session );
@ -201,6 +215,14 @@ public class EmbeddedAttributeMapping
TableGroup tableGroup,
String resultVariable,
DomainResultCreationState creationState) {
if ( embeddableMappingType.shouldSelectAggregateMapping() ) {
return new AggregateEmbeddableResultImpl<>(
navigablePath,
this,
resultVariable,
creationState
);
}
return new EmbeddableResultImpl<>(
navigablePath,
this,
@ -239,6 +261,16 @@ public class EmbeddedAttributeMapping
boolean selected,
String resultVariable,
DomainResultCreationState creationState) {
if ( embeddableMappingType.shouldSelectAggregateMapping() ) {
return new AggregateEmbeddableFetchImpl(
fetchablePath,
this,
fetchParent,
fetchTiming,
selected,
creationState
);
}
return new EmbeddableFetchImpl(
fetchablePath,
this,
@ -255,6 +287,23 @@ public class EmbeddedAttributeMapping
Clause clause,
SqmToSqlAstConverter walker,
SqlAstCreationState sqlAstCreationState) {
if ( embeddableMappingType.shouldSelectAggregateMapping()
// We always want to set the whole aggregate mapping in the SET clause if a single expression is given
// This usually happens when we try to set the aggregate to e.g. null or a parameter
|| clause == Clause.SET && embeddableMappingType.getAggregateMapping() != null ) {
final SelectableMapping selection = embeddableMappingType.getAggregateMapping();
final NavigablePath navigablePath = tableGroup.getNavigablePath().append( getNavigableRole().getNavigableName() );
final TableReference tableReference = tableGroup.resolveTableReference( navigablePath, getContainingTableExpression() );
return new SqlTuple(
Collections.singletonList(
sqlAstCreationState.getSqlExpressionResolver().resolveSqlExpression(
tableReference,
selection
)
),
this
);
}
final List<ColumnReference> columnReferences = CollectionHelper.arrayList( embeddableMappingType.getJdbcTypeCount() );
final NavigablePath navigablePath = tableGroup.getNavigablePath().append( getNavigableRole().getNavigableName() );
final TableReference defaultTableReference = tableGroup.resolveTableReference( navigablePath, getContainingTableExpression() );
@ -353,6 +402,11 @@ public class EmbeddedAttributeMapping
return getEmbeddableTypeDescriptor().getFetchable( position );
}
/**
 * Resolves the position of the named selectable within this embedded mapping
 * by delegating to the embeddable type descriptor; -1 when not found
 * (callers check for -1 — see the selectable-index resolution loop).
 */
@Override
public int getSelectableIndex(String selectableName) {
return getEmbeddableTypeDescriptor().getSelectableIndex( selectableName );
}
@Override
public String toString() {
return "EmbeddedAttributeMapping(" + navigableRole + ")@" + System.identityHashCode( this );

View File

@ -612,13 +612,24 @@ public class ManyToManyCollectionPart extends AbstractEntityCollectionPart imple
Dialect dialect,
String fkKeyTableName,
BasicValuedModelPart basicFkTargetPart) {
final boolean columnInsertable;
final boolean columnUpdateable;
if ( getNature() == Nature.ELEMENT ) {
// Replicate behavior of AbstractCollectionPersister#elementColumnIsSettable
columnInsertable = true;
columnUpdateable = true;
}
else {
columnInsertable = fkBootDescriptorSource.isColumnInsertable( 0 );
columnUpdateable = fkBootDescriptorSource.isColumnUpdateable( 0 );
}
final SelectableMapping keySelectableMapping = SelectableMappingImpl.from(
fkKeyTableName,
fkBootDescriptorSource.getSelectables().get(0),
basicFkTargetPart.getJdbcMapping(),
creationProcess.getCreationContext().getTypeConfiguration(),
fkBootDescriptorSource.isColumnInsertable( 0 ),
fkBootDescriptorSource.isColumnUpdateable( 0 ),
columnInsertable,
columnUpdateable,
dialect,
creationProcess.getSqmFunctionRegistry()
);

View File

@ -69,6 +69,7 @@ import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.metamodel.mapping.PropertyBasedMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectableMappings;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.metamodel.mapping.VirtualModelPart;
import org.hibernate.metamodel.model.domain.NavigableRole;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
@ -135,6 +136,7 @@ public class MappingModelCreationHelper {
cidType,
rootTableName,
rootTableKeyColumnNames,
bootProperty,
component.getColumnInsertability(),
component.getColumnUpdateability(),
embeddable -> new EmbeddedIdentifierMappingImpl(
@ -181,6 +183,7 @@ public class MappingModelCreationHelper {
BasicType attrType,
String tableExpression,
String attrColumnName,
SelectablePath selectablePath,
boolean isAttrFormula,
String readExpr,
String writeExpr,
@ -225,6 +228,7 @@ public class MappingModelCreationHelper {
fetchStyle,
tableExpression,
attrColumnName,
selectablePath,
isAttrFormula,
readExpr,
writeExpr,
@ -267,6 +271,7 @@ public class MappingModelCreationHelper {
attrType,
tableExpression,
rootTableKeyColumnNames,
bootProperty,
component.getColumnInsertability(),
component.getColumnUpdateability(),
attributeMappingType -> {
@ -1152,13 +1157,25 @@ public class MappingModelCreationHelper {
if ( bootMapKeyDescriptor instanceof BasicValue ) {
final BasicValue basicValue = (BasicValue) bootMapKeyDescriptor;
final boolean insertable;
final boolean updatable;
if ( indexedCollection instanceof org.hibernate.mapping.Map
&& ( (org.hibernate.mapping.Map) indexedCollection ).getMapKeyPropertyName() != null ) {
// Replicate behavior of AbstractCollectionPersister#indexColumnIsSettable
insertable = false;
updatable = false;
}
else {
insertable = updatable = basicValue.isColumnInsertable( 0 )
|| basicValue.isColumnUpdateable( 0 );
}
final SelectableMapping selectableMapping = SelectableMappingImpl.from(
tableExpression,
basicValue.getSelectables().get(0),
basicValue.getSelectables().get( 0 ),
basicValue.resolve().getJdbcMapping(),
creationProcess.getCreationContext().getTypeConfiguration(),
basicValue.isColumnInsertable( 0 ),
basicValue.isColumnUpdateable( 0 ),
insertable,
updatable,
dialect,
creationProcess.getSqmFunctionRegistry()
);
@ -1394,13 +1411,6 @@ public class MappingModelCreationHelper {
}
}
public static Expression buildColumnReferenceExpression(
ModelPart modelPart,
SqlExpressionResolver sqlExpressionResolver,
SessionFactoryImplementor sessionFactory) {
return buildColumnReferenceExpression( null, modelPart, sqlExpressionResolver, sessionFactory );
}
public static Expression buildColumnReferenceExpression(
TableGroup tableGroup,
ModelPart modelPart,
@ -1425,7 +1435,7 @@ public class MappingModelCreationHelper {
}
else {
colRef = (ColumnReference) sqlExpressionResolver.resolveSqlExpression(
createColumnReferenceKey( qualifier, selection.getSelectionExpression() ),
createColumnReferenceKey( qualifier, selection ),
sqlAstProcessingState -> new ColumnReference( qualifier, selection )
);
}
@ -1449,7 +1459,7 @@ public class MappingModelCreationHelper {
}
else {
return sqlExpressionResolver.resolveSqlExpression(
createColumnReferenceKey( qualifier, basicPart.getSelectionExpression() ),
createColumnReferenceKey( qualifier, basicPart ),
sqlAstProcessingState -> new ColumnReference( qualifier, basicPart )
);
}

View File

@ -9,10 +9,12 @@ package org.hibernate.metamodel.mapping.internal;
import java.util.Locale;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.BasicValue;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Selectable;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import org.hibernate.type.spi.TypeConfiguration;
@ -23,6 +25,7 @@ public class SelectableMappingImpl extends SqlTypedMappingImpl implements Select
private final String containingTableExpression;
private final String selectionExpression;
private final SelectablePath selectablePath;
private final String customReadExpression;
private final String customWriteExpression;
private final boolean nullable;
@ -33,6 +36,7 @@ public class SelectableMappingImpl extends SqlTypedMappingImpl implements Select
public SelectableMappingImpl(
String containingTableExpression,
String selectionExpression,
SelectablePath selectablePath,
String customReadExpression,
String customWriteExpression,
String columnDefinition,
@ -45,9 +49,11 @@ public class SelectableMappingImpl extends SqlTypedMappingImpl implements Select
boolean isFormula,
JdbcMapping jdbcMapping) {
super( columnDefinition, length, precision, scale, jdbcMapping );
assert selectionExpression != null;
// Save memory by using interned strings. Probability is high that we have multiple duplicate strings
this.containingTableExpression = containingTableExpression == null ? null : containingTableExpression.intern();
this.selectionExpression = selectionExpression == null ? null : selectionExpression.intern();
this.selectionExpression = selectionExpression.intern();
this.selectablePath = selectablePath == null ? new SelectablePath( selectionExpression ) : selectablePath;
this.customReadExpression = customReadExpression == null ? null : customReadExpression.intern();
this.customWriteExpression = customWriteExpression == null ? null : customWriteExpression.intern();
this.nullable = nullable;
@ -65,6 +71,56 @@ public class SelectableMappingImpl extends SqlTypedMappingImpl implements Select
boolean updateable,
final Dialect dialect,
final SqmFunctionRegistry sqmFunctionRegistry) {
return from(
containingTableExpression,
selectable,
null,
jdbcMapping,
typeConfiguration,
insertable,
updateable,
dialect,
sqmFunctionRegistry
);
}
public static SelectableMapping from(
final String containingTableExpression,
final Selectable selectable,
final SelectablePath parentPath,
final JdbcMapping jdbcMapping,
final TypeConfiguration typeConfiguration,
boolean insertable,
boolean updateable,
final Dialect dialect,
final SqmFunctionRegistry sqmFunctionRegistry) {
return from(
containingTableExpression,
selectable,
parentPath,
selectable instanceof Column
? ( (Column) selectable ).getQuotedName( dialect )
: selectable.getText(),
jdbcMapping,
typeConfiguration,
insertable,
updateable,
dialect,
sqmFunctionRegistry
);
}
public static SelectableMapping from(
final String containingTableExpression,
final Selectable selectable,
final SelectablePath parentPath,
final String selectableName,
final JdbcMapping jdbcMapping,
final TypeConfiguration typeConfiguration,
boolean insertable,
boolean updateable,
final Dialect dialect,
final SqmFunctionRegistry sqmFunctionRegistry) {
final String columnExpression;
final String columnDefinition;
final Long length;
@ -92,6 +148,9 @@ public class SelectableMappingImpl extends SqlTypedMappingImpl implements Select
return new SelectableMappingImpl(
containingTableExpression,
columnExpression,
parentPath == null
? null
: parentPath.append( selectableName ),
selectable.getCustomReadExpression(),
selectable.getCustomWriteExpression(),
columnDefinition,
@ -126,6 +185,16 @@ public class SelectableMappingImpl extends SqlTypedMappingImpl implements Select
return selectionExpression;
}
@Override
public String getSelectableName() {
return selectablePath == null ? null : selectablePath.getSelectableName();
}
@Override
public SelectablePath getSelectablePath() {
return selectablePath;
}
@Override
public String getCustomReadExpression() {
return customReadExpression;

View File

@ -77,8 +77,6 @@ import org.hibernate.mapping.Value;
import org.hibernate.metadata.CollectionMetadata;
import org.hibernate.metamodel.mapping.ForeignKeyDescriptor;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.metamodel.mapping.SelectableConsumer;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.internal.MappingModelCreationHelper;
import org.hibernate.metamodel.mapping.internal.PluralAttributeMappingImpl;
import org.hibernate.metamodel.model.domain.NavigableRole;
@ -113,13 +111,13 @@ import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.ast.tree.select.QuerySpec;
import org.hibernate.sql.ast.tree.select.SelectClause;
import org.hibernate.sql.ast.tree.select.SelectStatement;
import org.hibernate.sql.exec.spi.JdbcParameterBinder;
import org.hibernate.sql.model.ModelMutationLogging;
import org.hibernate.sql.model.MutationType;
import org.hibernate.sql.model.TableMapping;
import org.hibernate.sql.model.TableMapping.MutationDetails;
import org.hibernate.sql.model.ast.ColumnValueBinding;
import org.hibernate.sql.model.ast.ColumnValueParameter;
import org.hibernate.sql.model.ast.ColumnValueParameterList;
import org.hibernate.sql.model.ast.ColumnWriteFragment;
import org.hibernate.sql.model.ast.MutatingTableReference;
import org.hibernate.sql.model.ast.RestrictedTableMutation;
@ -1741,15 +1739,12 @@ public abstract class AbstractCollectionPersister
final PluralAttributeMapping attributeMapping = getAttributeMapping();
final ForeignKeyDescriptor keyDescriptor = attributeMapping.getKeyDescriptor();
final java.util.List<JdbcParameterBinder> parameterBinders = arrayList( keyDescriptor.getJdbcTypeCount() );
keyDescriptor.getKeyPart().forEachSelectable( (selectionIndex, selectableMapping) -> {
final ColumnReference columnReference = new ColumnReference( tableReference, selectableMapping );
final ColumnValueParameter columnValueParameter = new ColumnValueParameter(
columnReference,
ParameterUsage.RESTRICT
);
parameterBinders.add( columnValueParameter );
} );
final ColumnValueParameterList parameterBinders = new ColumnValueParameterList(
tableReference,
ParameterUsage.RESTRICT,
keyDescriptor.getJdbcTypeCount()
);
keyDescriptor.getKeyPart().forEachSelectable( parameterBinders );
final TableMapping tableMapping = tableReference.getTableMapping();
return new JdbcDeleteMutation(
@ -1780,31 +1775,26 @@ public abstract class AbstractCollectionPersister
assert fkDescriptor != null;
final int keyColumnCount = fkDescriptor.getJdbcTypeCount();
final ColumnValueParameterList parameterBinders = new ColumnValueParameterList(
tableReference,
ParameterUsage.RESTRICT,
keyColumnCount
);
final java.util.List<ColumnValueBinding> keyRestrictionBindings = arrayList( keyColumnCount );
final java.util.List<ColumnValueParameter> parameters = arrayList( keyColumnCount );
//noinspection Convert2Lambda
fkDescriptor.getKeyPart().forEachSelectable( new SelectableConsumer() {
@Override
public void accept(int selectionIndex, SelectableMapping selectableMapping) {
final ColumnReference columnReference = new ColumnReference( tableReference, selectableMapping );
final ColumnValueParameter columnValueParameter = new ColumnValueParameter(
columnReference,
ParameterUsage.RESTRICT
);
parameters.add( columnValueParameter );
keyRestrictionBindings.add(
new ColumnValueBinding(
columnReference,
new ColumnWriteFragment(
"?",
columnValueParameter,
selectableMapping.getJdbcMapping()
)
)
);
}
} );
fkDescriptor.getKeyPart().forEachSelectable( parameterBinders );
for ( ColumnValueParameter columnValueParameter : parameterBinders ) {
final ColumnReference columnReference = columnValueParameter.getColumnReference();
keyRestrictionBindings.add(
new ColumnValueBinding(
columnReference,
new ColumnWriteFragment(
"?",
columnValueParameter,
columnReference.getJdbcMapping()
)
)
);
}
//noinspection unchecked,rawtypes
return (RestrictedTableMutation) new TableDeleteStandard(
@ -1813,7 +1803,7 @@ public abstract class AbstractCollectionPersister
"one-shot delete for " + getRolePath(),
keyRestrictionBindings,
Collections.emptyList(),
parameters
parameterBinders
);
}

View File

@ -42,10 +42,8 @@ import org.hibernate.persister.collection.mutation.UpdateRowsCoordinatorNoOp;
import org.hibernate.persister.collection.mutation.UpdateRowsCoordinatorStandard;
import org.hibernate.persister.spi.PersisterCreationContext;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.exec.spi.JdbcParameterBinder;
import org.hibernate.sql.model.ast.ColumnValueParameter;
import org.hibernate.sql.model.ast.ColumnValueParameterList;
import org.hibernate.sql.model.ast.MutatingTableReference;
import org.hibernate.sql.model.ast.RestrictedTableMutation;
import org.hibernate.sql.model.ast.TableInsert;
@ -57,7 +55,6 @@ import org.hibernate.sql.model.jdbc.JdbcDeleteMutation;
import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
import org.hibernate.sql.model.jdbc.JdbcUpdateMutation;
import static org.hibernate.internal.util.collections.CollectionHelper.arrayList;
import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER;
import static org.hibernate.sql.model.ModelMutationLogging.MODEL_MUTATION_LOGGER_DEBUG_ENABLED;
@ -289,26 +286,18 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
final PluralAttributeMapping attributeMapping = getAttributeMapping();
final ForeignKeyDescriptor foreignKey = attributeMapping.getKeyDescriptor();
foreignKey.getKeyPart().forEachSelectable( (position, mapping) -> insertBuilder.addValueColumn( mapping ) );
foreignKey.getKeyPart().forEachSelectable( insertBuilder );
final CollectionIdentifierDescriptor identifierDescriptor = attributeMapping.getIdentifierDescriptor();
final CollectionPart indexDescriptor = attributeMapping.getIndexDescriptor();
if ( identifierDescriptor != null ) {
identifierDescriptor.forEachSelectable( (position, mapping) -> insertBuilder.addValueColumn( mapping ) );
identifierDescriptor.forEachSelectable( insertBuilder );
}
else if ( indexDescriptor != null ) {
indexDescriptor.forEachSelectable( (position, mapping) -> {
if ( indexColumnIsSettable[position] ) {
insertBuilder.addValueColumn( mapping );
}
} );
indexDescriptor.forEachInsertable( insertBuilder );
}
attributeMapping.getElementDescriptor().forEachSelectable( (position, mapping) -> {
if ( elementColumnIsSettable[position] ) {
insertBuilder.addValueColumn( mapping );
}
} );
attributeMapping.getElementDescriptor().forEachInsertable( insertBuilder );
}
private JdbcMutationOperation buildGeneratedInsertRowOperation(MutatingTableReference tableReference) {
@ -422,13 +411,12 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
assert foreignKey != null;
final int keyColumnCount = foreignKey.getJdbcTypeCount();
final java.util.List<JdbcParameterBinder> parameterBinders = arrayList( keyColumnCount );
foreignKey.getKeyPart().forEachSelectable( (selectionIndex, selectableMapping) -> parameterBinders.add(
new ColumnValueParameter(
new ColumnReference( tableReference, selectableMapping ),
ParameterUsage.RESTRICT
)
) );
final ColumnValueParameterList parameterBinders = new ColumnValueParameterList(
tableReference,
ParameterUsage.RESTRICT,
keyColumnCount
);
foreignKey.getKeyPart().forEachSelectable( parameterBinders );
return new JdbcUpdateMutation(
getCollectionTableMapping(),
@ -460,14 +448,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// SET
attribute.getElementDescriptor().forEachSelectable( (selectionIndex, selectableMapping) -> {
if ( ! selectableMapping.isUpdateable() || selectableMapping.isFormula() ) {
return;
}
updateBuilder.addValueColumn( selectableMapping );
} );
attribute.getElementDescriptor().forEachUpdatable( updateBuilder );
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// WHERE
@ -578,15 +559,12 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
final CollectionTableMapping tableMapping = (CollectionTableMapping) tableReference.getTableMapping();
final int keyColumnCount = foreignKey.getJdbcTypeCount();
final java.util.List<JdbcParameterBinder> parameterBinders = arrayList( keyColumnCount );
foreignKey.getKeyPart().forEachSelectable( (selectionIndex, selectableMapping) -> {
final ColumnReference columnReference = new ColumnReference( tableReference, selectableMapping );
final ColumnValueParameter columnValueParameter = new ColumnValueParameter(
columnReference,
ParameterUsage.RESTRICT
);
parameterBinders.add( columnValueParameter );
} );
final ColumnValueParameterList parameterBinders = new ColumnValueParameterList(
tableReference,
ParameterUsage.RESTRICT,
keyColumnCount
);
foreignKey.getKeyPart().forEachSelectable( parameterBinders );
return new JdbcDeleteMutation(
tableMapping,

View File

@ -57,11 +57,11 @@ import org.hibernate.sql.ast.spi.SqlAstCreationState;
import org.hibernate.sql.ast.tree.expression.ColumnReference;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.exec.spi.JdbcParameterBinder;
import org.hibernate.sql.model.MutationOperation;
import org.hibernate.sql.model.MutationType;
import org.hibernate.sql.model.ast.ColumnValueBinding;
import org.hibernate.sql.model.ast.ColumnValueParameter;
import org.hibernate.sql.model.ast.ColumnValueParameterList;
import org.hibernate.sql.model.ast.ColumnWriteFragment;
import org.hibernate.sql.model.ast.MutatingTableReference;
import org.hibernate.sql.model.ast.RestrictedTableMutation;
@ -343,36 +343,33 @@ public class OneToManyPersister extends AbstractCollectionPersister {
? keyColumnCount + indexColumnNames.length
: keyColumnCount;
final ColumnValueParameterList parameterBinders = new ColumnValueParameterList(
tableReference,
ParameterUsage.RESTRICT,
keyColumnCount
);
final List<ColumnValueBinding> keyRestrictionBindings = arrayList( keyColumnCount );
final List<ColumnValueParameter> parameters = arrayList( keyColumnCount );
final List<ColumnValueBinding> valueBindings = arrayList( valuesCount );
fkDescriptor.getKeyPart().forEachSelectable( (selectionIndex, selectableMapping) -> {
final ColumnReference columnReference = new ColumnReference( tableReference, selectableMapping );
final ColumnValueParameter columnValueParameter = new ColumnValueParameter(
columnReference,
ParameterUsage.RESTRICT
);
parameters.add( columnValueParameter );
fkDescriptor.getKeyPart().forEachSelectable( parameterBinders );
for ( ColumnValueParameter columnValueParameter : parameterBinders ) {
final ColumnReference columnReference = columnValueParameter.getColumnReference();
keyRestrictionBindings.add(
new ColumnValueBinding(
columnReference,
new ColumnWriteFragment(
"?",
columnValueParameter,
selectableMapping.getJdbcMapping()
columnReference.getJdbcMapping()
)
)
);
valueBindings.add( new ColumnValueBinding(
columnReference,
new ColumnWriteFragment(
"null",
null,
selectableMapping.getJdbcMapping()
valueBindings.add(
new ColumnValueBinding(
columnReference,
new ColumnWriteFragment( "null", columnReference.getJdbcMapping() )
)
) );
} );
);
}
if ( hasIndex && !indexContainsFormula ) {
getAttributeMapping().getIndexDescriptor().forEachSelectable( (selectionIndex, selectableMapping) -> {
@ -381,7 +378,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
}
valueBindings.add(
new ColumnValueBinding( new ColumnReference( tableReference, selectableMapping ),
new ColumnWriteFragment( "null", null, selectableMapping.getJdbcMapping() )
new ColumnWriteFragment( "null", selectableMapping.getJdbcMapping() )
) );
} );
}
@ -393,7 +390,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
valueBindings,
keyRestrictionBindings,
null,
parameters,
parameterBinders,
sqlWhereString
);
}
@ -532,15 +529,12 @@ public class OneToManyPersister extends AbstractCollectionPersister {
final CollectionTableMapping tableMapping = (CollectionTableMapping) tableReference.getTableMapping();
final int keyColumnCount = foreignKey.getJdbcTypeCount();
final java.util.List<JdbcParameterBinder> parameterBinders = arrayList( keyColumnCount );
foreignKey.getKeyPart().forEachSelectable( (selectionIndex, selectableMapping) -> {
final ColumnReference columnReference = new ColumnReference( tableReference, selectableMapping );
final ColumnValueParameter columnValueParameter = new ColumnValueParameter(
columnReference,
ParameterUsage.RESTRICT
);
parameterBinders.add( columnValueParameter );
} );
final ColumnValueParameterList parameterBinders = new ColumnValueParameterList(
tableReference,
ParameterUsage.RESTRICT,
keyColumnCount
);
foreignKey.getKeyPart().forEachSelectable( parameterBinders );
return new JdbcDeleteMutation(
tableMapping,
@ -637,16 +631,11 @@ public class OneToManyPersister extends AbstractCollectionPersister {
final TableUpdateBuilderStandard<JdbcMutationOperation> updateBuilder = new TableUpdateBuilderStandard<>( this, tableReference, getFactory() );
final PluralAttributeMapping attributeMapping = getAttributeMapping();
attributeMapping.getKeyDescriptor().getKeyPart().forEachSelectable( (position, mapping) -> updateBuilder.addValueColumn( mapping ) );
attributeMapping.getKeyDescriptor().getKeyPart().forEachSelectable( updateBuilder );
final CollectionPart indexDescriptor = attributeMapping.getIndexDescriptor();
if ( indexDescriptor != null ) {
indexDescriptor.forEachSelectable( (position,mapping) -> {
if ( !mapping.isUpdateable() ) {
return;
}
updateBuilder.addValueColumn( mapping );
} );
indexDescriptor.forEachUpdatable( updateBuilder );
}
final EntityCollectionPart elementDescriptor = (EntityCollectionPart) attributeMapping.getElementDescriptor();
@ -718,15 +707,12 @@ public class OneToManyPersister extends AbstractCollectionPersister {
final CollectionTableMapping tableMapping = (CollectionTableMapping) tableReference.getTableMapping();
final int keyColumnCount = foreignKey.getJdbcTypeCount();
final java.util.List<JdbcParameterBinder> parameterBinders = arrayList( keyColumnCount );
foreignKey.getKeyPart().forEachSelectable( (selectionIndex, selectableMapping) -> {
final ColumnReference columnReference = new ColumnReference( tableReference, selectableMapping );
final ColumnValueParameter columnValueParameter = new ColumnValueParameter(
columnReference,
ParameterUsage.RESTRICT
);
parameterBinders.add( columnValueParameter );
} );
final ColumnValueParameterList parameterBinders = new ColumnValueParameterList(
tableReference,
ParameterUsage.RESTRICT,
keyColumnCount
);
foreignKey.getKeyPart().forEachSelectable( parameterBinders );
return new JdbcUpdateMutation(
tableMapping,
@ -757,11 +743,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
// for each index column:
// * add a restriction based on the previous value
// * add an assignment for the new value
getAttributeMapping().getIndexDescriptor().forEachSelectable( (selectionIndex, selectableMapping) -> {
if ( selectableMapping.isUpdateable() ) {
updateBuilder.addValueColumn( selectableMapping );
}
} );
getAttributeMapping().getIndexDescriptor().forEachUpdatable( updateBuilder );
final RestrictedTableMutation<JdbcMutationOperation> tableUpdate = updateBuilder.buildMutation();
return tableUpdate.createMutationOperation( null, getFactory() );

View File

@ -5235,6 +5235,7 @@ public abstract class AbstractEntityPersister
(BasicType<?>) attrType,
tableExpression,
attrColumnNames[0],
null,
false,
null,
null,
@ -5323,6 +5324,7 @@ public abstract class AbstractEntityPersister
(BasicType<?>) attrType,
tableExpression,
attrColumnExpression,
null,
isAttrColumnExpressionFormula,
customReadExpr,
customWriteExpr,

View File

@ -66,10 +66,12 @@ import org.hibernate.sql.ast.tree.predicate.NullnessPredicate;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.model.ast.builder.MutationGroupBuilder;
import org.hibernate.sql.model.ast.builder.TableInsertBuilder;
import org.hibernate.sql.model.ast.builder.TableMutationBuilder;
import org.hibernate.type.BasicType;
import org.hibernate.type.Type;
import static org.hibernate.persister.entity.DiscriminatorHelper.NULL_DISCRIMINATOR;
import static org.hibernate.sql.model.ast.builder.TableMutationBuilder.NULL;
/**
* The default implementation of the {@link EntityPersister} interface.
@ -634,7 +636,7 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
final TableInsertBuilder tableInsertBuilder = insertGroupBuilder.getTableDetailsBuilder( getRootTableName() );
tableInsertBuilder.addValueColumn(
discriminatorColumnName,
discriminatorValue == NULL_DISCRIMINATOR ? null : discriminatorSQLValue,
discriminatorValue == NULL_DISCRIMINATOR ? NULL : discriminatorSQLValue,
getDiscriminatorMapping().getJdbcMapping()
);
}
@ -771,7 +773,7 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
String alias,
TableGroup tableGroup,
SqlExpressionResolver sqlExpressionResolver) {
final String columnReferenceKey;
final SqlExpressionResolver.ColumnReferenceKey columnReferenceKey;
final String discriminatorExpression;
if ( isDiscriminatorFormula() ) {
discriminatorExpression = getDiscriminatorFormulaTemplate();

Some files were not shown because too many files have changed in this diff Show More