HHH-15443 Allow JdbcType to wrap read and write expressions

Authored by Christian Beikov, 2022-08-23 14:47:31 +02:00; committed by Steve Ebersole
parent 67f8bee35a
commit 16ca1a0595
53 changed files with 1564 additions and 293 deletions
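Summary of the change: the JdbcType contract gains hooks that let a type descriptor wrap the SQL it is written and read with. On the write side, appendWriteExpression lets a descriptor decorate the bind marker (for example rendering "cast(? as json)" instead of a bare "?"); on the read side, wrapTopLevelSelectionExpression lets it decorate the selected expression (for example "extract(epoch from ...)"). The dialect changes below use this to register "casting" JSON/inet/interval/struct descriptors when the PostgreSQL PGobject class is not usable or the driver otherwise needs an explicit cast. The following sketch is illustrative only and not part of the commit; it mirrors the appendWriteExpression override of the new MySQLCastingJsonJdbcType further down, and the class name ExampleCastingJsonJdbcType is made up.

import org.hibernate.dialect.Dialect;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.descriptor.jdbc.JsonJdbcType;

// Hypothetical example, not part of this commit: a JSON descriptor that wraps
// the JDBC bind marker in an explicit cast, in the style of the casting types below.
public class ExampleCastingJsonJdbcType extends JsonJdbcType {

	public ExampleCastingJsonJdbcType() {
		super( null ); // no aggregate (embeddable) mapping
	}

	@Override
	public void appendWriteExpression(String writeExpression, SqlAppender appender, Dialect dialect) {
		// Renders the write expression (normally "?") as "cast(? as json)"
		appender.append( "cast(" );
		appender.append( writeExpression );
		appender.append( " as json)" );
	}
}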

File: JsonMappingTests.java

@@ -14,6 +14,10 @@ import java.util.Map;
 import org.hibernate.annotations.JdbcTypeCode;
 import org.hibernate.cfg.AvailableSettings;
+import org.hibernate.dialect.AbstractHANADialect;
+import org.hibernate.dialect.DerbyDialect;
+import org.hibernate.dialect.OracleDialect;
+import org.hibernate.dialect.SybaseDialect;
 import org.hibernate.metamodel.mapping.internal.BasicAttributeMapping;
 import org.hibernate.metamodel.spi.MappingMetamodelImplementor;
 import org.hibernate.persister.entity.EntityPersister;
@@ -26,6 +30,9 @@ import org.hibernate.testing.orm.junit.ServiceRegistry;
 import org.hibernate.testing.orm.junit.SessionFactory;
 import org.hibernate.testing.orm.junit.SessionFactoryScope;
 import org.hibernate.testing.orm.junit.Setting;
+import org.hibernate.testing.orm.junit.SkipForDialect;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

 import jakarta.persistence.Entity;
@@ -36,6 +43,8 @@ import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.isOneOf;
+import static org.hamcrest.Matchers.isA;
+import static org.hamcrest.Matchers.notNullValue;

 /**
  * @author Christian Beikov
@@ -60,10 +69,37 @@ public abstract class JsonMappingTests {
 		}
 	}

-	private final boolean supportsObjectMapKey;
+	private final Map<String, String> stringMap;
+	private final Map<StringNode, StringNode> objectMap;
+	private final List<StringNode> list;
+	private final String json;

 	protected JsonMappingTests(boolean supportsObjectMapKey) {
-		this.supportsObjectMapKey = supportsObjectMapKey;
+		this.stringMap = Map.of( "name", "ABC" );
+		this.objectMap = supportsObjectMapKey ? Map.of(
+				new StringNode( "name" ),
+				new StringNode( "ABC" )
+		) : null;
+		this.list = List.of( new StringNode( "ABC" ) );
+		this.json = "{\"name\":\"abc\"}";
+	}
+
+	@BeforeEach
+	public void setup(SessionFactoryScope scope) {
+		scope.inTransaction(
+				(session) -> {
+					session.persist( new EntityWithJson( 1, stringMap, objectMap, list, json ) );
+				}
+		);
+	}
+
+	@AfterEach
+	public void tearDown(SessionFactoryScope scope) {
+		scope.inTransaction(
+				(session) -> {
+					session.remove( session.find( EntityWithJson.class, 1 ) );
+				}
+		);
 	}

 	@Test
@@ -74,39 +110,56 @@ public abstract class JsonMappingTests {
 		final EntityPersister entityDescriptor = mappingMetamodel.findEntityDescriptor( EntityWithJson.class );
 		final JdbcTypeRegistry jdbcTypeRegistry = mappingMetamodel.getTypeConfiguration().getJdbcTypeRegistry();
-		final BasicAttributeMapping payloadAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "payload" );
-		final BasicAttributeMapping objectMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "objectMap" );
-		final BasicAttributeMapping listAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "list" );
+		final BasicAttributeMapping stringMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping(
+				"stringMap" );
+		final BasicAttributeMapping objectMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping(
+				"objectMap" );
+		final BasicAttributeMapping listAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping(
+				"list" );
 		final BasicAttributeMapping jsonAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "jsonString" );
-		assertThat( payloadAttribute.getJavaType().getJavaTypeClass(), equalTo( Map.class ) );
+		assertThat( stringMapAttribute.getJavaType().getJavaTypeClass(), equalTo( Map.class ) );
 		assertThat( objectMapAttribute.getJavaType().getJavaTypeClass(), equalTo( Map.class ) );
 		assertThat( listAttribute.getJavaType().getJavaTypeClass(), equalTo( List.class ) );
 		assertThat( jsonAttribute.getJavaType().getJavaTypeClass(), equalTo( String.class ) );
 		final JdbcType jsonType = jdbcTypeRegistry.getDescriptor( SqlTypes.JSON );
-		assertThat( payloadAttribute.getJdbcMapping().getJdbcType(), is( jsonType ) );
-		assertThat( objectMapAttribute.getJdbcMapping().getJdbcType(), is( jsonType ) );
-		assertThat( listAttribute.getJdbcMapping().getJdbcType(), is( jsonType ) );
-		assertThat( jsonAttribute.getJdbcMapping().getJdbcType(), is( jsonType ) );
-
-		Map<String, String> stringMap = Map.of( "name", "ABC" );
-		Map<StringNode, StringNode> objectMap = supportsObjectMapKey ? Map.of( new StringNode( "name" ), new StringNode( "ABC" ) ) : null;
-		List<StringNode> list = List.of( new StringNode( "ABC" ) );
-		String json = "{\"name\":\"abc\"}";
-		// PostgreSQL returns the JSON slightly formatted
-		String alternativeJson = "{\"name\": \"abc\"}";
-		scope.inTransaction(
-				(session) -> {
-					session.persist( new EntityWithJson( 1, stringMap, objectMap, list, json ) );
-				}
-		);
+		assertThat( stringMapAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) jsonType.getClass() ) );
+		assertThat( objectMapAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) jsonType.getClass() ) );
+		assertThat( listAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) jsonType.getClass() ) );
+		assertThat( jsonAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) jsonType.getClass() ) );
+	}
+
+	@Test
+	public void verifyReadWorks(SessionFactoryScope scope) {
 		scope.inTransaction(
 				(session) -> {
 					EntityWithJson entityWithJson = session.find( EntityWithJson.class, 1 );
-					assertThat( entityWithJson.payload, is( stringMap ) );
+					assertThat( entityWithJson.stringMap, is( stringMap ) );
+					assertThat( entityWithJson.objectMap, is( objectMap ) );
+					assertThat( entityWithJson.list, is( list ) );
+				}
+		);
+	}
+
+	@Test
+	@SkipForDialect(dialectClass = DerbyDialect.class, reason = "Derby doesn't support comparing CLOBs with the = operator")
+	@SkipForDialect(dialectClass = AbstractHANADialect.class, matchSubTypes = true, reason = "HANA doesn't support comparing LOBs with the = operator")
+	@SkipForDialect(dialectClass = SybaseDialect.class, matchSubTypes = true, reason = "Sybase doesn't support comparing LOBs with the = operator")
+	@SkipForDialect(dialectClass = OracleDialect.class, matchSubTypes = true, reason = "Oracle doesn't support comparing JSON with the = operator")
+	public void verifyComparisonWorks(SessionFactoryScope scope) {
+		scope.inTransaction(
+				(session) -> {
+					// PostgreSQL returns the JSON slightly formatted
+					String alternativeJson = "{\"name\": \"abc\"}";
+					EntityWithJson entityWithJson = session.createQuery(
+							"from EntityWithJson e where e.stringMap = :param",
+							EntityWithJson.class
+					)
+							.setParameter( "param", stringMap )
+							.getSingleResult();
+					assertThat( entityWithJson, notNullValue() );
+					assertThat( entityWithJson.stringMap, is( stringMap ) );
 					assertThat( entityWithJson.objectMap, is( objectMap ) );
 					assertThat( entityWithJson.list, is( list ) );
 					assertThat( entityWithJson.jsonString, isOneOf( json, alternativeJson ) );
@@ -149,7 +202,7 @@ public abstract class JsonMappingTests {
 		//tag::basic-json-example[]
 		@JdbcTypeCode( SqlTypes.JSON )
-		private Map<String, String> payload;
+		private Map<String, String> stringMap;
 		//end::basic-json-example[]

 		@JdbcTypeCode( SqlTypes.JSON )
@@ -166,12 +219,12 @@ public abstract class JsonMappingTests {
 		public EntityWithJson(
 				Integer id,
-				Map<String, String> payload,
+				Map<String, String> stringMap,
 				Map<StringNode, StringNode> objectMap,
 				List<StringNode> list,
 				String jsonString) {
 			this.id = id;
-			this.payload = payload;
+			this.stringMap = stringMap;
 			this.objectMap = objectMap;
 			this.list = list;
 			this.jsonString = jsonString;

File: XmlMappingTests.java

@@ -11,6 +11,10 @@ import java.util.Map;
 import org.hibernate.annotations.JdbcTypeCode;
 import org.hibernate.cfg.AvailableSettings;
+import org.hibernate.dialect.AbstractHANADialect;
+import org.hibernate.dialect.DerbyDialect;
+import org.hibernate.dialect.OracleDialect;
+import org.hibernate.dialect.SybaseDialect;
 import org.hibernate.metamodel.mapping.internal.BasicAttributeMapping;
 import org.hibernate.metamodel.spi.MappingMetamodelImplementor;
 import org.hibernate.persister.entity.EntityPersister;
@@ -23,6 +27,9 @@ import org.hibernate.testing.orm.junit.ServiceRegistry;
 import org.hibernate.testing.orm.junit.SessionFactory;
 import org.hibernate.testing.orm.junit.SessionFactoryScope;
 import org.hibernate.testing.orm.junit.Setting;
+import org.hibernate.testing.orm.junit.SkipForDialect;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;

 import jakarta.persistence.Entity;
@@ -34,6 +41,8 @@ import jakarta.xml.bind.annotation.XmlRootElement;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.isA;
+import static org.hamcrest.Matchers.notNullValue;

 /**
  * @author Christian Beikov
@@ -58,10 +67,36 @@ public abstract class XmlMappingTests {
 		}
 	}

-	private final boolean supportsObjectMapKey;
+	private final Map<String, StringNode> stringMap;
+	private final Map<StringNode, StringNode> objectMap;
+	private final List<StringNode> list;

 	protected XmlMappingTests(boolean supportsObjectMapKey) {
-		this.supportsObjectMapKey = supportsObjectMapKey;
+		this.stringMap = Map.of( "name", new StringNode( "ABC" ) );
+		this.objectMap = supportsObjectMapKey ? Map.of(
+				new StringNode( "name" ),
+				new StringNode( "ABC" )
+		) : null;
+		this.list = List.of( new StringNode( "ABC" ) );
+	}
+
+	@BeforeEach
+	public void setup(SessionFactoryScope scope) {
+		scope.inTransaction(
+				(session) -> {
+					session.persist( new EntityWithXml( 1, stringMap, objectMap, list ) );
+				}
+		);
+	}
+
+	@AfterEach
+	public void tearDown(SessionFactoryScope scope) {
+		scope.inTransaction(
+				(session) -> {
+					session.remove( session.find( EntityWithXml.class, 1 ) );
+				}
+		);
 	}

 	@Test
@@ -72,27 +107,24 @@ public abstract class XmlMappingTests {
 		final EntityPersister entityDescriptor = mappingMetamodel.findEntityDescriptor( EntityWithXml.class );
 		final JdbcTypeRegistry jdbcTypeRegistry = mappingMetamodel.getTypeConfiguration().getJdbcTypeRegistry();
-		final BasicAttributeMapping stringMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "stringMap" );
-		final BasicAttributeMapping objectMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "objectMap" );
-		final BasicAttributeMapping listAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "list" );
+		final BasicAttributeMapping stringMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping(
+				"stringMap" );
+		final BasicAttributeMapping objectMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping(
+				"objectMap" );
+		final BasicAttributeMapping listAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping(
+				"list" );
 		assertThat( stringMapAttribute.getJavaType().getJavaTypeClass(), equalTo( Map.class ) );
 		assertThat( objectMapAttribute.getJavaType().getJavaTypeClass(), equalTo( Map.class ) );
 		assertThat( listAttribute.getJavaType().getJavaTypeClass(), equalTo( List.class ) );
 		final JdbcType xmlType = jdbcTypeRegistry.getDescriptor( SqlTypes.SQLXML );
-		assertThat( stringMapAttribute.getJdbcMapping().getJdbcType(), is( xmlType ) );
-		assertThat( objectMapAttribute.getJdbcMapping().getJdbcType(), is( xmlType ) );
-		assertThat( listAttribute.getJdbcMapping().getJdbcType(), is( xmlType ) );
-
-		Map<String, StringNode> stringMap = Map.of( "name", new StringNode( "ABC" ) );
-		Map<StringNode, StringNode> objectMap = supportsObjectMapKey ? Map.of( new StringNode( "name" ), new StringNode( "ABC" ) ) : null;
-		List<StringNode> list = List.of( new StringNode( "ABC" ) );
-		scope.inTransaction(
-				(session) -> {
-					session.persist( new EntityWithXml( 1, stringMap, objectMap, list ) );
-				}
-		);
+		assertThat( stringMapAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) xmlType.getClass() ) );
+		assertThat( objectMapAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) xmlType.getClass() ) );
+		assertThat( listAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) xmlType.getClass() ) );
+	}
+
+	@Test
+	public void verifyReadWorks(SessionFactoryScope scope) {
 		scope.inTransaction(
 				(session) -> {
 					EntityWithXml entityWithXml = session.find( EntityWithXml.class, 1 );
@@ -103,6 +135,28 @@ public abstract class XmlMappingTests {
 		);
 	}

+	@Test
+	@SkipForDialect(dialectClass = DerbyDialect.class, reason = "Derby doesn't support comparing CLOBs with the = operator")
+	@SkipForDialect(dialectClass = AbstractHANADialect.class, matchSubTypes = true, reason = "HANA doesn't support comparing LOBs with the = operator")
+	@SkipForDialect(dialectClass = SybaseDialect.class, matchSubTypes = true, reason = "Sybase doesn't support comparing LOBs with the = operator")
+	@SkipForDialect(dialectClass = OracleDialect.class, matchSubTypes = true, reason = "Oracle doesn't support comparing JSON with the = operator")
+	public void verifyComparisonWorks(SessionFactoryScope scope) {
+		scope.inTransaction(
+				(session) -> {
+					EntityWithXml entityWithJson = session.createQuery(
+							"from EntityWithXml e where e.stringMap = :param",
+							EntityWithXml.class
+					)
+							.setParameter( "param", stringMap )
+							.getSingleResult();
+					assertThat( entityWithJson, notNullValue() );
+					assertThat( entityWithJson.stringMap, is( stringMap ) );
+					assertThat( entityWithJson.objectMap, is( objectMap ) );
+					assertThat( entityWithJson.list, is( list ) );
+				}
+		);
+	}
+
 	@Entity(name = "EntityWithXml")
 	@Table(name = "EntityWithXml")
 	public static class EntityWithXml {

File: CockroachLegacyDialect.java

@@ -30,6 +30,9 @@ import org.hibernate.boot.model.TypeContributions;
 import org.hibernate.dialect.DatabaseVersion;
 import org.hibernate.dialect.Dialect;
 import org.hibernate.dialect.NationalizationSupport;
+import org.hibernate.dialect.PostgreSQLCastingInetJdbcType;
+import org.hibernate.dialect.PostgreSQLCastingIntervalSecondJdbcType;
+import org.hibernate.dialect.PostgreSQLCastingJsonJdbcType;
 import org.hibernate.dialect.PostgreSQLDriverKind;
 import org.hibernate.dialect.PostgreSQLInetJdbcType;
 import org.hibernate.dialect.PostgreSQLIntervalSecondJdbcType;
@@ -228,7 +231,6 @@ public class CockroachLegacyDialect extends Dialect {
 		final DdlTypeRegistry ddlTypeRegistry = typeContributions.getTypeConfiguration().getDdlTypeRegistry();
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "uuid", this ) );
-		if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOMETRY, "geometry", this ) );
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOGRAPHY, "geography", this ) );
 		ddlTypeRegistry.addDescriptor( new Scale6IntervalSecondDdlType( this ) );
@@ -242,7 +244,6 @@ public class CockroachLegacyDialect extends Dialect {
 			ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "json", this ) );
 		}
 	}
-	}

 	@Override
 	public JdbcType resolveSqlTypeDescriptor(
@@ -338,6 +339,27 @@ public class CockroachLegacyDialect extends Dialect {
 					jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLJsonJdbcType.INSTANCE );
 				}
 			}
+			else {
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
+				if ( getVersion().isSameOrAfter( 20, 0 ) ) {
+					jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
+					jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
+				}
+				else {
+					jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSON_INSTANCE );
+				}
+			}
+		}
+		else {
+			jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
+			jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
+			if ( getVersion().isSameOrAfter( 20, 0 ) ) {
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
+			}
+			else {
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSON_INSTANCE );
+			}
 		}

 		// Force Blob binding to byte[] for CockroachDB

File: MariaDBLegacyDialect.java

@@ -34,8 +34,10 @@ import org.hibernate.sql.ast.tree.Statement;
 import org.hibernate.sql.exec.spi.JdbcOperation;
 import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorMariaDBDatabaseImpl;
 import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
+import org.hibernate.type.SqlTypes;
 import org.hibernate.type.StandardBasicTypes;
 import org.hibernate.type.descriptor.jdbc.JdbcType;
+import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
 import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
 import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
 import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
@@ -141,9 +143,11 @@ public class MariaDBLegacyDialect extends MySQLLegacyDialect {
 	@Override
 	public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
+		final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
+		// Make sure we register the JSON type descriptor before calling super, because MariaDB does not need casting
+		jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, JsonJdbcType.INSTANCE );
 		super.contributeTypes( typeContributions, serviceRegistry );
-		final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration()
-				.getJdbcTypeRegistry();
 		if ( getVersion().isSameOrAfter( 10, 7 ) ) {
 			jdbcTypeRegistry.addDescriptorIfAbsent( VarcharUUIDJdbcType.INSTANCE );
 		}

File: MySQLLegacyDialect.java

@@ -21,6 +21,7 @@ import org.hibernate.dialect.DatabaseVersion;
 import org.hibernate.dialect.Dialect;
 import org.hibernate.dialect.InnoDBStorageEngine;
 import org.hibernate.dialect.MyISAMStorageEngine;
+import org.hibernate.dialect.MySQLCastingJsonJdbcType;
 import org.hibernate.dialect.MySQLServerConfiguration;
 import org.hibernate.dialect.MySQLStorageEngine;
 import org.hibernate.dialect.Replacer;
@@ -630,11 +631,10 @@ public class MySQLLegacyDialect extends Dialect {
 	public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
 		super.contributeTypes( typeContributions, serviceRegistry );
-		final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration()
-				.getJdbcTypeRegistry();
+		final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
 		if ( getMySQLVersion().isSameOrAfter( 5, 7 ) ) {
-			jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, JsonJdbcType.INSTANCE );
+			jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, MySQLCastingJsonJdbcType.INSTANCE );
 		}

 		// MySQL requires a custom binder for binding untyped nulls with the NULL type

File: PostgreSQLLegacyDialect.java

@@ -30,9 +30,14 @@ import org.hibernate.dialect.DatabaseVersion;
 import org.hibernate.dialect.Dialect;
 import org.hibernate.dialect.NationalizationSupport;
 import org.hibernate.dialect.OracleDialect;
+import org.hibernate.dialect.PostgreSQLCastingInetJdbcType;
+import org.hibernate.dialect.PostgreSQLCastingIntervalSecondJdbcType;
+import org.hibernate.dialect.PostgreSQLCastingJsonJdbcType;
+import org.hibernate.dialect.PostgreSQLCastingStructJdbcType;
 import org.hibernate.dialect.PostgreSQLDriverKind;
 import org.hibernate.dialect.PostgreSQLInetJdbcType;
 import org.hibernate.dialect.PostgreSQLIntervalSecondJdbcType;
+import org.hibernate.dialect.PostgreSQLJsonJdbcType;
 import org.hibernate.dialect.PostgreSQLJsonbJdbcType;
 import org.hibernate.dialect.PostgreSQLPGObjectJdbcType;
 import org.hibernate.dialect.PostgreSQLStructJdbcType;
@@ -253,8 +258,6 @@ public class PostgreSQLLegacyDialect extends Dialect {
 		if ( getVersion().isSameOrAfter( 8, 2 ) ) {
 			ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "uuid", this ) );
 		}
-		if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
-			// The following DDL types require that the PGobject class is usable/visible
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( INET, "inet", this ) );
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOMETRY, "geometry", this ) );
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOGRAPHY, "geography", this ) );
@@ -270,7 +273,6 @@ public class PostgreSQLLegacyDialect extends Dialect {
 			}
 		}
 	}
-	}

 	@Override
 	public int getMaxVarcharLength() {
@@ -1334,14 +1336,49 @@
 				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLIntervalSecondJdbcType.INSTANCE );
 				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLStructJdbcType.INSTANCE );
 			}
+			else {
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingStructJdbcType.INSTANCE );
+			}
 			if ( getVersion().isSameOrAfter( 8, 2 ) ) {
 				// HHH-9562
 				jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
 				if ( getVersion().isSameOrAfter( 9, 2 ) ) {
+					if ( getVersion().isSameOrAfter( 9, 4 ) ) {
 						if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
 							jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLJsonbJdbcType.INSTANCE );
 						}
+						else {
+							jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
+						}
+					}
+					else {
+						if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
+							jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLJsonJdbcType.INSTANCE );
+						}
+						else {
+							jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSON_INSTANCE );
+						}
+					}
+				}
+			}
+		}
+		else {
+			jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
+			jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
+			jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingStructJdbcType.INSTANCE );
+			if ( getVersion().isSameOrAfter( 8, 2 ) ) {
+				jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
+				if ( getVersion().isSameOrAfter( 9, 2 ) ) {
+					if ( getVersion().isSameOrAfter( 9, 4 ) ) {
+						jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
+					}
+					else {
+						jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSON_INSTANCE );
+					}
 				}
 			}
 		}

File: MetadataBuildingProcess.java

@@ -64,6 +64,7 @@ import org.hibernate.type.BasicTypeRegistry;
 import org.hibernate.type.SqlTypes;
 import org.hibernate.type.descriptor.java.spi.JavaTypeRegistry;
 import org.hibernate.type.descriptor.jdbc.JdbcType;
+import org.hibernate.type.descriptor.jdbc.JsonAsStringJdbcType;
 import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
 import org.hibernate.type.descriptor.jdbc.XmlAsStringJdbcType;
 import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
@@ -661,29 +662,14 @@ public class MetadataBuildingProcess {
 		addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.POINT, SqlTypes.VARBINARY );
 		addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.GEOGRAPHY, SqlTypes.GEOMETRY );
-		jdbcTypeRegistry.addDescriptorIfAbsent( JsonJdbcType.INSTANCE );
-		jdbcTypeRegistry.addDescriptorIfAbsent( XmlAsStringJdbcType.INSTANCE );
+		jdbcTypeRegistry.addDescriptorIfAbsent( JsonAsStringJdbcType.VARCHAR_INSTANCE );
+		jdbcTypeRegistry.addDescriptorIfAbsent( XmlAsStringJdbcType.VARCHAR_INSTANCE );
 		addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.MATERIALIZED_BLOB, SqlTypes.BLOB );
 		addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.MATERIALIZED_CLOB, SqlTypes.CLOB );
 		addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.MATERIALIZED_NCLOB, SqlTypes.NCLOB );

 		final DdlTypeRegistry ddlTypeRegistry = typeConfiguration.getDdlTypeRegistry();
-		// Fallback to the biggest varchar DdlType when json is requested
-		ddlTypeRegistry.addDescriptorIfAbsent(
-				new DdlTypeImpl(
-						SqlTypes.JSON,
-						ddlTypeRegistry.getTypeName( SqlTypes.VARCHAR, null, null, null ),
-						dialect
-				)
-		);
-		ddlTypeRegistry.addDescriptorIfAbsent(
-				new DdlTypeImpl(
-						SqlTypes.SQLXML,
-						ddlTypeRegistry.getTypeName( SqlTypes.VARCHAR, null, null, null ),
-						dialect
-				)
-		);

 		// Fallback to the geometry DdlType when geography is requested
 		final DdlType geometryType = ddlTypeRegistry.getDescriptor( SqlTypes.GEOMETRY );
 		if ( geometryType != null ) {

File: CockroachDialect.java

@@ -244,8 +244,9 @@ public class CockroachDialect extends Dialect {
 		final DdlTypeRegistry ddlTypeRegistry = typeContributions.getTypeConfiguration().getDdlTypeRegistry();
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "uuid", this ) );
-		if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
-			// The following DDL types require that the PGobject class is usable/visible
+		// The following DDL types require that the PGobject class is usable/visible,
+		// or that a special JDBC type implementation exists, that supports wrapping read/write expressions
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOMETRY, "geometry", this ) );
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOGRAPHY, "geography", this ) );
 		ddlTypeRegistry.addDescriptor( new Scale6IntervalSecondDdlType( this ) );
@@ -254,7 +255,6 @@ public class CockroachDialect extends Dialect {
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( INET, "inet", this ) );
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "jsonb", this ) );
 		}
-	}

 	@Override
 	public JdbcType resolveSqlTypeDescriptor(
@@ -344,6 +344,17 @@ public class CockroachDialect extends Dialect {
 				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLInetJdbcType.INSTANCE );
 				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLJsonbJdbcType.INSTANCE );
 			}
+			else {
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
+			}
+		}
+		else {
+			jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
+			jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
+			jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
+			jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
 		}

 		// Force Blob binding to byte[] for CockroachDB

File: MariaDBDialect.java

@@ -27,8 +27,10 @@ import org.hibernate.sql.ast.tree.Statement;
 import org.hibernate.sql.exec.spi.JdbcOperation;
 import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorMariaDBDatabaseImpl;
 import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
+import org.hibernate.type.SqlTypes;
 import org.hibernate.type.StandardBasicTypes;
 import org.hibernate.type.descriptor.jdbc.JdbcType;
+import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
 import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
 import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
 import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
@@ -133,9 +135,11 @@ public class MariaDBDialect extends MySQLDialect {
 	@Override
 	public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
+		final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
+		// Make sure we register the JSON type descriptor before calling super, because MariaDB does not need casting
+		jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, JsonJdbcType.INSTANCE );
 		super.contributeTypes( typeContributions, serviceRegistry );
-		final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration()
-				.getJdbcTypeRegistry();
 		if ( getVersion().isSameOrAfter( 10, 7 ) ) {
 			jdbcTypeRegistry.addDescriptorIfAbsent( VarcharUUIDJdbcType.INSTANCE );
 		}

New file: MySQLCastingJsonJdbcType.java

@ -0,0 +1,45 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
/**
* @author Christian Beikov
*/
public class MySQLCastingJsonJdbcType extends JsonJdbcType {
/**
* Singleton access
*/
public static final JsonJdbcType INSTANCE = new MySQLCastingJsonJdbcType( null );
public MySQLCastingJsonJdbcType(EmbeddableMappingType embeddableMappingType) {
super( embeddableMappingType );
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(
EmbeddableMappingType mappingType,
String sqlType,
RuntimeModelCreationContext creationContext) {
return new MySQLCastingJsonJdbcType( mappingType );
}
@Override
public void appendWriteExpression(
String writeExpression,
SqlAppender appender,
Dialect dialect) {
appender.append( "cast(" );
appender.append( writeExpression );
appender.append( " as json)" );
}
}

File: MySQLDialect.java

@@ -627,10 +627,9 @@ public class MySQLDialect extends Dialect {
 	public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
 		super.contributeTypes( typeContributions, serviceRegistry );
-		final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration()
-				.getJdbcTypeRegistry();
+		final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
-		jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, JsonJdbcType.INSTANCE );
+		jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, MySQLCastingJsonJdbcType.INSTANCE );

 		// MySQL requires a custom binder for binding untyped nulls with the NULL type
 		typeContributions.contributeJdbcType( NullJdbcType.INSTANCE );

New file: PostgreSQLCastingInetJdbcType.java

@ -0,0 +1,111 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.net.InetAddress;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.descriptor.jdbc.JdbcType;
/**
* @author Christian Beikov
*/
public class PostgreSQLCastingInetJdbcType implements JdbcType {
public static final PostgreSQLCastingInetJdbcType INSTANCE = new PostgreSQLCastingInetJdbcType();
@Override
public void appendWriteExpression(
String writeExpression,
SqlAppender appender,
Dialect dialect) {
appender.append( "cast(" );
appender.append( writeExpression );
appender.append( " as inet)" );
}
@Override
public int getJdbcTypeCode() {
return SqlTypes.VARBINARY;
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.INET;
}
@Override
public String toString() {
return "InetSecondJdbcType";
}
@Override
public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaType) {
// No literal support for now
return null;
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
st.setString( index, getStringValue( value, options ) );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
st.setString( name, getStringValue( value, options ) );
}
private String getStringValue(X value, WrapperOptions options) {
return getJavaType().unwrap( value, InetAddress.class, options ).getHostAddress();
}
};
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
return new BasicExtractor<>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return getObject( rs.getString( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return getObject( statement.getString( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException {
return getObject( statement.getString( name ), options );
}
private X getObject(String inetString, WrapperOptions options) throws SQLException {
if ( inetString == null ) {
return null;
}
return getJavaType().wrap( inetString, options );
}
};
}
}

New file: PostgreSQLCastingIntervalSecondJdbcType.java

@ -0,0 +1,162 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.math.BigDecimal;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.SelfRenderingExpression;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.AdjustableJdbcType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeIndicators;
/**
* @author Christian Beikov
*/
public class PostgreSQLCastingIntervalSecondJdbcType implements AdjustableJdbcType {
public static final PostgreSQLCastingIntervalSecondJdbcType INSTANCE = new PostgreSQLCastingIntervalSecondJdbcType();
@Override
public JdbcType resolveIndicatedType(JdbcTypeIndicators indicators, JavaType<?> domainJtd) {
final int scale;
if ( indicators.getColumnScale() == JdbcTypeIndicators.NO_COLUMN_SCALE ) {
scale = domainJtd.getDefaultSqlScale(
indicators.getTypeConfiguration()
.getServiceRegistry()
.getService( JdbcServices.class )
.getDialect(),
this
);
}
else {
scale = indicators.getColumnScale();
}
if ( scale > 6 ) {
// Since the maximum allowed scale on PostgreSQL is 6 (microsecond precision),
// we have to switch to the numeric type if the value is greater
return indicators.getTypeConfiguration().getJdbcTypeRegistry().getDescriptor( SqlTypes.NUMERIC );
}
return this;
}
@Override
public Expression wrapTopLevelSelectionExpression(Expression expression) {
return new SelfRenderingExpression() {
@Override
public void renderToSql(
SqlAppender sqlAppender,
SqlAstTranslator<?> walker,
SessionFactoryImplementor sessionFactory) {
sqlAppender.append( "extract(epoch from " );
expression.accept( walker );
sqlAppender.append( ')' );
}
@Override
public JdbcMappingContainer getExpressionType() {
return expression.getExpressionType();
}
};
}
@Override
public void appendWriteExpression(
String writeExpression,
SqlAppender appender,
Dialect dialect) {
appender.append( '(' );
appender.append( writeExpression );
appender.append( "*interval'1 second)" );
}
@Override
public int getJdbcTypeCode() {
return SqlTypes.NUMERIC;
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.INTERVAL_SECOND;
}
@Override
public String toString() {
return "IntervalSecondJdbcType";
}
@Override
public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaType) {
// No literal support for now
return null;
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
st.setBigDecimal( index, getBigDecimalValue( value, options ) );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
st.setBigDecimal( name, getBigDecimalValue( value, options ) );
}
private BigDecimal getBigDecimalValue(X value, WrapperOptions options) {
return getJavaType().unwrap( value, BigDecimal.class, options ).movePointLeft( 9 );
}
};
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
return new BasicExtractor<>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return getObject( rs.getBigDecimal( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return getObject( statement.getBigDecimal( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException {
return getObject( statement.getBigDecimal( name ), options );
}
private X getObject(BigDecimal bigDecimal, WrapperOptions options) throws SQLException {
if ( bigDecimal == null ) {
return null;
}
return getJavaType().wrap( bigDecimal.movePointRight( 9 ), options );
}
};
}
}

New file: PostgreSQLCastingJsonJdbcType.java

@ -0,0 +1,59 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
/**
* @author Christian Beikov
*/
public class PostgreSQLCastingJsonJdbcType extends JsonJdbcType {
public static final PostgreSQLCastingJsonJdbcType JSON_INSTANCE = new PostgreSQLCastingJsonJdbcType( false, null );
public static final PostgreSQLCastingJsonJdbcType JSONB_INSTANCE = new PostgreSQLCastingJsonJdbcType( true, null );
private final boolean jsonb;
public PostgreSQLCastingJsonJdbcType(boolean jsonb, EmbeddableMappingType embeddableMappingType) {
super( embeddableMappingType );
this.jsonb = jsonb;
}
@Override
public int getDdlTypeCode() {
return SqlTypes.JSON;
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(
EmbeddableMappingType mappingType,
String sqlType,
RuntimeModelCreationContext creationContext) {
return new PostgreSQLCastingJsonJdbcType( jsonb, mappingType );
}
@Override
public void appendWriteExpression(
String writeExpression,
SqlAppender appender,
Dialect dialect) {
appender.append( "cast(" );
appender.append( writeExpression );
appender.append( " as " );
if ( jsonb ) {
appender.append( "jsonb)" );
}
else {
appender.append( "json)" );
}
}
}
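For context (illustrative only, not part of the commit): the mapping code that ends up exercising these casting descriptors is plain JPA/Hibernate code, as in the JsonMappingTests entity earlier in this diff. A minimal sketch, with a made-up entity name, might look like this:

import java.util.Map;

import jakarta.persistence.Entity;
import jakarta.persistence.Id;

import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;

@Entity
public class ExampleDocument {
	@Id
	private Integer id;

	// Mapped to the dialect's JSON DDL type; on PostgreSQL without the PGobject
	// class on the classpath, writes are bound as strings wrapped in cast(? as jsonb).
	@JdbcTypeCode( SqlTypes.JSON )
	private Map<String, String> payload;
}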

New file: PostgreSQLCastingStructJdbcType.java

@ -0,0 +1,91 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
/**
* @author Christian Beikov
*/
public class PostgreSQLCastingStructJdbcType extends PostgreSQLStructJdbcType {
public static final PostgreSQLCastingStructJdbcType INSTANCE = new PostgreSQLCastingStructJdbcType( null, null, null );
public PostgreSQLCastingStructJdbcType(
EmbeddableMappingType embeddableMappingType,
String typeName,
int[] orderMapping) {
super( embeddableMappingType, typeName, orderMapping );
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(
EmbeddableMappingType mappingType,
String sqlType,
RuntimeModelCreationContext creationContext) {
return new PostgreSQLCastingStructJdbcType(
mappingType,
sqlType,
creationContext.getBootModel()
.getDatabase()
.getDefaultNamespace()
.locateUserDefinedType( Identifier.toIdentifier( sqlType ) )
.getOrderMapping()
);
}
@Override
public void appendWriteExpression(
String writeExpression,
SqlAppender appender,
Dialect dialect) {
appender.append( "cast(" );
appender.append( writeExpression );
appender.append( " as " );
appender.append( getTypeName() );
appender.append( ')' );
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
final String stringValue = ( (PostgreSQLCastingStructJdbcType) getJdbcType() ).toString(
value,
getJavaType(),
options
);
st.setString( index, stringValue );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
final String stringValue = ( (PostgreSQLCastingStructJdbcType) getJdbcType() ).toString(
value,
getJavaType(),
options
);
st.setString( name, stringValue );
}
};
}
}

File: PostgreSQLDialect.java

@@ -248,8 +248,6 @@ public class PostgreSQLDialect extends Dialect {
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( SQLXML, "xml", this ) );
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "uuid", this ) );
-		if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
-			// The following DDL types require that the PGobject class is usable/visible
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( INET, "inet", this ) );
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOMETRY, "geometry", this ) );
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOGRAPHY, "geography", this ) );
@@ -258,7 +256,6 @@ public class PostgreSQLDialect extends Dialect {
 		// Prefer jsonb if possible
 		ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "jsonb", this ) );
 		}
-	}

 	@Override
 	public int getMaxVarcharLength() {
@@ -1316,17 +1313,27 @@ public class PostgreSQLDialect extends Dialect {
 		jdbcTypeRegistry.addDescriptor( XmlJdbcType.INSTANCE );

 		if ( driverKind == PostgreSQLDriverKind.PG_JDBC ) {
-			// HHH-9562
-			jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
 			if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
 				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLInetJdbcType.INSTANCE );
 				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLIntervalSecondJdbcType.INSTANCE );
 				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLStructJdbcType.INSTANCE );
+			}
+			// HHH-9562
+			jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
+			if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
 				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLJsonbJdbcType.INSTANCE );
 			}
+			else {
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingStructJdbcType.INSTANCE );
+				jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
+			}
+		}
+		else {
+			jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
+			jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
+			jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
+			jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingStructJdbcType.INSTANCE );
+			jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
 		}

 		// PostgreSQL requires a custom binder for binding untyped nulls as VARBINARY

File: PostgreSQLStructJdbcType.java

@@ -56,7 +56,7 @@ import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithM
  */
 public class PostgreSQLStructJdbcType extends PostgreSQLPGObjectJdbcType implements AggregateJdbcType {
-	public static final PostgreSQLStructJdbcType INSTANCE = new PostgreSQLStructJdbcType();
+	public static final PostgreSQLStructJdbcType INSTANCE = new PostgreSQLStructJdbcType( null, null, null );

 	private static final DateTimeFormatter LOCAL_DATE_TIME;
 	static {
@@ -89,11 +89,6 @@ public class PostgreSQLStructJdbcType extends PostgreSQLPGObjectJdbcType implements AggregateJdbcType {
 	private final EmbeddableMappingType embeddableMappingType;
 	private final ValueExtractor<Object[]> objectArrayExtractor;

-	private PostgreSQLStructJdbcType() {
-		// The default instance is for reading only and will return an Object[]
-		this( null, null, null );
-	}
-
 	public PostgreSQLStructJdbcType(EmbeddableMappingType embeddableMappingType, String typeName, int[] orderMapping) {
 		super( typeName, SqlTypes.STRUCT );
 		this.embeddableMappingType = embeddableMappingType;

File: AggregateWindowEmulationQueryTransformer.java

@@ -122,7 +122,6 @@ public class AggregateWindowEmulationQueryTransformer implements QueryTransformer {
 							columnName,
 							false,
 							null,
-							null,
 							mapping.getJdbcMapping()
 					);
 					final Expression expression = subSelections.get( i ).getExpression();
@@ -190,7 +189,6 @@ public class AggregateWindowEmulationQueryTransformer implements QueryTransformer {
 							columnName,
 							false,
 							null,
-							null,
 							jdbcMapping
 					);
 					final int subValuesPosition = subSelectClause.getSqlSelections().size();
@@ -252,7 +250,6 @@ public class AggregateWindowEmulationQueryTransformer implements QueryTransformer {
 							columnName,
 							false,
 							null,
-							null,
 							jdbcMapping
 					);
 					final int subValuesPosition = subSelectClause.getSqlSelections().size();
@@ -311,7 +308,6 @@ public class AggregateWindowEmulationQueryTransformer implements QueryTransformer {
 							columnName,
 							false,
 							null,
-							null,
 							jdbcMapping
 					);
 					final int subValuesPosition = subSelectClause.getSqlSelections().size();
@@ -368,7 +364,6 @@ public class AggregateWindowEmulationQueryTransformer implements QueryTransformer {
 							columnName,
 							false,
 							null,
-							null,
 							mapping.getJdbcMapping()
 					)
 			);

View File

@ -13,6 +13,7 @@ import java.util.NoSuchElementException;
import org.hibernate.FetchMode; import org.hibernate.FetchMode;
import org.hibernate.MappingException; import org.hibernate.MappingException;
import org.hibernate.boot.model.relational.Database; import org.hibernate.boot.model.relational.Database;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.engine.spi.Mapping; import org.hibernate.engine.spi.Mapping;
import org.hibernate.internal.util.collections.ArrayHelper; import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.mapping.Column; import org.hibernate.mapping.Column;
@ -186,6 +187,11 @@ public class ExportableColumn extends Column {
public boolean isColumnUpdateable(int index) { public boolean isColumnUpdateable(int index) {
return true; return true;
} }
@Override
public MetadataBuildingContext getBuildingContext() {
return table.getIdentifierValue().getBuildingContext();
}
} }
public static class ColumnIterator implements Iterator<Selectable> { public static class ColumnIterator implements Iterator<Selectable> {

View File

@ -170,6 +170,7 @@ public abstract class Collection implements Fetchable, Value, Filterable {
this.loaderName = original.loaderName; this.loaderName = original.loaderName;
} }
@Override
public MetadataBuildingContext getBuildingContext() { public MetadataBuildingContext getBuildingContext() {
return buildingContext; return buildingContext;
} }

View File

@ -560,6 +560,7 @@ public class Column implements Selectable, Serializable, Cloneable, ColumnTypeIn
return hasCustomRead() ? customRead : getQuotedName( dialect ); return hasCustomRead() ? customRead : getQuotedName( dialect );
} }
@Override
public String getWriteExpr() { public String getWriteExpr() {
return customWrite != null && customWrite.length() > 0 ? customWrite : "?"; return customWrite != null && customWrite.length() > 0 ? customWrite : "?";
} }

View File

@ -51,6 +51,7 @@ public class OneToMany implements Value {
return new OneToMany( this ); return new OneToMany( this );
} }
@Override
public MetadataBuildingContext getBuildingContext() { public MetadataBuildingContext getBuildingContext() {
return buildingContext; return buildingContext;
} }

View File

@ -6,8 +6,12 @@
*/ */
package org.hibernate.mapping; package org.hibernate.mapping;
import org.hibernate.Incubating;
import org.hibernate.dialect.Dialect; import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.query.sqm.function.SqmFunctionRegistry; import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.spi.TypeConfiguration; import org.hibernate.type.spi.TypeConfiguration;
/** /**
@ -62,4 +66,17 @@ public interface Selectable {
String getAlias(Dialect dialect, Table table); String getAlias(Dialect dialect, Table table);
String getTemplate(Dialect dialect, TypeConfiguration typeConfiguration, SqmFunctionRegistry functionRegistry); String getTemplate(Dialect dialect, TypeConfiguration typeConfiguration, SqmFunctionRegistry functionRegistry);
@Incubating
default String getWriteExpr() {
final String customWriteExpression = getCustomWriteExpression();
return customWriteExpression == null || customWriteExpression.isEmpty()
? "?"
: customWriteExpression;
}
@Incubating
default String getWriteExpr(JdbcMapping jdbcMapping, Dialect dialect) {
return jdbcMapping.getJdbcType().wrapWriteExpression( getWriteExpr(), dialect );
}
} }
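For context, a standalone sketch (plain Java, not Hibernate API) of how these two defaults compose: getWriteExpr() falls back to the JDBC parameter marker when no custom write expression is configured, and getWriteExpr(jdbcMapping, dialect) then hands that string to the JdbcType for wrapping. The cast shown is a hypothetical example of what a casting JdbcType might produce.

public class WriteExprCompositionSketch {

	// mirrors Selectable#getWriteExpr(): the custom write expression if present, otherwise "?"
	static String getWriteExpr(String customWriteExpression) {
		return customWriteExpression == null || customWriteExpression.isEmpty()
				? "?"
				: customWriteExpression;
	}

	// stand-in for JdbcType#wrapWriteExpression on a hypothetical casting type
	static String wrapWriteExpression(String writeExpression) {
		return "cast(" + writeExpression + " as jsonb)";
	}

	public static void main(String[] args) {
		// plain column: the bare parameter marker gets wrapped
		System.out.println( wrapWriteExpression( getWriteExpr( null ) ) );       // cast(? as jsonb)
		// column with a custom write expression, e.g. @ColumnTransformer(write = "lower(?)")
		System.out.println( wrapWriteExpression( getWriteExpr( "lower(?)" ) ) ); // cast(lower(?) as jsonb)
	}
}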

View File

@ -142,6 +142,7 @@ public abstract class SimpleValue implements KeyValue {
this.generator = original.generator; this.generator = original.generator;
} }
@Override
public MetadataBuildingContext getBuildingContext() { public MetadataBuildingContext getBuildingContext() {
return buildingContext; return buildingContext;
} }

View File

@ -11,9 +11,15 @@ import java.util.Iterator;
import java.util.List; import java.util.List;
import org.hibernate.FetchMode; import org.hibernate.FetchMode;
import org.hibernate.Incubating;
import org.hibernate.MappingException; import org.hibernate.MappingException;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.engine.spi.Mapping; import org.hibernate.engine.spi.Mapping;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.service.ServiceRegistry; import org.hibernate.service.ServiceRegistry;
import org.hibernate.type.CompositeType;
import org.hibernate.type.EntityType;
import org.hibernate.type.MetaType;
import org.hibernate.type.Type; import org.hibernate.type.Type;
/** /**
@ -72,6 +78,59 @@ public interface Value extends Serializable {
Type getType() throws MappingException; Type getType() throws MappingException;
@Incubating
default JdbcMapping getSelectableType(Mapping factory, int index) throws MappingException {
return getType( factory, getType(), index );
}
private JdbcMapping getType(Mapping factory, Type elementType, int index) {
if ( elementType instanceof CompositeType ) {
final Type[] subtypes = ( (CompositeType) elementType ).getSubtypes();
for ( int i = 0; i < subtypes.length; i++ ) {
final Type subtype = subtypes[i];
final int columnSpan;
if ( subtype instanceof EntityType ) {
final EntityType entityType = (EntityType) subtype;
final Type idType = getIdType( entityType );
columnSpan = idType.getColumnSpan( factory );
}
else {
columnSpan = subtype.getColumnSpan( factory );
}
if ( columnSpan <= index ) {
index -= columnSpan;
}
else if ( columnSpan != 0 ) {
return getType( factory, subtype, index );
}
}
// Should never happen
throw new IllegalStateException( "Type index is past the types column span!" );
}
else if ( elementType instanceof EntityType ) {
final EntityType entityType = (EntityType) elementType;
final Type idType = getIdType( entityType );
return getType( factory, idType, index );
}
else if ( elementType instanceof MetaType ) {
return (JdbcMapping) ( (MetaType) elementType ).getBaseType();
}
return (JdbcMapping) elementType;
}
private Type getIdType(EntityType entityType) {
final PersistentClass entityBinding = getBuildingContext().getMetadataCollector()
.getEntityBinding( entityType.getAssociatedEntityName() );
final Type idType;
if ( entityType.isReferenceToPrimaryKey() ) {
idType = entityBinding.getIdentifier().getType();
}
else {
idType = entityBinding.getProperty( entityType.getRHSUniqueKeyPropertyName() ).getType();
}
return idType;
}
FetchMode getFetchMode(); FetchMode getFetchMode();
Table getTable(); Table getTable();
@ -105,6 +164,10 @@ public interface Value extends Serializable {
boolean[] getColumnUpdateability(); boolean[] getColumnUpdateability();
boolean hasAnyUpdatableColumns(); boolean hasAnyUpdatableColumns();
@Incubating
default MetadataBuildingContext getBuildingContext() {
throw new UnsupportedOperationException( "Value#getBuildingContext is not implemented by: " + getClass().getName() );
}
ServiceRegistry getServiceRegistry(); ServiceRegistry getServiceRegistry();
Value copy(); Value copy();
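For context, a standalone illustration (hypothetical node types, not Hibernate API) of the index walk performed by getSelectableType/getType above: subtract each subtype's column span until the requested zero-based index falls inside one of them, then recurse into that subtype until a basic type is reached.

import java.util.List;

public class ColumnSpanWalkSketch {

	interface Node { int columnSpan(); }

	record Leaf(String jdbcTypeName) implements Node {
		@Override public int columnSpan() { return 1; }
	}

	record Composite(List<Node> subtypes) implements Node {
		@Override public int columnSpan() {
			return subtypes.stream().mapToInt( Node::columnSpan ).sum();
		}
	}

	// walk down to the basic type owning the given zero-based column index
	static Leaf resolve(Node node, int index) {
		if ( node instanceof Composite composite ) {
			for ( Node subtype : composite.subtypes() ) {
				final int span = subtype.columnSpan();
				if ( span <= index ) {
					index -= span; // the column belongs to a later subtype
				}
				else {
					return resolve( subtype, index );
				}
			}
			throw new IllegalStateException( "Index is past the column span" );
		}
		return (Leaf) node;
	}

	public static void main(String[] args) {
		// component( name VARCHAR, nested( count INTEGER, payload JSON ) ): index 2 is the JSON column
		final Composite component = new Composite( List.of(
				new Leaf( "VARCHAR" ),
				new Composite( List.of( new Leaf( "INTEGER" ), new Leaf( "JSON" ) ) )
		) );
		System.out.println( resolve( component, 2 ).jdbcTypeName() ); // JSON
	}
}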

View File

@ -306,7 +306,7 @@ public abstract class AbstractEmbeddableMapping implements EmbeddableMappingType
selectablePath, selectablePath,
selectable.isFormula(), selectable.isFormula(),
selectable.getCustomReadExpression(), selectable.getCustomReadExpression(),
selectable.getCustomWriteExpression(), selectable.getWriteExpr( ( (BasicType<?>) subtype ).getJdbcMapping(), dialect ),
columnDefinition, columnDefinition,
length, length,
precision, precision,

View File

@ -250,6 +250,11 @@ public class BasicAttributeMapping
return customWriteExpression; return customWriteExpression;
} }
@Override
public String getWriteExpression() {
return customWriteExpression;
}
@Override @Override
public String getColumnDefinition() { public String getColumnDefinition() {
return columnDefinition; return columnDefinition;

View File

@ -150,7 +150,6 @@ public class CaseStatementDiscriminatorMappingImpl extends AbstractDiscriminator
tableDiscriminatorDetails.getCheckColumnName(), tableDiscriminatorDetails.getCheckColumnName(),
false, false,
null, null,
null,
getJdbcMapping() getJdbcMapping()
), ),
true true

View File

@ -420,7 +420,7 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
selectablePath, selectablePath,
selectable.isFormula(), selectable.isFormula(),
selectable.getCustomReadExpression(), selectable.getCustomReadExpression(),
selectable.getCustomWriteExpression(), selectable.getWriteExpr( ( (BasicType<?>) subtype ).getJdbcMapping(), dialect ),
columnDefinition, columnDefinition,
length, length,
precision, precision,

View File

@ -16,6 +16,7 @@ import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping; import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath; import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.query.sqm.function.SqmFunctionRegistry; import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import org.hibernate.type.BasicType;
import org.hibernate.type.spi.TypeConfiguration; import org.hibernate.type.spi.TypeConfiguration;
/** /**
@ -57,7 +58,7 @@ public class SelectableMappingImpl extends SqlTypedMappingImpl implements Select
this.selectionExpression = selectionExpression.intern(); this.selectionExpression = selectionExpression.intern();
this.selectablePath = selectablePath == null ? new SelectablePath( selectionExpression ) : selectablePath; this.selectablePath = selectablePath == null ? new SelectablePath( selectionExpression ) : selectablePath;
this.customReadExpression = customReadExpression == null ? null : customReadExpression.intern(); this.customReadExpression = customReadExpression == null ? null : customReadExpression.intern();
this.customWriteExpression = customWriteExpression == null ? null : customWriteExpression.intern(); this.customWriteExpression = customWriteExpression == null || isFormula ? null : customWriteExpression.intern();
this.nullable = nullable; this.nullable = nullable;
this.insertable = insertable; this.insertable = insertable;
this.updateable = updateable; this.updateable = updateable;
@ -160,7 +161,7 @@ public class SelectableMappingImpl extends SqlTypedMappingImpl implements Select
? null ? null
: parentPath.append( selectableName ), : parentPath.append( selectableName ),
selectable.getCustomReadExpression(), selectable.getCustomReadExpression(),
selectable.getCustomWriteExpression(), selectable.getWriteExpr( jdbcMapping, dialect ),
columnDefinition, columnDefinition,
length, length,
precision, precision,
@ -214,6 +215,11 @@ public class SelectableMappingImpl extends SqlTypedMappingImpl implements Select
return customWriteExpression; return customWriteExpression;
} }
@Override
public String getWriteExpression() {
return customWriteExpression;
}
@Override @Override
public boolean isFormula() { public boolean isFormula() {
return isFormula; return isFormula;

View File

@ -67,7 +67,6 @@ public class ColumnReference implements OrderingExpression, SequencePart {
// because these ordering fragments are only ever part of the order-by clause, there // because these ordering fragments are only ever part of the order-by clause, there
// is no need for the JdbcMapping // is no need for the JdbcMapping
null, null,
null,
null null
) )
); );

View File

@ -128,6 +128,7 @@ import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
import org.hibernate.sql.results.graph.DomainResult; import org.hibernate.sql.results.graph.DomainResult;
import org.hibernate.sql.results.graph.internal.ImmutableFetchList; import org.hibernate.sql.results.graph.internal.ImmutableFetchList;
import org.hibernate.sql.results.internal.SqlSelectionImpl; import org.hibernate.sql.results.internal.SqlSelectionImpl;
import org.hibernate.type.AssociationType;
import org.hibernate.type.CollectionType; import org.hibernate.type.CollectionType;
import org.hibernate.type.CompositeType; import org.hibernate.type.CompositeType;
import org.hibernate.type.EntityType; import org.hibernate.type.EntityType;
@ -397,7 +398,7 @@ public abstract class AbstractCollectionPersister
else { else {
Column col = (Column) selectable; Column col = (Column) selectable;
elementColumnNames[j] = col.getQuotedName( dialect ); elementColumnNames[j] = col.getQuotedName( dialect );
elementColumnWriters[j] = col.getWriteExpr(); elementColumnWriters[j] = col.getWriteExpr( elementBootDescriptor.getSelectableType( factory, j ), dialect );
elementColumnReaders[j] = col.getReadExpr( dialect ); elementColumnReaders[j] = col.getReadExpr( dialect );
elementColumnReaderTemplates[j] = col.getTemplate( elementColumnReaderTemplates[j] = col.getTemplate(
dialect, dialect,

View File

@ -662,7 +662,7 @@ public abstract class AbstractEntityPersister
else { else {
final Column column = (Column) selectable; final Column column = (Column) selectable;
colNames[k] = column.getQuotedName( dialect ); colNames[k] = column.getQuotedName( dialect );
colWriters[k] = column.getWriteExpr(); colWriters[k] = column.getWriteExpr( prop.getValue().getSelectableType( factory, k ), dialect );
} }
} }
propertyColumnNames[i] = colNames; propertyColumnNames[i] = colNames;
@ -1326,7 +1326,6 @@ public abstract class AbstractEntityPersister
rootPkColumnName, rootPkColumnName,
false, false,
null, null,
null,
selection.getJdbcMapping() selection.getJdbcMapping()
) )
); );
@ -1339,7 +1338,6 @@ public abstract class AbstractEntityPersister
fkColumnName, fkColumnName,
false, false,
null, null,
null,
selection.getJdbcMapping() selection.getJdbcMapping()
) )
); );
@ -3062,7 +3060,6 @@ public abstract class AbstractEntityPersister
discriminatorExpression, discriminatorExpression,
isDiscriminatorFormula(), isDiscriminatorFormula(),
null, null,
null,
discriminatorType.getJdbcMapping() discriminatorType.getJdbcMapping()
) )
); );
@ -5301,7 +5298,7 @@ public abstract class AbstractEntityPersister
null, null,
false, false,
null, null,
null, "?",
column.getSqlType(), column.getSqlType(),
column.getLength(), column.getLength(),
column.getPrecision(), column.getPrecision(),
@ -5330,7 +5327,7 @@ public abstract class AbstractEntityPersister
attrColumnExpression = attrColumnNames[0]; attrColumnExpression = attrColumnNames[0];
isAttrColumnExpressionFormula = false; isAttrColumnExpressionFormula = false;
customReadExpr = null; customReadExpr = null;
customWriteExpr = null; customWriteExpr = "?";
Column column = value.getColumns().get( 0 ); Column column = value.getColumns().get( 0 );
columnDefinition = column.getSqlType(); columnDefinition = column.getSqlType();
length = column.getLength(); length = column.getLength();
@ -5356,7 +5353,7 @@ public abstract class AbstractEntityPersister
creationContext.getTypeConfiguration(), creationContext.getTypeConfiguration(),
creationContext.getFunctionRegistry() creationContext.getFunctionRegistry()
); );
customWriteExpr = selectable.getCustomWriteExpression(); customWriteExpr = selectable.getWriteExpr( (JdbcMapping) attrType, creationContext.getDialect() );
Column column = value.getColumns().get( 0 ); Column column = value.getColumns().get( 0 );
columnDefinition = column.getSqlType(); columnDefinition = column.getSqlType();
length = column.getLength(); length = column.getLength();

View File

@ -268,7 +268,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(), rowNumberColumn.getColumnExpression(),
false, false,
null, null,
null,
rowNumberColumn.getJdbcMapping() rowNumberColumn.getJdbcMapping()
); );
insertStatement.getTargetColumns().set( insertStatement.getTargetColumns().set(
@ -322,7 +321,6 @@ public class CteInsertHandler implements InsertHandler {
columnReference.getColumnExpression(), columnReference.getColumnExpression(),
false, false,
null, null,
null,
columnReference.getJdbcMapping() columnReference.getJdbcMapping()
) )
) )
@ -353,7 +351,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(), rowNumberColumn.getColumnExpression(),
false, false,
null, null,
null,
rowNumberColumn.getJdbcMapping() rowNumberColumn.getJdbcMapping()
); );
insertStatement.getTargetColumns().add( columnReference ); insertStatement.getTargetColumns().add( columnReference );
@ -391,7 +388,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(), rowNumberColumn.getColumnExpression(),
false, false,
null, null,
null,
rowNumberColumn.getJdbcMapping() rowNumberColumn.getJdbcMapping()
); );
final CteColumn idColumn = fullEntityCteTable.getCteColumns().get( 0 ); final CteColumn idColumn = fullEntityCteTable.getCteColumns().get( 0 );
@ -498,7 +494,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(), rowNumberColumn.getColumnExpression(),
false, false,
null, null,
null,
rowNumberColumn.getJdbcMapping() rowNumberColumn.getJdbcMapping()
) )
) )
@ -515,7 +510,6 @@ public class CteInsertHandler implements InsertHandler {
idColumn.getColumnExpression(), idColumn.getColumnExpression(),
false, false,
null, null,
null,
idColumn.getJdbcMapping() idColumn.getJdbcMapping()
), ),
BinaryArithmeticOperator.ADD, BinaryArithmeticOperator.ADD,
@ -527,7 +521,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(), rowNumberColumn.getColumnExpression(),
false, false,
null, null,
null,
rowNumberColumn.getJdbcMapping() rowNumberColumn.getJdbcMapping()
), ),
integerType integerType
@ -560,7 +553,6 @@ public class CteInsertHandler implements InsertHandler {
cteColumn.getColumnExpression(), cteColumn.getColumnExpression(),
false, false,
null, null,
null,
cteColumn.getJdbcMapping() cteColumn.getJdbcMapping()
) )
) )
@ -822,7 +814,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(), rowNumberColumn.getColumnExpression(),
false, false,
null, null,
null,
rowNumberColumn.getJdbcMapping() rowNumberColumn.getJdbcMapping()
); );
// Insert in the same order as the original tuples came // Insert in the same order as the original tuples came
@ -843,7 +834,6 @@ public class CteInsertHandler implements InsertHandler {
keyColumns[j], keyColumns[j],
false, false,
null, null,
null,
null null
) )
); );
@ -878,7 +868,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(), rowNumberColumn.getColumnExpression(),
false, false,
null, null,
null,
rowNumberColumn.getJdbcMapping() rowNumberColumn.getJdbcMapping()
) )
) )
@ -897,7 +886,6 @@ public class CteInsertHandler implements InsertHandler {
idCteColumn.getColumnExpression(), idCteColumn.getColumnExpression(),
false, false,
null, null,
null,
idCteColumn.getJdbcMapping() idCteColumn.getJdbcMapping()
) )
) )
@ -914,7 +902,6 @@ public class CteInsertHandler implements InsertHandler {
cteColumn.getColumnExpression(), cteColumn.getColumnExpression(),
false, false,
null, null,
null,
cteColumn.getJdbcMapping() cteColumn.getJdbcMapping()
) )
) )
@ -944,7 +931,6 @@ public class CteInsertHandler implements InsertHandler {
idCteColumn.getColumnExpression(), idCteColumn.getColumnExpression(),
false, false,
null, null,
null,
idCteColumn.getJdbcMapping() idCteColumn.getJdbcMapping()
); );
finalResultQuery.getSelectClause().addSqlSelection( finalResultQuery.getSelectClause().addSqlSelection(
@ -998,7 +984,6 @@ public class CteInsertHandler implements InsertHandler {
keyColumns[j], keyColumns[j],
false, false,
null, null,
null,
null null
) )
); );
@ -1011,7 +996,6 @@ public class CteInsertHandler implements InsertHandler {
rootKeyColumns[j], rootKeyColumns[j],
false, false,
null, null,
null,
null null
) )
) )
@ -1046,7 +1030,6 @@ public class CteInsertHandler implements InsertHandler {
entry.getKey().get( j ).getColumnExpression(), entry.getKey().get( j ).getColumnExpression(),
columnReference.isColumnExpressionFormula(), columnReference.isColumnExpressionFormula(),
null, null,
null,
columnReference.getJdbcMapping() columnReference.getJdbcMapping()
) )
) )

View File

@ -74,7 +74,6 @@ public class InPredicateRestrictionProducer implements MatchingIdRestrictionProd
// id columns cannot be formulas and cannot have custom read and write expressions // id columns cannot be formulas and cannot have custom read and write expressions
false, false,
null, null,
null,
basicIdMapping.getJdbcMapping() basicIdMapping.getJdbcMapping()
); );
predicate = new InListPredicate( inFixture ); predicate = new InListPredicate( inFixture );

View File

@ -452,7 +452,6 @@ public class InlineUpdateHandler implements UpdateHandler {
columnReference.getColumnExpression(), columnReference.getColumnExpression(),
false, false,
null, null,
null,
columnReference.getJdbcMapping() columnReference.getJdbcMapping()
); );
columnNames.add( columnReference.getColumnExpression() ); columnNames.add( columnReference.getColumnExpression() );
@ -463,7 +462,6 @@ public class InlineUpdateHandler implements UpdateHandler {
selectableMapping.getSelectionExpression(), selectableMapping.getSelectionExpression(),
false, false,
null, null,
null,
columnReference.getJdbcMapping() columnReference.getJdbcMapping()
) )
); );
@ -485,7 +483,6 @@ public class InlineUpdateHandler implements UpdateHandler {
columnReference.getColumnExpression(), columnReference.getColumnExpression(),
false, false,
null, null,
null,
columnReference.getJdbcMapping() columnReference.getJdbcMapping()
); );
columnNames = Collections.singletonList( columnReference.getColumnExpression() ); columnNames = Collections.singletonList( columnReference.getColumnExpression() );
@ -497,7 +494,6 @@ public class InlineUpdateHandler implements UpdateHandler {
( (BasicEntityIdentifierMapping) entityDescriptor.getIdentifierMapping() ).getSelectionExpression(), ( (BasicEntityIdentifierMapping) entityDescriptor.getIdentifierMapping() ).getSelectionExpression(),
false, false,
null, null,
null,
columnReference.getJdbcMapping() columnReference.getJdbcMapping()
) )
); );

View File

@ -80,7 +80,6 @@ public final class ExecuteWithTemporaryTableHelper {
// id columns cannot be formulas and cannot have custom read and write expressions // id columns cannot be formulas and cannot have custom read and write expressions
false, false,
null, null,
null,
column.getJdbcMapping() column.getJdbcMapping()
) )
); );
@ -230,7 +229,6 @@ public final class ExecuteWithTemporaryTableHelper {
temporaryTableColumn.getColumnName(), temporaryTableColumn.getColumnName(),
false, false,
null, null,
null,
temporaryTableColumn.getJdbcMapping() temporaryTableColumn.getJdbcMapping()
) )
) )
@ -250,7 +248,6 @@ public final class ExecuteWithTemporaryTableHelper {
selectableMapping.getSelectionExpression(), selectableMapping.getSelectionExpression(),
false, false,
null, null,
null,
selectableMapping.getJdbcMapping() selectableMapping.getJdbcMapping()
) )
) )
@ -274,7 +271,6 @@ public final class ExecuteWithTemporaryTableHelper {
idTable.getSessionUidColumn().getColumnName(), idTable.getSessionUidColumn().getColumnName(),
false, false,
null, null,
null,
idTable.getSessionUidColumn().getJdbcMapping() idTable.getSessionUidColumn().getJdbcMapping()
), ),
ComparisonOperator.EQUAL, ComparisonOperator.EQUAL,

View File

@ -22,7 +22,6 @@ import org.hibernate.engine.FetchTiming;
import org.hibernate.engine.jdbc.spi.JdbcServices; import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.generator.EventType;
import org.hibernate.id.BulkInsertionCapableIdentifierGenerator; import org.hibernate.id.BulkInsertionCapableIdentifierGenerator;
import org.hibernate.id.OptimizableGenerator; import org.hibernate.id.OptimizableGenerator;
import org.hibernate.id.PostInsertIdentityPersister; import org.hibernate.id.PostInsertIdentityPersister;
@ -348,7 +347,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
columnReference.getColumnExpression(), columnReference.getColumnExpression(),
false, false,
null, null,
null,
columnReference.getJdbcMapping() columnReference.getJdbcMapping()
) )
) )
@ -369,7 +367,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
TemporaryTable.ENTITY_TABLE_IDENTITY_COLUMN, TemporaryTable.ENTITY_TABLE_IDENTITY_COLUMN,
false, false,
null, null,
null,
identifierMapping.getJdbcMapping() identifierMapping.getJdbcMapping()
); );
idSelectQuerySpec.getSelectClause() idSelectQuerySpec.getSelectClause()
@ -453,7 +450,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
sessionUidColumn.getColumnName(), sessionUidColumn.getColumnName(),
false, false,
null, null,
null,
sessionUidColumn.getJdbcMapping() sessionUidColumn.getJdbcMapping()
), ),
ComparisonOperator.EQUAL, ComparisonOperator.EQUAL,
@ -470,7 +466,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
rowNumberColumn.getColumnName(), rowNumberColumn.getColumnName(),
false, false,
null, null,
null,
rowNumberColumn.getJdbcMapping() rowNumberColumn.getJdbcMapping()
), ),
ComparisonOperator.EQUAL, ComparisonOperator.EQUAL,
@ -529,7 +524,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
keyColumns[0], keyColumns[0],
false, false,
null, null,
null,
identifierMapping.getJdbcMapping() identifierMapping.getJdbcMapping()
) )
); );
@ -542,7 +536,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
idColumnReference.getColumnExpression(), idColumnReference.getColumnExpression(),
false, false,
null, null,
null,
idColumnReference.getJdbcMapping() idColumnReference.getJdbcMapping()
) )
) )
@ -599,7 +592,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
TemporaryTable.ENTITY_TABLE_IDENTITY_COLUMN, TemporaryTable.ENTITY_TABLE_IDENTITY_COLUMN,
false, false,
null, null,
null,
identifierMapping.getJdbcMapping() identifierMapping.getJdbcMapping()
), ),
ComparisonOperator.EQUAL, ComparisonOperator.EQUAL,
@ -706,7 +698,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
columnReference.getColumnExpression(), columnReference.getColumnExpression(),
false, false,
null, null,
null,
columnReference.getJdbcMapping() columnReference.getJdbcMapping()
) )
) )
@ -740,7 +731,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
targetKeyColumnName, targetKeyColumnName,
false, false,
null, null,
null,
identifierMapping.getJdbcMapping() identifierMapping.getJdbcMapping()
) )
); );

View File

@ -189,7 +189,6 @@ public class TableBasedInsertHandler implements InsertHandler {
rowNumberColumn.getColumnName(), rowNumberColumn.getColumnName(),
false, false,
null, null,
null,
rowNumberColumn.getJdbcMapping() rowNumberColumn.getJdbcMapping()
); );
insertStatement.getTargetColumns().set( insertStatement.getTargetColumns().set(
@ -214,7 +213,6 @@ public class TableBasedInsertHandler implements InsertHandler {
rowNumberColumn.getColumnName(), rowNumberColumn.getColumnName(),
false, false,
null, null,
null,
rowNumberColumn.getJdbcMapping() rowNumberColumn.getJdbcMapping()
); );
insertStatement.getTargetColumns().add( columnReference ); insertStatement.getTargetColumns().add( columnReference );
@ -237,7 +235,6 @@ public class TableBasedInsertHandler implements InsertHandler {
sessionUidColumn.getColumnName(), sessionUidColumn.getColumnName(),
false, false,
null, null,
null,
sessionUidColumn.getJdbcMapping() sessionUidColumn.getJdbcMapping()
); );
insertStatement.getTargetColumns().add( sessionUidColumnReference ); insertStatement.getTargetColumns().add( sessionUidColumnReference );
@ -267,7 +264,6 @@ public class TableBasedInsertHandler implements InsertHandler {
rowNumberColumn.getColumnName(), rowNumberColumn.getColumnName(),
false, false,
null, null,
null,
rowNumberColumn.getJdbcMapping() rowNumberColumn.getJdbcMapping()
); );
insertStatement.getTargetColumns().add( columnReference ); insertStatement.getTargetColumns().add( columnReference );
@ -286,7 +282,6 @@ public class TableBasedInsertHandler implements InsertHandler {
sessionUidColumn.getColumnName(), sessionUidColumn.getColumnName(),
false, false,
null, null,
null,
sessionUidColumn.getJdbcMapping() sessionUidColumn.getJdbcMapping()
); );
insertStatement.getTargetColumns().add( sessionUidColumnReference ); insertStatement.getTargetColumns().add( sessionUidColumnReference );

View File

@ -96,6 +96,8 @@ import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.metamodel.model.domain.PluralPersistentAttribute; import org.hibernate.metamodel.model.domain.PluralPersistentAttribute;
import org.hibernate.metamodel.model.domain.internal.AnyDiscriminatorSqmPath; import org.hibernate.metamodel.model.domain.internal.AnyDiscriminatorSqmPath;
import org.hibernate.metamodel.model.domain.internal.AnyDiscriminatorSqmPathSource; import org.hibernate.metamodel.model.domain.internal.AnyDiscriminatorSqmPathSource;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.derived.AnonymousTupleType;
import org.hibernate.metamodel.model.domain.internal.BasicSqmPathSource; import org.hibernate.metamodel.model.domain.internal.BasicSqmPathSource;
import org.hibernate.metamodel.model.domain.internal.CompositeSqmPathSource; import org.hibernate.metamodel.model.domain.internal.CompositeSqmPathSource;
import org.hibernate.metamodel.model.domain.internal.DiscriminatorSqmPath; import org.hibernate.metamodel.model.domain.internal.DiscriminatorSqmPath;
@ -113,8 +115,6 @@ import org.hibernate.query.criteria.JpaCteCriteriaAttribute;
import org.hibernate.query.criteria.JpaPath; import org.hibernate.query.criteria.JpaPath;
import org.hibernate.query.criteria.JpaSearchOrder; import org.hibernate.query.criteria.JpaSearchOrder;
import org.hibernate.query.derived.AnonymousTupleEntityValuedModelPart; import org.hibernate.query.derived.AnonymousTupleEntityValuedModelPart;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.derived.AnonymousTupleType;
import org.hibernate.query.spi.QueryEngine; import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.spi.QueryOptions; import org.hibernate.query.spi.QueryOptions;
import org.hibernate.query.spi.QueryParameterBinding; import org.hibernate.query.spi.QueryParameterBinding;
@ -4611,7 +4611,6 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
columnNames.get( i ), columnNames.get( i ),
false, false,
null, null,
null,
subQueryColumns.get( i ).getJdbcMapping() subQueryColumns.get( i ).getJdbcMapping()
) )
); );
@ -4654,7 +4653,6 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
columnNames.get( 0 ), columnNames.get( 0 ),
false, false,
null, null,
null,
expression.getExpressionType().getSingleJdbcMapping() expression.getExpressionType().getSingleJdbcMapping()
) )
); );
@ -4757,7 +4755,6 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
tableReference.getColumnNames().get( 0 ), tableReference.getColumnNames().get( 0 ),
false, false,
null, null,
null,
sqlSelections.get( 0 ).getExpressionType().getSingleJdbcMapping() sqlSelections.get( 0 ).getExpressionType().getSingleJdbcMapping()
) )
), ),
@ -4774,7 +4771,6 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
tableReference.getColumnNames().get( selectionIndex ), tableReference.getColumnNames().get( selectionIndex ),
false, false,
null, null,
null,
selectionMapping.getJdbcMapping() selectionMapping.getJdbcMapping()
) )
) )

View File

@ -147,12 +147,23 @@ public class SqlAstQueryPartProcessingStateImpl
final SelectClause selectClause = ( (QuerySpec) queryPart ).getSelectClause(); final SelectClause selectClause = ( (QuerySpec) queryPart ).getSelectClause();
final int valuesArrayPosition = selectClause.getSqlSelections().size(); final int valuesArrayPosition = selectClause.getSqlSelections().size();
final SqlSelection sqlSelection = expression.createSqlSelection( final SqlSelection sqlSelection;
if ( isTopLevel() ) {
sqlSelection = expression.createDomainResultSqlSelection(
valuesArrayPosition + 1, valuesArrayPosition + 1,
valuesArrayPosition, valuesArrayPosition,
javaType, javaType,
typeConfiguration typeConfiguration
); );
}
else {
sqlSelection = expression.createSqlSelection(
valuesArrayPosition + 1,
valuesArrayPosition,
javaType,
typeConfiguration
);
}
selectClause.addSqlSelection( sqlSelection ); selectClause.addSqlSelection( sqlSelection );

View File

@ -1977,7 +1977,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
currentCteStatement.getCycleMarkColumn().getColumnExpression(), currentCteStatement.getCycleMarkColumn().getColumnExpression(),
false, false,
null, null,
null,
currentCteStatement.getCycleMarkColumn().getJdbcMapping() currentCteStatement.getCycleMarkColumn().getJdbcMapping()
); );
if ( currentCteStatement.getCycleValue().getJdbcMapping() == getBooleanType() if ( currentCteStatement.getCycleValue().getJdbcMapping() == getBooleanType()
@ -2020,7 +2019,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
depthColumnName, depthColumnName,
false, false,
null, null,
null,
integerType integerType
); );
visitColumnReference( depthColumnReference ); visitColumnReference( depthColumnReference );
@ -2052,7 +2050,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
currentCteStatement.getSearchColumn().getColumnExpression(), currentCteStatement.getSearchColumn().getColumnExpression(),
false, false,
null, null,
null,
currentCteStatement.getSearchColumn().getJdbcMapping() currentCteStatement.getSearchColumn().getJdbcMapping()
) )
); );
@ -2164,7 +2161,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
depthColumnName, depthColumnName,
false, false,
null, null,
null,
integerType integerType
); );
visitColumnReference( depthColumnReference ); visitColumnReference( depthColumnReference );
@ -2208,7 +2204,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
currentCteStatement.getSearchColumn().getColumnExpression(), currentCteStatement.getSearchColumn().getColumnExpression(),
false, false,
null, null,
null,
currentCteStatement.getSearchColumn().getJdbcMapping() currentCteStatement.getSearchColumn().getJdbcMapping()
) )
); );
@ -2353,7 +2348,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
cyclePathColumnName, cyclePathColumnName,
false, false,
null, null,
null,
stringType stringType
); );
@ -2500,7 +2494,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
cyclePathColumnName, cyclePathColumnName,
false, false,
null, null,
null,
stringType stringType
); );
arguments.add( new QueryLiteral<>( "%", stringType ) ); arguments.add( new QueryLiteral<>( "%", stringType ) );
@ -2919,7 +2912,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
"c" + i, "c" + i,
false, false,
null, null,
null,
getIntegerType() getIntegerType()
) )
) )
@ -3144,7 +3136,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
"c" + index, "c" + index,
false, false,
null, null,
null,
expression.getExpressionType().getSingleJdbcMapping() expression.getExpressionType().getSingleJdbcMapping()
); );
} }
@ -5608,7 +5599,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
columnName, columnName,
false, false,
null, null,
null,
null null
) )
); );
@ -5678,7 +5668,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
columnName, columnName,
false, false,
null, null,
null,
null null
) )
); );
@ -5803,7 +5792,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
"sort_col_" + i, "sort_col_" + i,
false, false,
null, null,
null,
null null
); );
sortExpression = sortSpecification.getSortExpression(); sortExpression = sortSpecification.getSortExpression();
@ -6193,7 +6181,11 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
break; break;
case DEFAULT: case DEFAULT:
default: default:
appendSql( PARAM_MARKER ); jdbcParameter.getExpressionType()
.getJdbcMappings()
.get( 0 )
.getJdbcType()
.appendWriteExpression( "?", this, getDialect() );
parameterBinders.add( jdbcParameter.getParameterBinder() ); parameterBinders.add( jdbcParameter.getParameterBinder() );
jdbcParameters.addParameter( jdbcParameter ); jdbcParameters.addParameter( jdbcParameter );
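For context, a minimal stand-in (not the translator API) for what the DEFAULT branch now delegates to: instead of always emitting the bare parameter marker, the parameter's JdbcType is asked to append the write expression, so a type that needs an explicit conversion can wrap the marker. The inet cast is purely illustrative.

public class ParameterMarkerSketch {

	// minimal stand-in for the JdbcType#appendWriteExpression hook
	interface MarkerRenderer {
		void appendWriteExpression(String writeExpression, StringBuilder appender);
	}

	// default behaviour: append the marker unchanged
	static final MarkerRenderer DEFAULT = (expr, sql) -> sql.append( expr );

	// hypothetical casting type, e.g. one that binds text but stores a vendor-specific type
	static final MarkerRenderer CASTING = (expr, sql) -> sql.append( "cast(" ).append( expr ).append( " as inet)" );

	static String render(MarkerRenderer jdbcType) {
		final StringBuilder sql = new StringBuilder( "update network set address = " );
		jdbcType.appendWriteExpression( "?", sql );
		return sql.toString();
	}

	public static void main(String[] args) {
		System.out.println( render( DEFAULT ) ); // update network set address = ?
		System.out.println( render( CASTING ) ); // update network set address = cast(? as inet)
	}
}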

View File

@ -18,4 +18,8 @@ public interface SqlAstProcessingState {
SqlExpressionResolver getSqlExpressionResolver(); SqlExpressionResolver getSqlExpressionResolver();
SqlAstCreationState getSqlAstCreationState(); SqlAstCreationState getSqlAstCreationState();
default boolean isTopLevel() { // todo: naming
return getParentState() == null;
}
} }

View File

@ -37,7 +37,6 @@ public class ColumnReference implements Expression, Assignable {
private final SelectablePath selectablePath; private final SelectablePath selectablePath;
private final boolean isFormula; private final boolean isFormula;
private final String readExpression; private final String readExpression;
private final String customWriteExpression;
private final JdbcMapping jdbcMapping; private final JdbcMapping jdbcMapping;
public ColumnReference(TableReference tableReference, SelectableMapping selectableMapping) { public ColumnReference(TableReference tableReference, SelectableMapping selectableMapping) {
@ -47,7 +46,6 @@ public class ColumnReference implements Expression, Assignable {
selectableMapping.getSelectablePath(), selectableMapping.getSelectablePath(),
selectableMapping.isFormula(), selectableMapping.isFormula(),
selectableMapping.getCustomReadExpression(), selectableMapping.getCustomReadExpression(),
selectableMapping.getCustomWriteExpression(),
selectableMapping.getJdbcMapping() selectableMapping.getJdbcMapping()
); );
} }
@ -59,7 +57,6 @@ public class ColumnReference implements Expression, Assignable {
null, null,
false, false,
null, null,
null,
jdbcMapping jdbcMapping
); );
} }
@ -71,7 +68,6 @@ public class ColumnReference implements Expression, Assignable {
selectableMapping.getSelectablePath(), selectableMapping.getSelectablePath(),
selectableMapping.isFormula(), selectableMapping.isFormula(),
selectableMapping.getCustomReadExpression(), selectableMapping.getCustomReadExpression(),
selectableMapping.getCustomWriteExpression(),
selectableMapping.getJdbcMapping() selectableMapping.getJdbcMapping()
); );
} }
@ -83,7 +79,6 @@ public class ColumnReference implements Expression, Assignable {
selectableMapping.getSelectablePath(), selectableMapping.getSelectablePath(),
selectableMapping.isFormula(), selectableMapping.isFormula(),
selectableMapping.getCustomReadExpression(), selectableMapping.getCustomReadExpression(),
selectableMapping.getCustomWriteExpression(),
jdbcMapping jdbcMapping
); );
} }
@ -93,7 +88,6 @@ public class ColumnReference implements Expression, Assignable {
String columnExpression, String columnExpression,
boolean isFormula, boolean isFormula,
String customReadExpression, String customReadExpression,
String customWriteExpression,
JdbcMapping jdbcMapping) { JdbcMapping jdbcMapping) {
this( this(
tableReference.getIdentificationVariable(), tableReference.getIdentificationVariable(),
@ -101,7 +95,6 @@ public class ColumnReference implements Expression, Assignable {
null, null,
isFormula, isFormula,
customReadExpression, customReadExpression,
customWriteExpression,
jdbcMapping jdbcMapping
); );
} }
@ -111,9 +104,8 @@ public class ColumnReference implements Expression, Assignable {
String columnExpression, String columnExpression,
boolean isFormula, boolean isFormula,
String customReadExpression, String customReadExpression,
String customWriteExpression,
JdbcMapping jdbcMapping) { JdbcMapping jdbcMapping) {
this( qualifier, columnExpression, null, isFormula, customReadExpression, customWriteExpression, jdbcMapping ); this( qualifier, columnExpression, null, isFormula, customReadExpression, jdbcMapping );
} }
public ColumnReference( public ColumnReference(
@ -122,7 +114,6 @@ public class ColumnReference implements Expression, Assignable {
SelectablePath selectablePath, SelectablePath selectablePath,
boolean isFormula, boolean isFormula,
String customReadExpression, String customReadExpression,
String customWriteExpression,
JdbcMapping jdbcMapping) { JdbcMapping jdbcMapping) {
this.qualifier = StringHelper.nullIfEmpty( qualifier ); this.qualifier = StringHelper.nullIfEmpty( qualifier );
@ -142,15 +133,6 @@ public class ColumnReference implements Expression, Assignable {
this.isFormula = isFormula; this.isFormula = isFormula;
this.readExpression = customReadExpression; this.readExpression = customReadExpression;
//TODO: writeExpression is never used, can it be removed?
if ( !isFormula && customWriteExpression != null ) {
this.customWriteExpression = customWriteExpression;
}
else {
this.customWriteExpression = null;
}
this.jdbcMapping = jdbcMapping; this.jdbcMapping = jdbcMapping;
} }
@ -175,10 +157,6 @@ public class ColumnReference implements Expression, Assignable {
return selectablePath; return selectablePath;
} }
public String getCustomWriteExpression() {
return customWriteExpression;
}
public boolean isColumnExpressionFormula() { public boolean isColumnExpressionFormula() {
return isFormula; return isFormula;
} }

View File

@ -42,4 +42,23 @@ public interface Expression extends SqlAstNode, SqlSelectionProducer {
this this
); );
} }
default SqlSelection createDomainResultSqlSelection(
int jdbcPosition,
int valuesArrayPosition,
JavaType javaType,
TypeConfiguration typeConfiguration) {
// Apply possible jdbc type wrapping
final Expression expression;
final JdbcMappingContainer expressionType = getExpressionType();
if ( expressionType == null ) {
expression = this;
}
else {
expression = expressionType.getJdbcMappings().get( 0 ).getJdbcType().wrapTopLevelSelectionExpression( this );
}
return expression == this
? createSqlSelection( jdbcPosition, valuesArrayPosition, javaType, typeConfiguration )
: new SqlSelectionImpl( jdbcPosition, valuesArrayPosition, expression );
}
} }
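For context, a standalone sketch (hypothetical expression types) of the decision encoded in this default: the JdbcType gets a chance to wrap the top-level selection; if it returns the expression unchanged, the ordinary SqlSelection is created, otherwise the wrapped expression is selected in its place. The cast rendering is illustrative only.

public class TopLevelSelectionSketch {

	interface Expr { String render(); }

	record Column(String name) implements Expr {
		@Override public String render() { return name; }
	}

	record CastToVarchar(Expr wrapped) implements Expr {
		@Override public String render() { return "cast(" + wrapped.render() + " as varchar(4000))"; }
	}

	// stand-in for JdbcType#wrapTopLevelSelectionExpression; the default is the identity
	static Expr wrapTopLevelSelectionExpression(Expr expression, boolean needsWrapping) {
		return needsWrapping ? new CastToVarchar( expression ) : expression;
	}

	public static void main(String[] args) {
		final Expr column = new Column( "e.payload" );
		final Expr plain = wrapTopLevelSelectionExpression( column, false );
		final Expr wrapped = wrapTopLevelSelectionExpression( column, true );
		System.out.println( plain == column );  // true: keep the ordinary selection
		System.out.println( wrapped.render() ); // cast(e.payload as varchar(4000)): select the wrapped expression
	}
}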

View File

@ -15,11 +15,13 @@ import org.hibernate.Incubating;
import org.hibernate.dialect.Dialect; import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.Size; import org.hibernate.engine.jdbc.Size;
import org.hibernate.query.sqm.CastType; import org.hibernate.query.sqm.CastType;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.spi.StringBuilderSqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.SqlTypes; import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder; import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor; import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions; import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.BasicJavaType;
import org.hibernate.type.descriptor.java.JavaType; import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry; import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration; import org.hibernate.type.spi.TypeConfiguration;
@ -160,6 +162,35 @@ public interface JdbcType extends Serializable {
return null; return null;
} }
/**
* Wraps the top-level selection expression so that values can be read with this JdbcType's ValueExtractor.
* @since 6.2
*/
@Incubating
default Expression wrapTopLevelSelectionExpression(Expression expression) {
return expression;
}
/**
* Wraps the write expression so that values can be written with this JdbcType's ValueBinder.
* @since 6.2
*/
@Incubating
default String wrapWriteExpression(String writeExpression, Dialect dialect) {
final StringBuilder sb = new StringBuilder( writeExpression.length() );
appendWriteExpression( writeExpression, new StringBuilderSqlAppender( sb ), dialect );
return sb.toString();
}
/**
* Appends the write expression, wrapped so that values can be written with this JdbcType's ValueBinder.
* @since 6.2
*/
@Incubating
default void appendWriteExpression(String writeExpression, SqlAppender appender, Dialect dialect) {
appender.append( writeExpression );
}
default boolean isInteger() { default boolean isInteger() {
int typeCode = getDdlTypeCode(); int typeCode = getDdlTypeCode();
return SqlTypes.isIntegral(typeCode) return SqlTypes.isIntegral(typeCode)
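For context, a standalone sketch (a mini Appender stands in for SqlAppender, the Dialect parameter is omitted) of how the two write-side hooks relate: wrapWriteExpression just collects appendWriteExpression into a String through a StringBuilder-backed appender, so a custom JdbcType normally only needs to override appendWriteExpression. The xml cast is a hypothetical example.

public class WriteExpressionHooksSketch {

	interface Appender { void append(String fragment); }

	// mirrors the default JdbcType#appendWriteExpression: pass the expression through unchanged
	static void appendWriteExpression(Appender appender, String writeExpression) {
		appender.append( writeExpression );
	}

	// hypothetical override for a type whose parameters need a cast
	static void appendCastingWriteExpression(Appender appender, String writeExpression) {
		appender.append( "cast(" );
		appender.append( writeExpression );
		appender.append( " as xml)" );
	}

	// mirrors the default JdbcType#wrapWriteExpression: collect the appended fragments into a String
	static String wrapWriteExpression(String writeExpression, boolean casting) {
		final StringBuilder sb = new StringBuilder( writeExpression.length() );
		if ( casting ) {
			appendCastingWriteExpression( sb::append, writeExpression );
		}
		else {
			appendWriteExpression( sb::append, writeExpression );
		}
		return sb.toString();
	}

	public static void main(String[] args) {
		System.out.println( wrapWriteExpression( "?", false ) ); // ?
		System.out.println( wrapWriteExpression( "?", true ) );  // cast(? as xml)
	}
}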

View File

@ -0,0 +1,170 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.type.descriptor.jdbc;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
/**
* Specialized type mapping for {@code JSON}, stored as string data (VARCHAR/NVARCHAR/CLOB/NCLOB) rather than as a native JSON SQL data type.
*
* @author Christian Beikov
*/
public class JsonAsStringJdbcType extends JsonJdbcType implements AdjustableJdbcType {
/**
* Singleton access
*/
public static final JsonAsStringJdbcType VARCHAR_INSTANCE = new JsonAsStringJdbcType( SqlTypes.LONG32VARCHAR, null );
public static final JsonAsStringJdbcType NVARCHAR_INSTANCE = new JsonAsStringJdbcType( SqlTypes.LONG32NVARCHAR, null );
public static final JsonAsStringJdbcType CLOB_INSTANCE = new JsonAsStringJdbcType( SqlTypes.CLOB, null );
public static final JsonAsStringJdbcType NCLOB_INSTANCE = new JsonAsStringJdbcType( SqlTypes.NCLOB, null );
private final boolean nationalized;
private final int ddlTypeCode;
protected JsonAsStringJdbcType(int ddlTypeCode, EmbeddableMappingType embeddableMappingType) {
super( embeddableMappingType );
this.ddlTypeCode = ddlTypeCode;
this.nationalized = ddlTypeCode == SqlTypes.LONG32NVARCHAR
|| ddlTypeCode == SqlTypes.NCLOB;
}
@Override
public int getJdbcTypeCode() {
return nationalized ? SqlTypes.NVARCHAR : SqlTypes.VARCHAR;
}
@Override
public int getDdlTypeCode() {
return ddlTypeCode;
}
@Override
public String toString() {
return "JsonAsStringJdbcType";
}
@Override
public JdbcType resolveIndicatedType(JdbcTypeIndicators indicators, JavaType<?> domainJtd) {
// Depending on the size of the column, we might have to adjust the jdbc type code for DDL.
// On some databases, LOBs can be compared using special functions, which is handled in the SqlAstTranslators,
// but that requires the correct jdbc type code to be available, which we ensure this way
if ( getEmbeddableMappingType() == null ) {
if ( needsLob( indicators ) ) {
return indicators.isNationalized() ? NCLOB_INSTANCE : CLOB_INSTANCE;
}
else {
return indicators.isNationalized() ? NVARCHAR_INSTANCE : VARCHAR_INSTANCE;
}
}
else {
if ( needsLob( indicators ) ) {
return new JsonAsStringJdbcType(
indicators.isNationalized() ? SqlTypes.NCLOB : SqlTypes.CLOB,
getEmbeddableMappingType()
);
}
else {
return new JsonAsStringJdbcType(
indicators.isNationalized() ? SqlTypes.LONG32NVARCHAR : SqlTypes.LONG32VARCHAR,
getEmbeddableMappingType()
);
}
}
}
protected boolean needsLob(JdbcTypeIndicators indicators) {
final Dialect dialect = indicators.getTypeConfiguration()
.getServiceRegistry()
.getService( JdbcServices.class )
.getDialect();
final long length = indicators.getColumnLength();
final long maxLength = indicators.isNationalized() ?
dialect.getMaxNVarcharLength() :
dialect.getMaxVarcharLength();
if ( length > maxLength ) {
return true;
}
final DdlTypeRegistry ddlTypeRegistry = indicators.getTypeConfiguration().getDdlTypeRegistry();
final String typeName = ddlTypeRegistry.getTypeName( getDdlTypeCode(), dialect );
return typeName.equals( ddlTypeRegistry.getTypeName( SqlTypes.CLOB, dialect ) )
|| typeName.equals( ddlTypeRegistry.getTypeName( SqlTypes.NCLOB, dialect ) );
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(
EmbeddableMappingType mappingType,
String sqlType,
RuntimeModelCreationContext creationContext) {
return new JsonAsStringJdbcType( ddlTypeCode, mappingType );
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
if ( nationalized ) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
final String json = ( (JsonAsStringJdbcType) getJdbcType() ).toString( value, getJavaType(), options );
st.setNString( index, json );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
final String json = ( (JsonAsStringJdbcType) getJdbcType() ).toString( value, getJavaType(), options );
st.setNString( name, json );
}
};
}
else {
return super.getBinder( javaType );
}
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
if ( nationalized ) {
return new BasicExtractor<>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return fromString( rs.getNString( paramIndex ), getJavaType(), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options)
throws SQLException {
return fromString( statement.getNString( index ), getJavaType(), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
throws SQLException {
return fromString( statement.getNString( name ), getJavaType(), options );
}
};
}
else {
return super.getExtractor( javaType );
}
}
}
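For context, a standalone sketch (hypothetical dialect limits, real SqlTypes constants not used) of the length part of the needsLob decision above: when the requested column length exceeds the dialect's maximum (N)VARCHAR length, the mapping switches to the (N)CLOB variant, otherwise the LONG32(N)VARCHAR variant is kept. The DDL-type-name comparison fallback is left out.

public class NeedsLobSketch {

	// stand-in for Dialect#getMaxVarcharLength / Dialect#getMaxNVarcharLength (values are made up)
	static long maxLength(boolean nationalized) {
		return nationalized ? 2_000 : 4_000;
	}

	// mirrors the length check in needsLob(): switch to a LOB when the column is too long
	static String resolveDdlType(long columnLength, boolean nationalized) {
		final boolean needsLob = columnLength > maxLength( nationalized );
		if ( needsLob ) {
			return nationalized ? "NCLOB" : "CLOB";
		}
		return nationalized ? "LONG32NVARCHAR" : "LONG32VARCHAR";
	}

	public static void main(String[] args) {
		System.out.println( resolveDdlType( 1_000, false ) ); // LONG32VARCHAR
		System.out.println( resolveDdlType( 8_000, false ) ); // CLOB
		System.out.println( resolveDdlType( 3_000, true ) );  // NCLOB
	}
}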

View File

@ -11,6 +11,8 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet; import java.sql.ResultSet;
import java.sql.SQLException; import java.sql.SQLException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.metamodel.mapping.EmbeddableMappingType; import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext; import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.type.SqlTypes; import org.hibernate.type.SqlTypes;
@ -18,20 +20,30 @@ import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor; import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions; import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType; import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
/** /**
* Specialized type mapping for {@code SQLXML} and the XML SQL data type. * Specialized type mapping for {@code SQLXML} and the XML SQL data type.
* *
* @author Christian Beikov * @author Christian Beikov
*/ */
public class XmlAsStringJdbcType extends XmlJdbcType { public class XmlAsStringJdbcType extends XmlJdbcType implements AdjustableJdbcType {
/** /**
* Singleton access * Singleton access
*/ */
public static final XmlAsStringJdbcType INSTANCE = new XmlAsStringJdbcType( null ); public static final XmlAsStringJdbcType VARCHAR_INSTANCE = new XmlAsStringJdbcType( SqlTypes.LONG32VARCHAR, null );
public static final XmlAsStringJdbcType NVARCHAR_INSTANCE = new XmlAsStringJdbcType( SqlTypes.LONG32NVARCHAR, null );
public static final XmlAsStringJdbcType CLOB_INSTANCE = new XmlAsStringJdbcType( SqlTypes.CLOB, null );
public static final XmlAsStringJdbcType NCLOB_INSTANCE = new XmlAsStringJdbcType( SqlTypes.NCLOB, null );
private XmlAsStringJdbcType(EmbeddableMappingType embeddableMappingType) { private final boolean nationalized;
private final int ddlTypeCode;
public XmlAsStringJdbcType(int ddlTypeCode, EmbeddableMappingType embeddableMappingType) {
super( embeddableMappingType ); super( embeddableMappingType );
this.ddlTypeCode = ddlTypeCode;
this.nationalized = ddlTypeCode == SqlTypes.LONG32NVARCHAR
|| ddlTypeCode == SqlTypes.NCLOB;
} }
@Override @Override
@ -39,12 +51,12 @@ public class XmlAsStringJdbcType extends XmlJdbcType {
EmbeddableMappingType mappingType, EmbeddableMappingType mappingType,
String sqlType, String sqlType,
RuntimeModelCreationContext creationContext) { RuntimeModelCreationContext creationContext) {
return new XmlAsStringJdbcType( mappingType ); return new XmlAsStringJdbcType( ddlTypeCode, mappingType );
} }
@Override @Override
public int getJdbcTypeCode() { public int getJdbcTypeCode() {
return SqlTypes.VARCHAR; return nationalized ? SqlTypes.NVARCHAR : SqlTypes.VARCHAR;
} }
@Override @Override
@ -52,13 +64,92 @@ public class XmlAsStringJdbcType extends XmlJdbcType {
return SqlTypes.SQLXML; return SqlTypes.SQLXML;
} }
@Override
public int getDdlTypeCode() {
return ddlTypeCode;
}
@Override @Override
public String toString() { public String toString() {
return "XmlAsStringJdbcType"; return "XmlAsStringJdbcType";
} }
@Override
public JdbcType resolveIndicatedType(JdbcTypeIndicators indicators, JavaType<?> domainJtd) {
// Depending on the size of the column, we might have to adjust the jdbc type code for DDL.
// On some databases, LOBs can be compared using special functions, which is handled in the SqlAstTranslators,
// but that requires the correct jdbc type code to be available, which we ensure this way
if ( getEmbeddableMappingType() == null ) {
if ( needsLob( indicators ) ) {
return indicators.isNationalized() ? NCLOB_INSTANCE : CLOB_INSTANCE;
}
else {
return indicators.isNationalized() ? NVARCHAR_INSTANCE : VARCHAR_INSTANCE;
}
}
else {
if ( needsLob( indicators ) ) {
return new XmlAsStringJdbcType(
indicators.isNationalized() ? SqlTypes.NCLOB : SqlTypes.CLOB,
getEmbeddableMappingType()
);
}
else {
return new XmlAsStringJdbcType(
indicators.isNationalized() ? SqlTypes.LONG32NVARCHAR : SqlTypes.LONG32VARCHAR,
getEmbeddableMappingType()
);
}
}
}
protected boolean needsLob(JdbcTypeIndicators indicators) {
final Dialect dialect = indicators.getTypeConfiguration()
.getServiceRegistry()
.getService( JdbcServices.class )
.getDialect();
final long length = indicators.getColumnLength();
final long maxLength = indicators.isNationalized() ?
dialect.getMaxNVarcharLength() :
dialect.getMaxVarcharLength();
if ( length > maxLength ) {
return true;
}
final DdlTypeRegistry ddlTypeRegistry = indicators.getTypeConfiguration().getDdlTypeRegistry();
final String typeName = ddlTypeRegistry.getTypeName( getDdlTypeCode(), dialect );
return typeName.equals( ddlTypeRegistry.getTypeName( SqlTypes.CLOB, dialect ) )
|| typeName.equals( ddlTypeRegistry.getTypeName( SqlTypes.NCLOB, dialect ) );
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
if ( nationalized ) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
final String xml = ( (XmlAsStringJdbcType) getJdbcType() ).toString(
value,
getJavaType(),
options
);
st.setNString( index, xml );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
final String xml = ( (XmlAsStringJdbcType) getJdbcType() ).toString(
value,
getJavaType(),
options
);
st.setNString( name, xml );
}
};
}
else {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
@ -83,9 +174,43 @@ public class XmlAsStringJdbcType extends XmlJdbcType {
}
};
}
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
if ( nationalized ) {
return new BasicExtractor<>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return getObject( rs.getNString( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options)
throws SQLException {
return getObject( statement.getNString( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
throws SQLException {
return getObject( statement.getNString( name ), options );
}
private X getObject(String xml, WrapperOptions options) throws SQLException {
if ( xml == null ) {
return null;
}
return ( (XmlAsStringJdbcType) getJdbcType() ).fromString(
xml,
getJavaType(),
options
);
}
};
}
else {
return new BasicExtractor<>( javaType, this ) {
@Override
@ -117,3 +242,4 @@ public class XmlAsStringJdbcType extends XmlJdbcType {
};
}
}
}
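The resolveIndicatedType / needsLob pair above decides whether the XML aggregate is stored in a plain (n)varchar column or has to fall back to a (N)CLOB: the declared column length is compared against Dialect#getMaxVarcharLength() / getMaxNVarcharLength(), and the resolved DDL type name is checked in case the dialect maps long varchars to LOB types anyway. A minimal sketch of a mapping that would exercise this; the entity, column name and lengths are made up, and it assumes a dialect that falls back to XmlAsStringJdbcType for SQLXML in the first place:

import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;

import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;
import jakarta.persistence.Embedded;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;

@Entity
public class EntityWithXmlPayload {

	@Id
	Integer id;

	// With length = 2000 the declared length fits the dialect's max (n)varchar
	// length, so the VARCHAR_INSTANCE / NVARCHAR_INSTANCE variant is resolved.
	// Raising it past Dialect#getMaxVarcharLength() (say, 10_000_000) makes
	// needsLob() return true and the CLOB/NCLOB variant is used instead.
	@Embedded
	@JdbcTypeCode(SqlTypes.SQLXML)
	@Column(name = "payload", length = 2000)
	Payload payload;

	@Embeddable
	public static class Payload {
		String title;
		String body;
	}
}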


@ -9,11 +9,14 @@ package org.hibernate.spatial.dialect.cockroachdb;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.PostgreSQLPGObjectJdbcType;
import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.spatial.FunctionKey;
import org.hibernate.spatial.HSMessageLogger;
import org.hibernate.spatial.contributor.ContributorImplementor;
import org.hibernate.spatial.dialect.postgis.PGCastingGeographyJdbcType;
import org.hibernate.spatial.dialect.postgis.PGCastingGeometryJdbcType;
import org.hibernate.spatial.dialect.postgis.PGGeographyJdbcType;
import org.hibernate.spatial.dialect.postgis.PGGeometryJdbcType;
import org.hibernate.spatial.dialect.postgis.PostgisSqmFunctionDescriptors;
@ -29,9 +32,15 @@ public class CockroachDbContributor implements ContributorImplementor {
@Override
public void contributeJdbcTypes(TypeContributions typeContributions) {
HSMessageLogger.SPATIAL_MSG_LOGGER.typeContributions( this.getClass().getCanonicalName() );
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
typeContributions.contributeJdbcType( PGGeometryJdbcType.INSTANCE_WKB_2 );
typeContributions.contributeJdbcType( PGGeographyJdbcType.INSTANCE_WKB_2 );
}
else {
typeContributions.contributeJdbcType( PGCastingGeometryJdbcType.INSTANCE_WKB_2 );
typeContributions.contributeJdbcType( PGCastingGeographyJdbcType.INSTANCE_WKB_2 );
}
}
@Override
public void contributeFunctions(FunctionContributions functionContributions) {
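Both spatial contributors now guard the PGobject-based descriptors behind PostgreSQLPGObjectJdbcType.isUsable() and fall back to the new casting descriptors otherwise. The check itself lives in hibernate-core and is not shown in this commit; presumably it amounts to probing for the PostgreSQL driver's PGobject class on the classpath, roughly like the sketch below (an assumption for illustration, not the actual implementation):

// Hedged sketch of the kind of check isUsable() presumably performs:
// the PGobject-based descriptors only work when the PostgreSQL JDBC
// driver class org.postgresql.util.PGobject can be loaded.
public final class PgObjectAvailability {

	private PgObjectAvailability() {
	}

	public static boolean pgObjectPresent(ClassLoader classLoader) {
		try {
			Class.forName( "org.postgresql.util.PGobject", false, classLoader );
			return true;
		}
		catch (ClassNotFoundException | LinkageError e) {
			return false;
		}
	}
}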


@ -0,0 +1,161 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.spatial.dialect.postgis;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import org.hibernate.dialect.Dialect;
import org.hibernate.spatial.GeometryLiteralFormatter;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.geolatte.geom.ByteBuffer;
import org.geolatte.geom.Geometry;
import org.geolatte.geom.codec.Wkb;
import org.geolatte.geom.codec.WkbDecoder;
import org.geolatte.geom.codec.Wkt;
import org.geolatte.geom.codec.WktDecoder;
import org.geolatte.geom.codec.WktEncoder;
/**
* Abstract type descriptor for PostGIS types that are bound as text and wrapped in a casting constructor function
*
* @author Karel Maesen, Geovise BVBA
*/
public abstract class AbstractCastingPostGISJdbcType implements JdbcType {
private final Wkb.Dialect wkbDialect;
AbstractCastingPostGISJdbcType(Wkb.Dialect dialect) {
wkbDialect = dialect;
}
@Override
public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaType) {
return new PGGeometryLiteralFormatter<>( getConstructorFunction(), javaType );
}
@Override
public abstract int getDefaultSqlTypeCode();
protected abstract String getConstructorFunction();
@Override
public void appendWriteExpression(
String writeExpression,
SqlAppender appender,
Dialect dialect) {
appender.append( getConstructorFunction() );
appender.append( '(' );
appender.append( writeExpression );
appender.append( ')' );
}
public Geometry<?> toGeometry(String wkt) {
if ( wkt == null ) {
return null;
}
if ( wkt.startsWith( "00" ) || wkt.startsWith( "01" ) ) {
// the value is hex-encoded WKB: it starts with the byte-order byte (00 or 01) rather than a WKT keyword
ByteBuffer buffer = ByteBuffer.from( wkt );
final WkbDecoder decoder = Wkb.newDecoder( wkbDialect );
return decoder.decode( buffer );
}
else {
return parseWkt( wkt );
}
}
private static Geometry<?> parseWkt(String pgValue) {
final WktDecoder decoder = Wkt.newDecoder( Wkt.Dialect.POSTGIS_EWKT_1 );
return decoder.decode( pgValue );
}
@Override
public int getJdbcTypeCode() {
return Types.VARCHAR;
}
@Override
public <X> ValueBinder<X> getBinder(final JavaType<X> javaType) {
return new BasicBinder<X>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
st.setString( index, toWkt( value, options ) );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
st.setString( name, toWkt( value, options ) );
}
private String toWkt(X value, WrapperOptions options) throws SQLException {
final WktEncoder encoder = Wkt.newEncoder( Wkt.Dialect.POSTGIS_EWKT_1 );
final Geometry<?> geometry = getJavaType().unwrap( value, Geometry.class, options );
return encoder.encode( geometry );
}
};
}
@Override
public <X> ValueExtractor<X> getExtractor(final JavaType<X> javaType) {
return new BasicExtractor<X>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return getJavaType().wrap( toGeometry( rs.getString( paramIndex ) ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return getJavaType().wrap( toGeometry( statement.getString( index ) ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
throws SQLException {
return getJavaType().wrap( toGeometry( statement.getString( name ) ), options );
}
};
}
static class PGGeometryLiteralFormatter<T> extends GeometryLiteralFormatter<T> {
private final String constructorFunction;
public PGGeometryLiteralFormatter(String constructorFunction, JavaType<T> javaType) {
super( javaType, Wkt.Dialect.POSTGIS_EWKT_1, "" );
this.constructorFunction = constructorFunction;
}
@Override
public void appendJdbcLiteral(SqlAppender appender, T value, Dialect dialect, WrapperOptions wrapperOptions) {
Geometry<?> geom = javaType.unwrap( value, Geometry.class, wrapperOptions );
appender.append( constructorFunction );
appender.appendSql( "('" );
appender.appendSql( Wkt.toWkt( geom, wktDialect ) );
appender.appendSql( "')" );
}
}
}
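The part that ties into HHH-15443 is appendWriteExpression: the parameter is bound as plain text (getJdbcTypeCode() returns VARCHAR) and the rendered SQL wraps the write expression in the constructor function, so the database performs the conversion; on the read side, toGeometry accepts either hex-encoded WKB or EWKT text. The stand-alone sketch below only mimics that wrapping, using a plain StringBuilder in place of Hibernate's SqlAppender, and the sample UPDATE statement in the comment is invented for illustration:

// Illustration only: a StringBuilder stands in for Hibernate's SqlAppender
// to show the SQL fragment that appendWriteExpression produces.
public class WriteExpressionDemo {

	static String wrap(String constructorFunction, String writeExpression) {
		// mirrors appendWriteExpression: constructorFunction + '(' + writeExpression + ')'
		StringBuilder sql = new StringBuilder();
		sql.append( constructorFunction );
		sql.append( '(' );
		sql.append( writeExpression );
		sql.append( ')' );
		return sql.toString();
	}

	public static void main(String[] args) {
		// For the geometry variant a bind parameter renders roughly as: st_geomfromewkt(?)
		System.out.println( wrap( "st_geomfromewkt", "?" ) );
		// ...so an update might end up looking like:
		// update landmark set location = st_geomfromewkt(?) where id = ?
	}
}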


@ -0,0 +1,38 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.spatial.dialect.postgis;
import org.hibernate.type.SqlTypes;
import org.geolatte.geom.codec.Wkb;
/**
* Type Descriptor for the Postgis Geography type
*
* @author Karel Maesen, Geovise BVBA
*/
public class PGCastingGeographyJdbcType extends AbstractCastingPostGISJdbcType {
// Type descriptor instance using EWKB v2 (postgis versions >= 2.2.2, see: https://trac.osgeo.org/postgis/ticket/3181)
public static final PGCastingGeographyJdbcType INSTANCE_WKB_2 = new PGCastingGeographyJdbcType( Wkb.Dialect.POSTGIS_EWKB_2 );
private PGCastingGeographyJdbcType(Wkb.Dialect dialect) {
super( dialect );
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.GEOGRAPHY;
}
@Override
protected String getConstructorFunction() {
return "st_geogfromtext";
}
}


@ -0,0 +1,38 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.spatial.dialect.postgis;
import org.hibernate.type.SqlTypes;
import org.geolatte.geom.codec.Wkb;
/**
* Type Descriptor for the Postgis Geometry type
*
* @author Karel Maesen, Geovise BVBA
*/
public class PGCastingGeometryJdbcType extends AbstractCastingPostGISJdbcType {
// Type descriptor instance using EWKB v2 (postgis versions >= 2.2.2, see: https://trac.osgeo.org/postgis/ticket/3181)
public static final PGCastingGeometryJdbcType INSTANCE_WKB_2 = new PGCastingGeometryJdbcType( Wkb.Dialect.POSTGIS_EWKB_2 );
private PGCastingGeometryJdbcType(Wkb.Dialect dialect) {
super( dialect );
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.GEOMETRY;
}
@Override
protected String getConstructorFunction() {
return "st_geomfromewkt";
}
}


@ -9,6 +9,7 @@ package org.hibernate.spatial.dialect.postgis;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.PostgreSQLPGObjectJdbcType;
import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.spatial.HSMessageLogger;
@ -25,9 +26,15 @@ public class PostgisDialectContributor implements ContributorImplementor {
@Override
public void contributeJdbcTypes(TypeContributions typeContributions) {
HSMessageLogger.SPATIAL_MSG_LOGGER.typeContributions( this.getClass().getCanonicalName() );
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
typeContributions.contributeJdbcType( PGGeometryJdbcType.INSTANCE_WKB_2 );
typeContributions.contributeJdbcType( PGGeographyJdbcType.INSTANCE_WKB_2 );
}
else {
typeContributions.contributeJdbcType( PGCastingGeometryJdbcType.INSTANCE_WKB_2 );
typeContributions.contributeJdbcType( PGCastingGeographyJdbcType.INSTANCE_WKB_2 );
}
}
@Override
public void contributeFunctions(FunctionContributions functionContributions) {
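From the mapping side nothing changes: a Geolatte geometry attribute is persisted the same way whether the PGobject descriptors or the new casting descriptors were contributed. A hypothetical mapping, with entity and column name invented for illustration:

import org.geolatte.geom.G2D;
import org.geolatte.geom.Point;

import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;

@Entity
public class Landmark {

	@Id
	Long id;

	// Bound either through PGobject (when the PostgreSQL driver is present)
	// or as WKT text wrapped in st_geomfromewkt(...) by the casting descriptor.
	@Column(name = "location")
	Point<G2D> location;
}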