HHH-15443 Allow JdbcType to wrap read and write expressions

Christian Beikov 2022-08-23 14:47:31 +02:00 committed by Steve Ebersole
parent 67f8bee35a
commit 16ca1a0595
53 changed files with 1564 additions and 293 deletions
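The core of the change: a JdbcType can now wrap the SQL used to read a column and to write a value, so dialect-specific types can emit casts around the plain ? parameter marker (for example cast(? as jsonb) on PostgreSQL when the PGobject class is not on the classpath) and wrap column reads (for example extract(epoch from ...) for interval columns stored as numerics). A minimal, self-contained sketch of that wrapping idea follows; the names are illustrative only, not the SPI added by this commit.

import java.util.function.UnaryOperator;

// Sketch only: a JDBC type contributes wrappers for the write parameter and the read expression.
class WrappingSketch {
    static final UnaryOperator<String> JSONB_WRITE = expr -> "cast(" + expr + " as jsonb)";
    static final UnaryOperator<String> INTERVAL_READ = expr -> "extract(epoch from " + expr + ")";

    public static void main(String[] args) {
        System.out.println( JSONB_WRITE.apply( "?" ) );            // cast(? as jsonb)
        System.out.println( INTERVAL_READ.apply( "t.duration" ) ); // extract(epoch from t.duration)
    }
}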

View File

@ -14,6 +14,10 @@ import java.util.Map;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.DerbyDialect;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.SybaseDialect;
import org.hibernate.metamodel.mapping.internal.BasicAttributeMapping;
import org.hibernate.metamodel.spi.MappingMetamodelImplementor;
import org.hibernate.persister.entity.EntityPersister;
@ -26,6 +30,9 @@ import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Entity;
@ -36,6 +43,8 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isOneOf;
import static org.hamcrest.Matchers.isA;
import static org.hamcrest.Matchers.notNullValue;
/**
* @author Christian Beikov
@ -60,10 +69,37 @@ public abstract class JsonMappingTests {
}
}
private final boolean supportsObjectMapKey;
private final Map<String, String> stringMap;
private final Map<StringNode, StringNode> objectMap;
private final List<StringNode> list;
private final String json;
protected JsonMappingTests(boolean supportsObjectMapKey) {
this.supportsObjectMapKey = supportsObjectMapKey;
this.stringMap = Map.of( "name", "ABC" );
this.objectMap = supportsObjectMapKey ? Map.of(
new StringNode( "name" ),
new StringNode( "ABC" )
) : null;
this.list = List.of( new StringNode( "ABC" ) );
this.json = "{\"name\":\"abc\"}";
}
@BeforeEach
public void setup(SessionFactoryScope scope) {
scope.inTransaction(
(session) -> {
session.persist( new EntityWithJson( 1, stringMap, objectMap, list, json ) );
}
);
}
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.inTransaction(
(session) -> {
session.remove( session.find( EntityWithJson.class, 1 ) );
}
);
}
@Test
@ -74,39 +110,56 @@ public abstract class JsonMappingTests {
final EntityPersister entityDescriptor = mappingMetamodel.findEntityDescriptor( EntityWithJson.class );
final JdbcTypeRegistry jdbcTypeRegistry = mappingMetamodel.getTypeConfiguration().getJdbcTypeRegistry();
final BasicAttributeMapping payloadAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "payload" );
final BasicAttributeMapping objectMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "objectMap" );
final BasicAttributeMapping listAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "list" );
final BasicAttributeMapping stringMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping(
"stringMap" );
final BasicAttributeMapping objectMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping(
"objectMap" );
final BasicAttributeMapping listAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping(
"list" );
final BasicAttributeMapping jsonAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "jsonString" );
assertThat( payloadAttribute.getJavaType().getJavaTypeClass(), equalTo( Map.class ) );
assertThat( stringMapAttribute.getJavaType().getJavaTypeClass(), equalTo( Map.class ) );
assertThat( objectMapAttribute.getJavaType().getJavaTypeClass(), equalTo( Map.class ) );
assertThat( listAttribute.getJavaType().getJavaTypeClass(), equalTo( List.class ) );
assertThat( jsonAttribute.getJavaType().getJavaTypeClass(), equalTo( String.class ) );
final JdbcType jsonType = jdbcTypeRegistry.getDescriptor( SqlTypes.JSON );
assertThat( payloadAttribute.getJdbcMapping().getJdbcType(), is( jsonType ) );
assertThat( objectMapAttribute.getJdbcMapping().getJdbcType(), is( jsonType ) );
assertThat( listAttribute.getJdbcMapping().getJdbcType(), is( jsonType ) );
assertThat( jsonAttribute.getJdbcMapping().getJdbcType(), is( jsonType ) );
Map<String, String> stringMap = Map.of( "name", "ABC" );
Map<StringNode, StringNode> objectMap = supportsObjectMapKey ? Map.of( new StringNode( "name" ), new StringNode( "ABC" ) ) : null;
List<StringNode> list = List.of( new StringNode( "ABC" ) );
String json = "{\"name\":\"abc\"}";
// PostgreSQL returns the JSON slightly formatted
String alternativeJson = "{\"name\": \"abc\"}";
scope.inTransaction(
(session) -> {
session.persist( new EntityWithJson( 1, stringMap, objectMap, list, json ) );
assertThat( stringMapAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) jsonType.getClass() ) );
assertThat( objectMapAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) jsonType.getClass() ) );
assertThat( listAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) jsonType.getClass() ) );
assertThat( jsonAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) jsonType.getClass() ) );
}
);
@Test
public void verifyReadWorks(SessionFactoryScope scope) {
scope.inTransaction(
(session) -> {
EntityWithJson entityWithJson = session.find( EntityWithJson.class, 1 );
assertThat( entityWithJson.payload, is( stringMap ) );
assertThat( entityWithJson.stringMap, is( stringMap ) );
assertThat( entityWithJson.objectMap, is( objectMap ) );
assertThat( entityWithJson.list, is( list ) );
}
);
}
@Test
@SkipForDialect(dialectClass = DerbyDialect.class, reason = "Derby doesn't support comparing CLOBs with the = operator")
@SkipForDialect(dialectClass = AbstractHANADialect.class, matchSubTypes = true, reason = "HANA doesn't support comparing LOBs with the = operator")
@SkipForDialect(dialectClass = SybaseDialect.class, matchSubTypes = true, reason = "Sybase doesn't support comparing LOBs with the = operator")
@SkipForDialect(dialectClass = OracleDialect.class, matchSubTypes = true, reason = "Oracle doesn't support comparing JSON with the = operator")
public void verifyComparisonWorks(SessionFactoryScope scope) {
scope.inTransaction(
(session) -> {
// PostgreSQL returns the JSON slightly formatted
String alternativeJson = "{\"name\": \"abc\"}";
EntityWithJson entityWithJson = session.createQuery(
"from EntityWithJson e where e.stringMap = :param",
EntityWithJson.class
)
.setParameter( "param", stringMap )
.getSingleResult();
assertThat( entityWithJson, notNullValue() );
assertThat( entityWithJson.stringMap, is( stringMap ) );
assertThat( entityWithJson.objectMap, is( objectMap ) );
assertThat( entityWithJson.list, is( list ) );
assertThat( entityWithJson.jsonString, isOneOf( json, alternativeJson ) );
@ -149,7 +202,7 @@ public abstract class JsonMappingTests {
//tag::basic-json-example[]
@JdbcTypeCode( SqlTypes.JSON )
private Map<String, String> payload;
private Map<String, String> stringMap;
//end::basic-json-example[]
@JdbcTypeCode( SqlTypes.JSON )
@ -166,12 +219,12 @@ public abstract class JsonMappingTests {
public EntityWithJson(
Integer id,
Map<String, String> payload,
Map<String, String> stringMap,
Map<StringNode, StringNode> objectMap,
List<StringNode> list,
String jsonString) {
this.id = id;
this.payload = payload;
this.stringMap = stringMap;
this.objectMap = objectMap;
this.list = list;
this.jsonString = jsonString;
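The verifyComparisonWorks test added above only works because the bound :param is rendered through the JSON JdbcType's write expression; the @SkipForDialect entries exclude databases whose JSON/LOB columns cannot be compared with =. As an illustration of the expected effect (an assumption about the rendered SQL, not shown in this diff), PostgreSQL would be expected to render the predicate roughly as string_map = cast(? as jsonb).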

View File

@ -11,6 +11,10 @@ import java.util.Map;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.AbstractHANADialect;
import org.hibernate.dialect.DerbyDialect;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.SybaseDialect;
import org.hibernate.metamodel.mapping.internal.BasicAttributeMapping;
import org.hibernate.metamodel.spi.MappingMetamodelImplementor;
import org.hibernate.persister.entity.EntityPersister;
@ -23,6 +27,9 @@ import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.hibernate.testing.orm.junit.SkipForDialect;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import jakarta.persistence.Entity;
@ -34,6 +41,8 @@ import jakarta.xml.bind.annotation.XmlRootElement;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isA;
import static org.hamcrest.Matchers.notNullValue;
/**
* @author Christian Beikov
@ -58,10 +67,36 @@ public abstract class XmlMappingTests {
}
}
private final boolean supportsObjectMapKey;
private final Map<String, StringNode> stringMap;
private final Map<StringNode, StringNode> objectMap;
private final List<StringNode> list;
protected XmlMappingTests(boolean supportsObjectMapKey) {
this.supportsObjectMapKey = supportsObjectMapKey;
this.stringMap = Map.of( "name", new StringNode( "ABC" ) );
this.objectMap = supportsObjectMapKey ? Map.of(
new StringNode( "name" ),
new StringNode( "ABC" )
) : null;
this.list = List.of( new StringNode( "ABC" ) );
}
@BeforeEach
public void setup(SessionFactoryScope scope) {
scope.inTransaction(
(session) -> {
session.persist( new EntityWithXml( 1, stringMap, objectMap, list ) );
}
);
}
@AfterEach
public void tearDown(SessionFactoryScope scope) {
scope.inTransaction(
(session) -> {
session.remove( session.find( EntityWithXml.class, 1 ) );
}
);
}
@Test
@ -72,27 +107,24 @@ public abstract class XmlMappingTests {
final EntityPersister entityDescriptor = mappingMetamodel.findEntityDescriptor( EntityWithXml.class );
final JdbcTypeRegistry jdbcTypeRegistry = mappingMetamodel.getTypeConfiguration().getJdbcTypeRegistry();
final BasicAttributeMapping stringMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "stringMap" );
final BasicAttributeMapping objectMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "objectMap" );
final BasicAttributeMapping listAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping( "list" );
final BasicAttributeMapping stringMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping(
"stringMap" );
final BasicAttributeMapping objectMapAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping(
"objectMap" );
final BasicAttributeMapping listAttribute = (BasicAttributeMapping) entityDescriptor.findAttributeMapping(
"list" );
assertThat( stringMapAttribute.getJavaType().getJavaTypeClass(), equalTo( Map.class ) );
assertThat( objectMapAttribute.getJavaType().getJavaTypeClass(), equalTo( Map.class ) );
assertThat( listAttribute.getJavaType().getJavaTypeClass(), equalTo( List.class ) );
final JdbcType xmlType = jdbcTypeRegistry.getDescriptor( SqlTypes.SQLXML );
assertThat( stringMapAttribute.getJdbcMapping().getJdbcType(), is( xmlType ) );
assertThat( objectMapAttribute.getJdbcMapping().getJdbcType(), is( xmlType ) );
assertThat( listAttribute.getJdbcMapping().getJdbcType(), is( xmlType ) );
Map<String, StringNode> stringMap = Map.of( "name", new StringNode( "ABC" ) );
Map<StringNode, StringNode> objectMap = supportsObjectMapKey ? Map.of( new StringNode( "name" ), new StringNode( "ABC" ) ) : null;
List<StringNode> list = List.of( new StringNode( "ABC" ) );
scope.inTransaction(
(session) -> {
session.persist( new EntityWithXml( 1, stringMap, objectMap, list ) );
assertThat( stringMapAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) xmlType.getClass() ) );
assertThat( objectMapAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) xmlType.getClass() ) );
assertThat( listAttribute.getJdbcMapping().getJdbcType(), isA( (Class<JdbcType>) xmlType.getClass() ) );
}
);
@Test
public void verifyReadWorks(SessionFactoryScope scope) {
scope.inTransaction(
(session) -> {
EntityWithXml entityWithXml = session.find( EntityWithXml.class, 1 );
@ -103,6 +135,28 @@ public abstract class XmlMappingTests {
);
}
@Test
@SkipForDialect(dialectClass = DerbyDialect.class, reason = "Derby doesn't support comparing CLOBs with the = operator")
@SkipForDialect(dialectClass = AbstractHANADialect.class, matchSubTypes = true, reason = "HANA doesn't support comparing LOBs with the = operator")
@SkipForDialect(dialectClass = SybaseDialect.class, matchSubTypes = true, reason = "Sybase doesn't support comparing LOBs with the = operator")
@SkipForDialect(dialectClass = OracleDialect.class, matchSubTypes = true, reason = "Oracle doesn't support comparing JSON with the = operator")
public void verifyComparisonWorks(SessionFactoryScope scope) {
scope.inTransaction(
(session) -> {
EntityWithXml entityWithJson = session.createQuery(
"from EntityWithXml e where e.stringMap = :param",
EntityWithXml.class
)
.setParameter( "param", stringMap )
.getSingleResult();
assertThat( entityWithJson, notNullValue() );
assertThat( entityWithJson.stringMap, is( stringMap ) );
assertThat( entityWithJson.objectMap, is( objectMap ) );
assertThat( entityWithJson.list, is( list ) );
}
);
}
@Entity(name = "EntityWithXml")
@Table(name = "EntityWithXml")
public static class EntityWithXml {

View File

@ -30,6 +30,9 @@ import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.NationalizationSupport;
import org.hibernate.dialect.PostgreSQLCastingInetJdbcType;
import org.hibernate.dialect.PostgreSQLCastingIntervalSecondJdbcType;
import org.hibernate.dialect.PostgreSQLCastingJsonJdbcType;
import org.hibernate.dialect.PostgreSQLDriverKind;
import org.hibernate.dialect.PostgreSQLInetJdbcType;
import org.hibernate.dialect.PostgreSQLIntervalSecondJdbcType;
@ -228,7 +231,6 @@ public class CockroachLegacyDialect extends Dialect {
final DdlTypeRegistry ddlTypeRegistry = typeContributions.getTypeConfiguration().getDdlTypeRegistry();
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "uuid", this ) );
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOMETRY, "geometry", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOGRAPHY, "geography", this ) );
ddlTypeRegistry.addDescriptor( new Scale6IntervalSecondDdlType( this ) );
@ -242,7 +244,6 @@ public class CockroachLegacyDialect extends Dialect {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "json", this ) );
}
}
}
@Override
public JdbcType resolveSqlTypeDescriptor(
@ -338,6 +339,27 @@ public class CockroachLegacyDialect extends Dialect {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLJsonJdbcType.INSTANCE );
}
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
if ( getVersion().isSameOrAfter( 20, 0 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSON_INSTANCE );
}
}
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
if ( getVersion().isSameOrAfter( 20, 0 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSON_INSTANCE );
}
}
// Force Blob binding to byte[] for CockroachDB

View File

@ -34,8 +34,10 @@ import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorMariaDBDatabaseImpl;
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
@ -141,9 +143,11 @@ public class MariaDBLegacyDialect extends MySQLLegacyDialect {
@Override
public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
// Make sure we register the JSON type descriptor before calling super, because MariaDB does not need casting
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, JsonJdbcType.INSTANCE );
super.contributeTypes( typeContributions, serviceRegistry );
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration()
.getJdbcTypeRegistry();
if ( getVersion().isSameOrAfter( 10, 7 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( VarcharUUIDJdbcType.INSTANCE );
}

View File

@ -21,6 +21,7 @@ import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.InnoDBStorageEngine;
import org.hibernate.dialect.MyISAMStorageEngine;
import org.hibernate.dialect.MySQLCastingJsonJdbcType;
import org.hibernate.dialect.MySQLServerConfiguration;
import org.hibernate.dialect.MySQLStorageEngine;
import org.hibernate.dialect.Replacer;
@ -630,11 +631,10 @@ public class MySQLLegacyDialect extends Dialect {
public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
super.contributeTypes( typeContributions, serviceRegistry );
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration()
.getJdbcTypeRegistry();
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
if ( getMySQLVersion().isSameOrAfter( 5, 7 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, JsonJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, MySQLCastingJsonJdbcType.INSTANCE );
}
// MySQL requires a custom binder for binding untyped nulls with the NULL type

View File

@ -30,9 +30,14 @@ import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.NationalizationSupport;
import org.hibernate.dialect.OracleDialect;
import org.hibernate.dialect.PostgreSQLCastingInetJdbcType;
import org.hibernate.dialect.PostgreSQLCastingIntervalSecondJdbcType;
import org.hibernate.dialect.PostgreSQLCastingJsonJdbcType;
import org.hibernate.dialect.PostgreSQLCastingStructJdbcType;
import org.hibernate.dialect.PostgreSQLDriverKind;
import org.hibernate.dialect.PostgreSQLInetJdbcType;
import org.hibernate.dialect.PostgreSQLIntervalSecondJdbcType;
import org.hibernate.dialect.PostgreSQLJsonJdbcType;
import org.hibernate.dialect.PostgreSQLJsonbJdbcType;
import org.hibernate.dialect.PostgreSQLPGObjectJdbcType;
import org.hibernate.dialect.PostgreSQLStructJdbcType;
@ -253,8 +258,6 @@ public class PostgreSQLLegacyDialect extends Dialect {
if ( getVersion().isSameOrAfter( 8, 2 ) ) {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "uuid", this ) );
}
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
// The following DDL types require that the PGobject class is usable/visible
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( INET, "inet", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOMETRY, "geometry", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOGRAPHY, "geography", this ) );
@ -270,7 +273,6 @@ public class PostgreSQLLegacyDialect extends Dialect {
}
}
}
}
@Override
public int getMaxVarcharLength() {
@ -1334,14 +1336,49 @@ public class PostgreSQLLegacyDialect extends Dialect {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLStructJdbcType.INSTANCE );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingStructJdbcType.INSTANCE );
}
if ( getVersion().isSameOrAfter( 8, 2 ) ) {
// HHH-9562
jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
if ( getVersion().isSameOrAfter( 9, 2 ) ) {
if ( getVersion().isSameOrAfter( 9, 4 ) ) {
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLJsonbJdbcType.INSTANCE );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
}
}
else {
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLJsonJdbcType.INSTANCE );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSON_INSTANCE );
}
}
}
}
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingStructJdbcType.INSTANCE );
if ( getVersion().isSameOrAfter( 8, 2 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
if ( getVersion().isSameOrAfter( 9, 2 ) ) {
if ( getVersion().isSameOrAfter( 9, 4 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSON_INSTANCE );
}
}
}
}

View File

@ -64,6 +64,7 @@ import org.hibernate.type.BasicTypeRegistry;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.java.spi.JavaTypeRegistry;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonAsStringJdbcType;
import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
import org.hibernate.type.descriptor.jdbc.XmlAsStringJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
@ -661,29 +662,14 @@ public class MetadataBuildingProcess {
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.POINT, SqlTypes.VARBINARY );
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.GEOGRAPHY, SqlTypes.GEOMETRY );
jdbcTypeRegistry.addDescriptorIfAbsent( JsonJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( XmlAsStringJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( JsonAsStringJdbcType.VARCHAR_INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( XmlAsStringJdbcType.VARCHAR_INSTANCE );
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.MATERIALIZED_BLOB, SqlTypes.BLOB );
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.MATERIALIZED_CLOB, SqlTypes.CLOB );
addFallbackIfNecessary( jdbcTypeRegistry, SqlTypes.MATERIALIZED_NCLOB, SqlTypes.NCLOB );
final DdlTypeRegistry ddlTypeRegistry = typeConfiguration.getDdlTypeRegistry();
// Fallback to the biggest varchar DdlType when json is requested
ddlTypeRegistry.addDescriptorIfAbsent(
new DdlTypeImpl(
SqlTypes.JSON,
ddlTypeRegistry.getTypeName( SqlTypes.VARCHAR, null, null, null ),
dialect
)
);
ddlTypeRegistry.addDescriptorIfAbsent(
new DdlTypeImpl(
SqlTypes.SQLXML,
ddlTypeRegistry.getTypeName( SqlTypes.VARCHAR, null, null, null ),
dialect
)
);
// Fallback to the geometry DdlType when geography is requested
final DdlType geometryType = ddlTypeRegistry.getDescriptor( SqlTypes.GEOMETRY );
if ( geometryType != null ) {

View File

@ -244,8 +244,9 @@ public class CockroachDialect extends Dialect {
final DdlTypeRegistry ddlTypeRegistry = typeContributions.getTypeConfiguration().getDdlTypeRegistry();
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "uuid", this ) );
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
// The following DDL types require that the PGobject class is usable/visible
// The following DDL types require that the PGobject class is usable/visible,
// or that a special JDBC type implementation exists that supports wrapping read/write expressions
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOMETRY, "geometry", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOGRAPHY, "geography", this ) );
ddlTypeRegistry.addDescriptor( new Scale6IntervalSecondDdlType( this ) );
@ -254,7 +255,6 @@ public class CockroachDialect extends Dialect {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( INET, "inet", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "jsonb", this ) );
}
}
@Override
public JdbcType resolveSqlTypeDescriptor(
@ -344,6 +344,17 @@ public class CockroachDialect extends Dialect {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLJsonbJdbcType.INSTANCE );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
}
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
}
// Force Blob binding to byte[] for CockroachDB

View File

@ -27,8 +27,10 @@ import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorMariaDBDatabaseImpl;
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
@ -133,9 +135,11 @@ public class MariaDBDialect extends MySQLDialect {
@Override
public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
// Make sure we register the JSON type descriptor before calling super, because MariaDB does not need casting
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, JsonJdbcType.INSTANCE );
super.contributeTypes( typeContributions, serviceRegistry );
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration()
.getJdbcTypeRegistry();
if ( getVersion().isSameOrAfter( 10, 7 ) ) {
jdbcTypeRegistry.addDescriptorIfAbsent( VarcharUUIDJdbcType.INSTANCE );
}

View File

@ -0,0 +1,45 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
/**
* @author Christian Beikov
*/
public class MySQLCastingJsonJdbcType extends JsonJdbcType {
/**
* Singleton access
*/
public static final JsonJdbcType INSTANCE = new MySQLCastingJsonJdbcType( null );
public MySQLCastingJsonJdbcType(EmbeddableMappingType embeddableMappingType) {
super( embeddableMappingType );
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(
EmbeddableMappingType mappingType,
String sqlType,
RuntimeModelCreationContext creationContext) {
return new MySQLCastingJsonJdbcType( mappingType );
}
@Override
public void appendWriteExpression(
String writeExpression,
SqlAppender appender,
Dialect dialect) {
appender.append( "cast(" );
appender.append( writeExpression );
appender.append( " as json)" );
}
}
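In practice the wrapping means that wherever a persister previously emitted a bare ? for a JSON column, it now emits the cast form produced by appendWriteExpression. A minimal way to see that output, using a plain StringBuilder as a stand-in for Hibernate's SqlAppender (a sketch, not real SPI usage):

// Sketch: mirrors MySQLCastingJsonJdbcType#appendWriteExpression with a StringBuilder
// standing in for the SqlAppender parameter.
class MySqlJsonWriteSketch {
    static String wrap(String writeExpression) {
        StringBuilder appender = new StringBuilder();
        appender.append( "cast(" );
        appender.append( writeExpression );
        appender.append( " as json)" );
        return appender.toString();
    }

    public static void main(String[] args) {
        // "?" is the default write expression for a column without a custom write fragment.
        System.out.println( wrap( "?" ) ); // cast(? as json)
    }
}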

View File

@ -627,10 +627,9 @@ public class MySQLDialect extends Dialect {
public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
super.contributeTypes( typeContributions, serviceRegistry );
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration()
.getJdbcTypeRegistry();
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration().getJdbcTypeRegistry();
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, JsonJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( SqlTypes.JSON, MySQLCastingJsonJdbcType.INSTANCE );
// MySQL requires a custom binder for binding untyped nulls with the NULL type
typeContributions.contributeJdbcType( NullJdbcType.INSTANCE );

View File

@ -0,0 +1,111 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.net.InetAddress;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.descriptor.jdbc.JdbcType;
/**
* @author Christian Beikov
*/
public class PostgreSQLCastingInetJdbcType implements JdbcType {
public static final PostgreSQLCastingInetJdbcType INSTANCE = new PostgreSQLCastingInetJdbcType();
@Override
public void appendWriteExpression(
String writeExpression,
SqlAppender appender,
Dialect dialect) {
appender.append( "cast(" );
appender.append( writeExpression );
appender.append( " as inet)" );
}
@Override
public int getJdbcTypeCode() {
return SqlTypes.VARBINARY;
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.INET;
}
@Override
public String toString() {
return "InetSecondJdbcType";
}
@Override
public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaType) {
// No literal support for now
return null;
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
st.setString( index, getStringValue( value, options ) );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
st.setString( name, getStringValue( value, options ) );
}
private String getStringValue(X value, WrapperOptions options) {
return getJavaType().unwrap( value, InetAddress.class, options ).getHostAddress();
}
};
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
return new BasicExtractor<>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return getObject( rs.getString( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return getObject( statement.getString( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException {
return getObject( statement.getString( name ), options );
}
private X getObject(String inetString, WrapperOptions options) throws SQLException {
if ( inetString == null ) {
return null;
}
return getJavaType().wrap( inetString, options );
}
};
}
}
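The binder above sends the inet value across as its textual host address and leaves the conversion to the server-side cast added by appendWriteExpression; the extractor reads the column back as a string and re-wraps it. A small, self-contained illustration of the client-side part (JDK API only, no Hibernate types):

import java.net.InetAddress;
import java.net.UnknownHostException;

class InetBindingSketch {
    public static void main(String[] args) throws UnknownHostException {
        InetAddress address = InetAddress.getByName( "192.168.0.1" );
        // This is the string the binder passes to setString(...);
        // the SQL side then applies cast(? as inet).
        System.out.println( address.getHostAddress() ); // 192.168.0.1
    }
}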

View File

@ -0,0 +1,162 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.math.BigDecimal;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.SelfRenderingExpression;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.AdjustableJdbcType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeIndicators;
/**
* @author Christian Beikov
*/
public class PostgreSQLCastingIntervalSecondJdbcType implements AdjustableJdbcType {
public static final PostgreSQLCastingIntervalSecondJdbcType INSTANCE = new PostgreSQLCastingIntervalSecondJdbcType();
@Override
public JdbcType resolveIndicatedType(JdbcTypeIndicators indicators, JavaType<?> domainJtd) {
final int scale;
if ( indicators.getColumnScale() == JdbcTypeIndicators.NO_COLUMN_SCALE ) {
scale = domainJtd.getDefaultSqlScale(
indicators.getTypeConfiguration()
.getServiceRegistry()
.getService( JdbcServices.class )
.getDialect(),
this
);
}
else {
scale = indicators.getColumnScale();
}
if ( scale > 6 ) {
// Since the maximum allowed scale on PostgreSQL is 6 (microsecond precision),
// we have to switch to the numeric type if the requested scale is greater
return indicators.getTypeConfiguration().getJdbcTypeRegistry().getDescriptor( SqlTypes.NUMERIC );
}
return this;
}
@Override
public Expression wrapTopLevelSelectionExpression(Expression expression) {
return new SelfRenderingExpression() {
@Override
public void renderToSql(
SqlAppender sqlAppender,
SqlAstTranslator<?> walker,
SessionFactoryImplementor sessionFactory) {
sqlAppender.append( "extract(epoch from " );
expression.accept( walker );
sqlAppender.append( ')' );
}
@Override
public JdbcMappingContainer getExpressionType() {
return expression.getExpressionType();
}
};
}
@Override
public void appendWriteExpression(
String writeExpression,
SqlAppender appender,
Dialect dialect) {
appender.append( '(' );
appender.append( writeExpression );
appender.append( "*interval'1 second)" );
}
@Override
public int getJdbcTypeCode() {
return SqlTypes.NUMERIC;
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.INTERVAL_SECOND;
}
@Override
public String toString() {
return "IntervalSecondJdbcType";
}
@Override
public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaType) {
// No literal support for now
return null;
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
st.setBigDecimal( index, getBigDecimalValue( value, options ) );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
st.setBigDecimal( name, getBigDecimalValue( value, options ) );
}
private BigDecimal getBigDecimalValue(X value, WrapperOptions options) {
return getJavaType().unwrap( value, BigDecimal.class, options ).movePointLeft( 9 );
}
};
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
return new BasicExtractor<>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return getObject( rs.getBigDecimal( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return getObject( statement.getBigDecimal( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException {
return getObject( statement.getBigDecimal( name ), options );
}
private X getObject(BigDecimal bigDecimal, WrapperOptions options) throws SQLException {
if ( bigDecimal == null ) {
return null;
}
return getJavaType().wrap( bigDecimal.movePointRight( 9 ), options );
}
};
}
}
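The interval type round-trips through a plain numeric column: the binder unwraps the domain value to a BigDecimal (for a Duration that is effectively a nanosecond count) and shifts the decimal point nine places left to get seconds, the write expression multiplies that by interval '1 second', and reads go through extract(epoch from ...) before the extractor shifts the point back. A quick check of the scaling arithmetic, assuming the nanosecond convention described:

import java.math.BigDecimal;

class IntervalScalingSketch {
    public static void main(String[] args) {
        BigDecimal nanos = new BigDecimal( "1500000000" );     // 1.5 seconds as nanoseconds
        BigDecimal seconds = nanos.movePointLeft( 9 );         // 1.500000000, bound as (?*interval'1 second')
        BigDecimal backToNanos = seconds.movePointRight( 9 );  // 1500000000, after extract(epoch from ...)
        System.out.println( seconds + " " + backToNanos );
    }
}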

View File

@ -0,0 +1,59 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.JsonJdbcType;
/**
* @author Christian Beikov
*/
public class PostgreSQLCastingJsonJdbcType extends JsonJdbcType {
public static final PostgreSQLCastingJsonJdbcType JSON_INSTANCE = new PostgreSQLCastingJsonJdbcType( false, null );
public static final PostgreSQLCastingJsonJdbcType JSONB_INSTANCE = new PostgreSQLCastingJsonJdbcType( true, null );
private final boolean jsonb;
public PostgreSQLCastingJsonJdbcType(boolean jsonb, EmbeddableMappingType embeddableMappingType) {
super( embeddableMappingType );
this.jsonb = jsonb;
}
@Override
public int getDdlTypeCode() {
return SqlTypes.JSON;
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(
EmbeddableMappingType mappingType,
String sqlType,
RuntimeModelCreationContext creationContext) {
return new PostgreSQLCastingJsonJdbcType( jsonb, mappingType );
}
@Override
public void appendWriteExpression(
String writeExpression,
SqlAppender appender,
Dialect dialect) {
appender.append( "cast(" );
appender.append( writeExpression );
appender.append( " as " );
if ( jsonb ) {
appender.append( "jsonb)" );
}
else {
appender.append( "json)" );
}
}
}

View File

@ -0,0 +1,91 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.dialect;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.AggregateJdbcType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
/**
* @author Christian Beikov
*/
public class PostgreSQLCastingStructJdbcType extends PostgreSQLStructJdbcType {
public static final PostgreSQLCastingStructJdbcType INSTANCE = new PostgreSQLCastingStructJdbcType( null, null, null );
public PostgreSQLCastingStructJdbcType(
EmbeddableMappingType embeddableMappingType,
String typeName,
int[] orderMapping) {
super( embeddableMappingType, typeName, orderMapping );
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(
EmbeddableMappingType mappingType,
String sqlType,
RuntimeModelCreationContext creationContext) {
return new PostgreSQLCastingStructJdbcType(
mappingType,
sqlType,
creationContext.getBootModel()
.getDatabase()
.getDefaultNamespace()
.locateUserDefinedType( Identifier.toIdentifier( sqlType ) )
.getOrderMapping()
);
}
@Override
public void appendWriteExpression(
String writeExpression,
SqlAppender appender,
Dialect dialect) {
appender.append( "cast(" );
appender.append( writeExpression );
appender.append( " as " );
appender.append( getTypeName() );
appender.append( ')' );
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
final String stringValue = ( (PostgreSQLCastingStructJdbcType) getJdbcType() ).toString(
value,
getJavaType(),
options
);
st.setString( index, stringValue );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
final String stringValue = ( (PostgreSQLCastingStructJdbcType) getJdbcType() ).toString(
value,
getJavaType(),
options
);
st.setString( name, stringValue );
}
};
}
}

View File

@ -248,8 +248,6 @@ public class PostgreSQLDialect extends Dialect {
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( SQLXML, "xml", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( UUID, "uuid", this ) );
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
// The following DDL types require that the PGobject class is usable/visible
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( INET, "inet", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOMETRY, "geometry", this ) );
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( GEOGRAPHY, "geography", this ) );
@ -258,7 +256,6 @@ public class PostgreSQLDialect extends Dialect {
// Prefer jsonb if possible
ddlTypeRegistry.addDescriptor( new DdlTypeImpl( JSON, "jsonb", this ) );
}
}
@Override
public int getMaxVarcharLength() {
@ -1316,17 +1313,27 @@ public class PostgreSQLDialect extends Dialect {
jdbcTypeRegistry.addDescriptor( XmlJdbcType.INSTANCE );
if ( driverKind == PostgreSQLDriverKind.PG_JDBC ) {
// HHH-9562
jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLStructJdbcType.INSTANCE );
}
// HHH-9562
jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLJsonbJdbcType.INSTANCE );
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingStructJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
}
}
else {
jdbcTypeRegistry.addDescriptorIfAbsent( UUIDJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingInetJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingIntervalSecondJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingStructJdbcType.INSTANCE );
jdbcTypeRegistry.addDescriptorIfAbsent( PostgreSQLCastingJsonJdbcType.JSONB_INSTANCE );
}
// PostgreSQL requires a custom binder for binding untyped nulls as VARBINARY

View File

@ -56,7 +56,7 @@ import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithM
*/
public class PostgreSQLStructJdbcType extends PostgreSQLPGObjectJdbcType implements AggregateJdbcType {
public static final PostgreSQLStructJdbcType INSTANCE = new PostgreSQLStructJdbcType();
public static final PostgreSQLStructJdbcType INSTANCE = new PostgreSQLStructJdbcType( null, null, null );
private static final DateTimeFormatter LOCAL_DATE_TIME;
static {
@ -89,11 +89,6 @@ public class PostgreSQLStructJdbcType extends PostgreSQLPGObjectJdbcType impleme
private final EmbeddableMappingType embeddableMappingType;
private final ValueExtractor<Object[]> objectArrayExtractor;
private PostgreSQLStructJdbcType() {
// The default instance is for reading only and will return an Object[]
this( null, null, null );
}
public PostgreSQLStructJdbcType(EmbeddableMappingType embeddableMappingType, String typeName, int[] orderMapping) {
super( typeName, SqlTypes.STRUCT );
this.embeddableMappingType = embeddableMappingType;

View File

@ -122,7 +122,6 @@ public class AggregateWindowEmulationQueryTransformer implements QueryTransforme
columnName,
false,
null,
null,
mapping.getJdbcMapping()
);
final Expression expression = subSelections.get( i ).getExpression();
@ -190,7 +189,6 @@ public class AggregateWindowEmulationQueryTransformer implements QueryTransforme
columnName,
false,
null,
null,
jdbcMapping
);
final int subValuesPosition = subSelectClause.getSqlSelections().size();
@ -252,7 +250,6 @@ public class AggregateWindowEmulationQueryTransformer implements QueryTransforme
columnName,
false,
null,
null,
jdbcMapping
);
final int subValuesPosition = subSelectClause.getSqlSelections().size();
@ -311,7 +308,6 @@ public class AggregateWindowEmulationQueryTransformer implements QueryTransforme
columnName,
false,
null,
null,
jdbcMapping
);
final int subValuesPosition = subSelectClause.getSqlSelections().size();
@ -368,7 +364,6 @@ public class AggregateWindowEmulationQueryTransformer implements QueryTransforme
columnName,
false,
null,
null,
mapping.getJdbcMapping()
)
);

View File

@ -13,6 +13,7 @@ import java.util.NoSuchElementException;
import org.hibernate.FetchMode;
import org.hibernate.MappingException;
import org.hibernate.boot.model.relational.Database;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.mapping.Column;
@ -186,6 +187,11 @@ public class ExportableColumn extends Column {
public boolean isColumnUpdateable(int index) {
return true;
}
@Override
public MetadataBuildingContext getBuildingContext() {
return table.getIdentifierValue().getBuildingContext();
}
}
public static class ColumnIterator implements Iterator<Selectable> {

View File

@ -170,6 +170,7 @@ public abstract class Collection implements Fetchable, Value, Filterable {
this.loaderName = original.loaderName;
}
@Override
public MetadataBuildingContext getBuildingContext() {
return buildingContext;
}

View File

@ -560,6 +560,7 @@ public class Column implements Selectable, Serializable, Cloneable, ColumnTypeIn
return hasCustomRead() ? customRead : getQuotedName( dialect );
}
@Override
public String getWriteExpr() {
return customWrite != null && customWrite.length() > 0 ? customWrite : "?";
}

View File

@ -51,6 +51,7 @@ public class OneToMany implements Value {
return new OneToMany( this );
}
@Override
public MetadataBuildingContext getBuildingContext() {
return buildingContext;
}

View File

@ -6,8 +6,12 @@
*/
package org.hibernate.mapping;
import org.hibernate.Incubating;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.spi.TypeConfiguration;
/**
@ -62,4 +66,17 @@ public interface Selectable {
String getAlias(Dialect dialect, Table table);
String getTemplate(Dialect dialect, TypeConfiguration typeConfiguration, SqmFunctionRegistry functionRegistry);
@Incubating
default String getWriteExpr() {
final String customWriteExpression = getCustomWriteExpression();
return customWriteExpression == null || customWriteExpression.isEmpty()
? "?"
: customWriteExpression;
}
@Incubating
default String getWriteExpr(JdbcMapping jdbcMapping, Dialect dialect) {
return jdbcMapping.getJdbcType().wrapWriteExpression( getWriteExpr(), dialect );
}
}
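These two default methods are the bridge between the mapping model and the wrapping JdbcTypes: getWriteExpr() falls back to "?" when no custom write expression is configured, and the JdbcMapping/Dialect overload hands that string to the JdbcType via wrapWriteExpression (whose implementation is outside this excerpt) so a casting type can wrap it. A toy restatement of the composition; the wrapper name is hypothetical:

// Sketch of "fall back to ?, then let the JDBC type wrap it"; wrapAsJsonb is illustrative only.
class WriteExprSketch {
    static String writeExpr(String customWriteExpression) {
        return customWriteExpression == null || customWriteExpression.isEmpty() ? "?" : customWriteExpression;
    }

    static String wrapAsJsonb(String expr) {
        return "cast(" + expr + " as jsonb)";
    }

    public static void main(String[] args) {
        System.out.println( wrapAsJsonb( writeExpr( null ) ) );       // cast(? as jsonb)
        System.out.println( wrapAsJsonb( writeExpr( "upper(?)" ) ) ); // cast(upper(?) as jsonb)
    }
}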

View File

@ -142,6 +142,7 @@ public abstract class SimpleValue implements KeyValue {
this.generator = original.generator;
}
@Override
public MetadataBuildingContext getBuildingContext() {
return buildingContext;
}

View File

@ -11,9 +11,15 @@ import java.util.Iterator;
import java.util.List;
import org.hibernate.FetchMode;
import org.hibernate.Incubating;
import org.hibernate.MappingException;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.type.CompositeType;
import org.hibernate.type.EntityType;
import org.hibernate.type.MetaType;
import org.hibernate.type.Type;
/**
@ -72,6 +78,59 @@ public interface Value extends Serializable {
Type getType() throws MappingException;
@Incubating
default JdbcMapping getSelectableType(Mapping factory, int index) throws MappingException {
return getType( factory, getType(), index );
}
private JdbcMapping getType(Mapping factory, Type elementType, int index) {
if ( elementType instanceof CompositeType ) {
final Type[] subtypes = ( (CompositeType) elementType ).getSubtypes();
for ( int i = 0; i < subtypes.length; i++ ) {
final Type subtype = subtypes[i];
final int columnSpan;
if ( subtype instanceof EntityType ) {
final EntityType entityType = (EntityType) subtype;
final Type idType = getIdType( entityType );
columnSpan = idType.getColumnSpan( factory );
}
else {
columnSpan = subtype.getColumnSpan( factory );
}
if ( columnSpan < index ) {
index -= columnSpan;
}
else if ( columnSpan != 0 ) {
return getType( factory, subtype, index );
}
}
// Should never happen
throw new IllegalStateException( "Type index is past the type's column span!" );
}
else if ( elementType instanceof EntityType ) {
final EntityType entityType = (EntityType) elementType;
final Type idType = getIdType( entityType );
return getType( factory, idType, index );
}
else if ( elementType instanceof MetaType ) {
return (JdbcMapping) ( (MetaType) elementType ).getBaseType();
}
return (JdbcMapping) elementType;
}
private Type getIdType(EntityType entityType) {
final PersistentClass entityBinding = getBuildingContext().getMetadataCollector()
.getEntityBinding( entityType.getAssociatedEntityName() );
final Type idType;
if ( entityType.isReferenceToPrimaryKey() ) {
idType = entityBinding.getIdentifier().getType();
}
else {
idType = entityBinding.getProperty( entityType.getRHSUniqueKeyPropertyName() ).getType();
}
return idType;
}
FetchMode getFetchMode();
Table getTable();
@ -105,6 +164,10 @@ public interface Value extends Serializable {
boolean[] getColumnUpdateability();
boolean hasAnyUpdatableColumns();
@Incubating
default MetadataBuildingContext getBuildingContext() {
throw new UnsupportedOperationException( "Value#getBuildingContext is not implemented by: " + getClass().getName() );
}
ServiceRegistry getServiceRegistry();
Value copy();
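Value#getSelectableType lets the persisters changed below (AbstractEntityPersister, AbstractCollectionPersister) look up the JdbcMapping that owns a given column index so they can ask it for the wrapped write expression. The private helper walks the composite type's column spans, subtracting each subtype's span from the index until the owning subtype is reached and recursing into entity identifier types along the way. A small worked sketch of that span-walking arithmetic, using plain ints instead of Hibernate types and mirroring the comparison in the helper:

class ColumnSpanWalkSketch {
    static int owningSubtype(int index, int[] columnSpans) {
        for ( int i = 0; i < columnSpans.length; i++ ) {
            if ( columnSpans[i] < index ) {
                index -= columnSpans[i]; // skip past this subtype's columns
            }
            else if ( columnSpans[i] != 0 ) {
                return i;                // this subtype owns the column at the remaining index
            }
        }
        throw new IllegalStateException( "Type index is past the type's column span!" );
    }

    public static void main(String[] args) {
        // Hypothetical spans: a basic (1 column), a to-one with a 2-column key, another basic (1 column).
        System.out.println( owningSubtype( 2, new int[] { 1, 2, 1 } ) ); // 1
    }
}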

View File

@ -306,7 +306,7 @@ public abstract class AbstractEmbeddableMapping implements EmbeddableMappingType
selectablePath,
selectable.isFormula(),
selectable.getCustomReadExpression(),
selectable.getCustomWriteExpression(),
selectable.getWriteExpr( ( (BasicType<?>) subtype ).getJdbcMapping(), dialect ),
columnDefinition,
length,
precision,

View File

@ -250,6 +250,11 @@ public class BasicAttributeMapping
return customWriteExpression;
}
@Override
public String getWriteExpression() {
return customWriteExpression;
}
@Override
public String getColumnDefinition() {
return columnDefinition;

View File

@ -150,7 +150,6 @@ public class CaseStatementDiscriminatorMappingImpl extends AbstractDiscriminator
tableDiscriminatorDetails.getCheckColumnName(),
false,
null,
null,
getJdbcMapping()
),
true

View File

@ -420,7 +420,7 @@ public class EmbeddableMappingTypeImpl extends AbstractEmbeddableMapping impleme
selectablePath,
selectable.isFormula(),
selectable.getCustomReadExpression(),
selectable.getCustomWriteExpression(),
selectable.getWriteExpr( ( (BasicType<?>) subtype ).getJdbcMapping(), dialect ),
columnDefinition,
length,
precision,

View File

@ -16,6 +16,7 @@ import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.SelectablePath;
import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import org.hibernate.type.BasicType;
import org.hibernate.type.spi.TypeConfiguration;
/**
@ -57,7 +58,7 @@ public class SelectableMappingImpl extends SqlTypedMappingImpl implements Select
this.selectionExpression = selectionExpression.intern();
this.selectablePath = selectablePath == null ? new SelectablePath( selectionExpression ) : selectablePath;
this.customReadExpression = customReadExpression == null ? null : customReadExpression.intern();
this.customWriteExpression = customWriteExpression == null ? null : customWriteExpression.intern();
this.customWriteExpression = customWriteExpression == null || isFormula ? null : customWriteExpression.intern();
this.nullable = nullable;
this.insertable = insertable;
this.updateable = updateable;
@ -160,7 +161,7 @@ public class SelectableMappingImpl extends SqlTypedMappingImpl implements Select
? null
: parentPath.append( selectableName ),
selectable.getCustomReadExpression(),
selectable.getCustomWriteExpression(),
selectable.getWriteExpr( jdbcMapping, dialect ),
columnDefinition,
length,
precision,
@ -214,6 +215,11 @@ public class SelectableMappingImpl extends SqlTypedMappingImpl implements Select
return customWriteExpression;
}
@Override
public String getWriteExpression() {
return customWriteExpression;
}
@Override
public boolean isFormula() {
return isFormula;

View File

@ -67,7 +67,6 @@ public class ColumnReference implements OrderingExpression, SequencePart {
// because these ordering fragments are only ever part of the order-by clause, there
// is no need for the JdbcMapping
null,
null,
null
)
);

View File

@ -128,6 +128,7 @@ import org.hibernate.sql.model.jdbc.JdbcMutationOperation;
import org.hibernate.sql.results.graph.DomainResult;
import org.hibernate.sql.results.graph.internal.ImmutableFetchList;
import org.hibernate.sql.results.internal.SqlSelectionImpl;
import org.hibernate.type.AssociationType;
import org.hibernate.type.CollectionType;
import org.hibernate.type.CompositeType;
import org.hibernate.type.EntityType;
@ -397,7 +398,7 @@ public abstract class AbstractCollectionPersister
else {
Column col = (Column) selectable;
elementColumnNames[j] = col.getQuotedName( dialect );
elementColumnWriters[j] = col.getWriteExpr();
elementColumnWriters[j] = col.getWriteExpr( elementBootDescriptor.getSelectableType( factory, j ), dialect );
elementColumnReaders[j] = col.getReadExpr( dialect );
elementColumnReaderTemplates[j] = col.getTemplate(
dialect,

View File

@ -662,7 +662,7 @@ public abstract class AbstractEntityPersister
else {
final Column column = (Column) selectable;
colNames[k] = column.getQuotedName( dialect );
colWriters[k] = column.getWriteExpr();
colWriters[k] = column.getWriteExpr( prop.getValue().getSelectableType( factory, k ), dialect );
}
}
propertyColumnNames[i] = colNames;
@ -1326,7 +1326,6 @@ public abstract class AbstractEntityPersister
rootPkColumnName,
false,
null,
null,
selection.getJdbcMapping()
)
);
@ -1339,7 +1338,6 @@ public abstract class AbstractEntityPersister
fkColumnName,
false,
null,
null,
selection.getJdbcMapping()
)
);
@ -3062,7 +3060,6 @@ public abstract class AbstractEntityPersister
discriminatorExpression,
isDiscriminatorFormula(),
null,
null,
discriminatorType.getJdbcMapping()
)
);
@ -5301,7 +5298,7 @@ public abstract class AbstractEntityPersister
null,
false,
null,
null,
"?",
column.getSqlType(),
column.getLength(),
column.getPrecision(),
@ -5330,7 +5327,7 @@ public abstract class AbstractEntityPersister
attrColumnExpression = attrColumnNames[0];
isAttrColumnExpressionFormula = false;
customReadExpr = null;
customWriteExpr = null;
customWriteExpr = "?";
Column column = value.getColumns().get( 0 );
columnDefinition = column.getSqlType();
length = column.getLength();
@ -5356,7 +5353,7 @@ public abstract class AbstractEntityPersister
creationContext.getTypeConfiguration(),
creationContext.getFunctionRegistry()
);
customWriteExpr = selectable.getCustomWriteExpression();
customWriteExpr = selectable.getWriteExpr( (JdbcMapping) attrType, creationContext.getDialect() );
Column column = value.getColumns().get( 0 );
columnDefinition = column.getSqlType();
length = column.getLength();

View File

@ -268,7 +268,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(),
false,
null,
null,
rowNumberColumn.getJdbcMapping()
);
insertStatement.getTargetColumns().set(
@ -322,7 +321,6 @@ public class CteInsertHandler implements InsertHandler {
columnReference.getColumnExpression(),
false,
null,
null,
columnReference.getJdbcMapping()
)
)
@ -353,7 +351,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(),
false,
null,
null,
rowNumberColumn.getJdbcMapping()
);
insertStatement.getTargetColumns().add( columnReference );
@ -391,7 +388,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(),
false,
null,
null,
rowNumberColumn.getJdbcMapping()
);
final CteColumn idColumn = fullEntityCteTable.getCteColumns().get( 0 );
@ -498,7 +494,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(),
false,
null,
null,
rowNumberColumn.getJdbcMapping()
)
)
@ -515,7 +510,6 @@ public class CteInsertHandler implements InsertHandler {
idColumn.getColumnExpression(),
false,
null,
null,
idColumn.getJdbcMapping()
),
BinaryArithmeticOperator.ADD,
@ -527,7 +521,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(),
false,
null,
null,
rowNumberColumn.getJdbcMapping()
),
integerType
@ -560,7 +553,6 @@ public class CteInsertHandler implements InsertHandler {
cteColumn.getColumnExpression(),
false,
null,
null,
cteColumn.getJdbcMapping()
)
)
@ -822,7 +814,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(),
false,
null,
null,
rowNumberColumn.getJdbcMapping()
);
// Insert in the same order as the original tuples came
@ -843,7 +834,6 @@ public class CteInsertHandler implements InsertHandler {
keyColumns[j],
false,
null,
null,
null
)
);
@ -878,7 +868,6 @@ public class CteInsertHandler implements InsertHandler {
rowNumberColumn.getColumnExpression(),
false,
null,
null,
rowNumberColumn.getJdbcMapping()
)
)
@ -897,7 +886,6 @@ public class CteInsertHandler implements InsertHandler {
idCteColumn.getColumnExpression(),
false,
null,
null,
idCteColumn.getJdbcMapping()
)
)
@ -914,7 +902,6 @@ public class CteInsertHandler implements InsertHandler {
cteColumn.getColumnExpression(),
false,
null,
null,
cteColumn.getJdbcMapping()
)
)
@ -944,7 +931,6 @@ public class CteInsertHandler implements InsertHandler {
idCteColumn.getColumnExpression(),
false,
null,
null,
idCteColumn.getJdbcMapping()
);
finalResultQuery.getSelectClause().addSqlSelection(
@ -998,7 +984,6 @@ public class CteInsertHandler implements InsertHandler {
keyColumns[j],
false,
null,
null,
null
)
);
@ -1011,7 +996,6 @@ public class CteInsertHandler implements InsertHandler {
rootKeyColumns[j],
false,
null,
null,
null
)
)
@ -1046,7 +1030,6 @@ public class CteInsertHandler implements InsertHandler {
entry.getKey().get( j ).getColumnExpression(),
columnReference.isColumnExpressionFormula(),
null,
null,
columnReference.getJdbcMapping()
)
)

View File

@ -74,7 +74,6 @@ public class InPredicateRestrictionProducer implements MatchingIdRestrictionProd
// id columns cannot be formulas and cannot have custom read and write expressions
false,
null,
null,
basicIdMapping.getJdbcMapping()
);
predicate = new InListPredicate( inFixture );

View File

@ -452,7 +452,6 @@ public class InlineUpdateHandler implements UpdateHandler {
columnReference.getColumnExpression(),
false,
null,
null,
columnReference.getJdbcMapping()
);
columnNames.add( columnReference.getColumnExpression() );
@ -463,7 +462,6 @@ public class InlineUpdateHandler implements UpdateHandler {
selectableMapping.getSelectionExpression(),
false,
null,
null,
columnReference.getJdbcMapping()
)
);
@ -485,7 +483,6 @@ public class InlineUpdateHandler implements UpdateHandler {
columnReference.getColumnExpression(),
false,
null,
null,
columnReference.getJdbcMapping()
);
columnNames = Collections.singletonList( columnReference.getColumnExpression() );
@ -497,7 +494,6 @@ public class InlineUpdateHandler implements UpdateHandler {
( (BasicEntityIdentifierMapping) entityDescriptor.getIdentifierMapping() ).getSelectionExpression(),
false,
null,
null,
columnReference.getJdbcMapping()
)
);

View File

@ -80,7 +80,6 @@ public final class ExecuteWithTemporaryTableHelper {
// id columns cannot be formulas and cannot have custom read and write expressions
false,
null,
null,
column.getJdbcMapping()
)
);
@ -230,7 +229,6 @@ public final class ExecuteWithTemporaryTableHelper {
temporaryTableColumn.getColumnName(),
false,
null,
null,
temporaryTableColumn.getJdbcMapping()
)
)
@ -250,7 +248,6 @@ public final class ExecuteWithTemporaryTableHelper {
selectableMapping.getSelectionExpression(),
false,
null,
null,
selectableMapping.getJdbcMapping()
)
)
@ -274,7 +271,6 @@ public final class ExecuteWithTemporaryTableHelper {
idTable.getSessionUidColumn().getColumnName(),
false,
null,
null,
idTable.getSessionUidColumn().getJdbcMapping()
),
ComparisonOperator.EQUAL,

View File

@ -22,7 +22,6 @@ import org.hibernate.engine.FetchTiming;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.generator.EventType;
import org.hibernate.id.BulkInsertionCapableIdentifierGenerator;
import org.hibernate.id.OptimizableGenerator;
import org.hibernate.id.PostInsertIdentityPersister;
@ -348,7 +347,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
columnReference.getColumnExpression(),
false,
null,
null,
columnReference.getJdbcMapping()
)
)
@ -369,7 +367,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
TemporaryTable.ENTITY_TABLE_IDENTITY_COLUMN,
false,
null,
null,
identifierMapping.getJdbcMapping()
);
idSelectQuerySpec.getSelectClause()
@ -453,7 +450,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
sessionUidColumn.getColumnName(),
false,
null,
null,
sessionUidColumn.getJdbcMapping()
),
ComparisonOperator.EQUAL,
@ -470,7 +466,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
rowNumberColumn.getColumnName(),
false,
null,
null,
rowNumberColumn.getJdbcMapping()
),
ComparisonOperator.EQUAL,
@ -529,7 +524,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
keyColumns[0],
false,
null,
null,
identifierMapping.getJdbcMapping()
)
);
@ -542,7 +536,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
idColumnReference.getColumnExpression(),
false,
null,
null,
idColumnReference.getJdbcMapping()
)
)
@ -599,7 +592,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
TemporaryTable.ENTITY_TABLE_IDENTITY_COLUMN,
false,
null,
null,
identifierMapping.getJdbcMapping()
),
ComparisonOperator.EQUAL,
@ -706,7 +698,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
columnReference.getColumnExpression(),
false,
null,
null,
columnReference.getJdbcMapping()
)
)
@ -740,7 +731,6 @@ public class InsertExecutionDelegate implements TableBasedInsertHandler.Executio
targetKeyColumnName,
false,
null,
null,
identifierMapping.getJdbcMapping()
)
);

View File

@ -189,7 +189,6 @@ public class TableBasedInsertHandler implements InsertHandler {
rowNumberColumn.getColumnName(),
false,
null,
null,
rowNumberColumn.getJdbcMapping()
);
insertStatement.getTargetColumns().set(
@ -214,7 +213,6 @@ public class TableBasedInsertHandler implements InsertHandler {
rowNumberColumn.getColumnName(),
false,
null,
null,
rowNumberColumn.getJdbcMapping()
);
insertStatement.getTargetColumns().add( columnReference );
@ -237,7 +235,6 @@ public class TableBasedInsertHandler implements InsertHandler {
sessionUidColumn.getColumnName(),
false,
null,
null,
sessionUidColumn.getJdbcMapping()
);
insertStatement.getTargetColumns().add( sessionUidColumnReference );
@ -267,7 +264,6 @@ public class TableBasedInsertHandler implements InsertHandler {
rowNumberColumn.getColumnName(),
false,
null,
null,
rowNumberColumn.getJdbcMapping()
);
insertStatement.getTargetColumns().add( columnReference );
@ -286,7 +282,6 @@ public class TableBasedInsertHandler implements InsertHandler {
sessionUidColumn.getColumnName(),
false,
null,
null,
sessionUidColumn.getJdbcMapping()
);
insertStatement.getTargetColumns().add( sessionUidColumnReference );

View File

@ -96,6 +96,8 @@ import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.metamodel.model.domain.PluralPersistentAttribute;
import org.hibernate.metamodel.model.domain.internal.AnyDiscriminatorSqmPath;
import org.hibernate.metamodel.model.domain.internal.AnyDiscriminatorSqmPathSource;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.derived.AnonymousTupleType;
import org.hibernate.metamodel.model.domain.internal.BasicSqmPathSource;
import org.hibernate.metamodel.model.domain.internal.CompositeSqmPathSource;
import org.hibernate.metamodel.model.domain.internal.DiscriminatorSqmPath;
@ -113,8 +115,6 @@ import org.hibernate.query.criteria.JpaCteCriteriaAttribute;
import org.hibernate.query.criteria.JpaPath;
import org.hibernate.query.criteria.JpaSearchOrder;
import org.hibernate.query.derived.AnonymousTupleEntityValuedModelPart;
import org.hibernate.query.derived.AnonymousTupleTableGroupProducer;
import org.hibernate.query.derived.AnonymousTupleType;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.spi.QueryOptions;
import org.hibernate.query.spi.QueryParameterBinding;
@ -4611,7 +4611,6 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
columnNames.get( i ),
false,
null,
null,
subQueryColumns.get( i ).getJdbcMapping()
)
);
@ -4654,7 +4653,6 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
columnNames.get( 0 ),
false,
null,
null,
expression.getExpressionType().getSingleJdbcMapping()
)
);
@ -4757,7 +4755,6 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
tableReference.getColumnNames().get( 0 ),
false,
null,
null,
sqlSelections.get( 0 ).getExpressionType().getSingleJdbcMapping()
)
),
@ -4774,7 +4771,6 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
tableReference.getColumnNames().get( selectionIndex ),
false,
null,
null,
selectionMapping.getJdbcMapping()
)
)

View File

@ -147,12 +147,23 @@ public class SqlAstQueryPartProcessingStateImpl
final SelectClause selectClause = ( (QuerySpec) queryPart ).getSelectClause();
final int valuesArrayPosition = selectClause.getSqlSelections().size();
final SqlSelection sqlSelection = expression.createSqlSelection(
final SqlSelection sqlSelection;
if ( isTopLevel() ) {
sqlSelection = expression.createDomainResultSqlSelection(
valuesArrayPosition + 1,
valuesArrayPosition,
javaType,
typeConfiguration
);
}
else {
sqlSelection = expression.createSqlSelection(
valuesArrayPosition + 1,
valuesArrayPosition,
javaType,
typeConfiguration
);
}
selectClause.addSqlSelection( sqlSelection );

View File

@ -1977,7 +1977,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
currentCteStatement.getCycleMarkColumn().getColumnExpression(),
false,
null,
null,
currentCteStatement.getCycleMarkColumn().getJdbcMapping()
);
if ( currentCteStatement.getCycleValue().getJdbcMapping() == getBooleanType()
@ -2020,7 +2019,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
depthColumnName,
false,
null,
null,
integerType
);
visitColumnReference( depthColumnReference );
@ -2052,7 +2050,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
currentCteStatement.getSearchColumn().getColumnExpression(),
false,
null,
null,
currentCteStatement.getSearchColumn().getJdbcMapping()
)
);
@ -2164,7 +2161,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
depthColumnName,
false,
null,
null,
integerType
);
visitColumnReference( depthColumnReference );
@ -2208,7 +2204,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
currentCteStatement.getSearchColumn().getColumnExpression(),
false,
null,
null,
currentCteStatement.getSearchColumn().getJdbcMapping()
)
);
@ -2353,7 +2348,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
cyclePathColumnName,
false,
null,
null,
stringType
);
@ -2500,7 +2494,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
cyclePathColumnName,
false,
null,
null,
stringType
);
arguments.add( new QueryLiteral<>( "%", stringType ) );
@ -2919,7 +2912,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
"c" + i,
false,
null,
null,
getIntegerType()
)
)
@ -3144,7 +3136,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
"c" + index,
false,
null,
null,
expression.getExpressionType().getSingleJdbcMapping()
);
}
@ -5608,7 +5599,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
columnName,
false,
null,
null,
null
)
);
@ -5678,7 +5668,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
columnName,
false,
null,
null,
null
)
);
@ -5803,7 +5792,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
"sort_col_" + i,
false,
null,
null,
null
);
sortExpression = sortSpecification.getSortExpression();
@ -6193,7 +6181,11 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
break;
case DEFAULT:
default:
appendSql( PARAM_MARKER );
jdbcParameter.getExpressionType()
.getJdbcMappings()
.get( 0 )
.getJdbcType()
.appendWriteExpression( "?", this, getDialect() );
parameterBinders.add( jdbcParameter.getParameterBinder() );
jdbcParameters.addParameter( jdbcParameter );
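
Editor's note: the default-case change above is the behavioral core of this commit. Instead of emitting PARAM_MARKER directly, the translator lets the parameter's JdbcType render the marker via appendWriteExpression. For any JdbcType that keeps the default implementation the output is still a bare "?", so ordinary mappings are unaffected. A quick illustrative check (the H2 dialect instance is only an example, not part of the commit):

import org.hibernate.dialect.H2Dialect;
import org.hibernate.type.descriptor.jdbc.IntegerJdbcType;

public class DefaultMarkerCheck {
	public static void main(String[] args) {
		// IntegerJdbcType does not override appendWriteExpression, so the
		// default implementation appends the marker unchanged.
		String rendered = IntegerJdbcType.INSTANCE.wrapWriteExpression( "?", new H2Dialect() );
		System.out.println( rendered ); // prints: ?
	}
}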

View File

@ -18,4 +18,8 @@ public interface SqlAstProcessingState {
SqlExpressionResolver getSqlExpressionResolver();
SqlAstCreationState getSqlAstCreationState();
default boolean isTopLevel() { //todo: naming
return getParentState() == null;
}
}

View File

@ -37,7 +37,6 @@ public class ColumnReference implements Expression, Assignable {
private final SelectablePath selectablePath;
private final boolean isFormula;
private final String readExpression;
private final String customWriteExpression;
private final JdbcMapping jdbcMapping;
public ColumnReference(TableReference tableReference, SelectableMapping selectableMapping) {
@ -47,7 +46,6 @@ public class ColumnReference implements Expression, Assignable {
selectableMapping.getSelectablePath(),
selectableMapping.isFormula(),
selectableMapping.getCustomReadExpression(),
selectableMapping.getCustomWriteExpression(),
selectableMapping.getJdbcMapping()
);
}
@ -59,7 +57,6 @@ public class ColumnReference implements Expression, Assignable {
null,
false,
null,
null,
jdbcMapping
);
}
@ -71,7 +68,6 @@ public class ColumnReference implements Expression, Assignable {
selectableMapping.getSelectablePath(),
selectableMapping.isFormula(),
selectableMapping.getCustomReadExpression(),
selectableMapping.getCustomWriteExpression(),
selectableMapping.getJdbcMapping()
);
}
@ -83,7 +79,6 @@ public class ColumnReference implements Expression, Assignable {
selectableMapping.getSelectablePath(),
selectableMapping.isFormula(),
selectableMapping.getCustomReadExpression(),
selectableMapping.getCustomWriteExpression(),
jdbcMapping
);
}
@ -93,7 +88,6 @@ public class ColumnReference implements Expression, Assignable {
String columnExpression,
boolean isFormula,
String customReadExpression,
String customWriteExpression,
JdbcMapping jdbcMapping) {
this(
tableReference.getIdentificationVariable(),
@ -101,7 +95,6 @@ public class ColumnReference implements Expression, Assignable {
null,
isFormula,
customReadExpression,
customWriteExpression,
jdbcMapping
);
}
@ -111,9 +104,8 @@ public class ColumnReference implements Expression, Assignable {
String columnExpression,
boolean isFormula,
String customReadExpression,
String customWriteExpression,
JdbcMapping jdbcMapping) {
this( qualifier, columnExpression, null, isFormula, customReadExpression, customWriteExpression, jdbcMapping );
this( qualifier, columnExpression, null, isFormula, customReadExpression, jdbcMapping );
}
public ColumnReference(
@ -122,7 +114,6 @@ public class ColumnReference implements Expression, Assignable {
SelectablePath selectablePath,
boolean isFormula,
String customReadExpression,
String customWriteExpression,
JdbcMapping jdbcMapping) {
this.qualifier = StringHelper.nullIfEmpty( qualifier );
@ -142,15 +133,6 @@ public class ColumnReference implements Expression, Assignable {
this.isFormula = isFormula;
this.readExpression = customReadExpression;
//TODO: writeExpression is never used, can it be removed?
if ( !isFormula && customWriteExpression != null ) {
this.customWriteExpression = customWriteExpression;
}
else {
this.customWriteExpression = null;
}
this.jdbcMapping = jdbcMapping;
}
@ -175,10 +157,6 @@ public class ColumnReference implements Expression, Assignable {
return selectablePath;
}
public String getCustomWriteExpression() {
return customWriteExpression;
}
public boolean isColumnExpressionFormula() {
return isFormula;
}

View File

@ -42,4 +42,23 @@ public interface Expression extends SqlAstNode, SqlSelectionProducer {
this
);
}
default SqlSelection createDomainResultSqlSelection(
int jdbcPosition,
int valuesArrayPosition,
JavaType javaType,
TypeConfiguration typeConfiguration) {
// Apply possible jdbc type wrapping
final Expression expression;
final JdbcMappingContainer expressionType = getExpressionType();
if ( expressionType == null ) {
expression = this;
}
else {
expression = expressionType.getJdbcMappings().get( 0 ).getJdbcType().wrapTopLevelSelectionExpression( this );
}
return expression == this
? createSqlSelection( jdbcPosition, valuesArrayPosition, javaType, typeConfiguration )
: new SqlSelectionImpl( jdbcPosition, valuesArrayPosition, expression );
}
}
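
Editor's note: createDomainResultSqlSelection gives the JdbcType a chance to wrap the expression that ends up in the top-level select list, via the JdbcType#wrapTopLevelSelectionExpression hook introduced in the next file. Below is a minimal sketch of an override; the class name and the varchar cast are illustrative assumptions for a driver that cannot read the native type directly, not something defined by this commit.

import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.SqlAstWalker;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.sql.ast.tree.expression.SelfRenderingExpression;
import org.hibernate.type.descriptor.jdbc.VarcharJdbcType;

// Illustrative sketch only: a string-backed JdbcType whose values must be cast
// to varchar in the select list before the VARCHAR ValueExtractor can read them.
public class CastingReadJdbcType extends VarcharJdbcType {

	@Override
	public Expression wrapTopLevelSelectionExpression(Expression expression) {
		return new SelfRenderingExpression() {
			@Override
			public void renderToSql(
					SqlAppender sqlAppender,
					SqlAstTranslator<?> walker,
					SessionFactoryImplementor sessionFactory) {
				// Emit cast(<original expression> as varchar(4000))
				sqlAppender.append( "cast(" );
				expression.accept( walker );
				sqlAppender.append( " as varchar(4000))" );
			}

			@Override
			public void accept(SqlAstWalker sqlTreeWalker) {
				sqlTreeWalker.visitSelfRenderingExpression( this );
			}

			@Override
			public JdbcMappingContainer getExpressionType() {
				return expression.getExpressionType();
			}
		};
	}
}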

View File

@ -15,11 +15,13 @@ import org.hibernate.Incubating;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.query.sqm.CastType;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.spi.StringBuilderSqlAppender;
import org.hibernate.sql.ast.tree.expression.Expression;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.BasicJavaType;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
@ -160,6 +162,35 @@ public interface JdbcType extends Serializable {
return null;
}
/**
* Wraps the top-level selection expression so that values can be read with this JdbcType's ValueExtractor.
* @since 6.2
*/
@Incubating
default Expression wrapTopLevelSelectionExpression(Expression expression) {
return expression;
}
/**
* Wraps the given write expression so that values can be written with this JdbcType's ValueBinder.
* @since 6.2
*/
@Incubating
default String wrapWriteExpression(String writeExpression, Dialect dialect) {
final StringBuilder sb = new StringBuilder( writeExpression.length() );
appendWriteExpression( writeExpression, new StringBuilderSqlAppender( sb ), dialect );
return sb.toString();
}
/**
* Appends the write expression, wrapped so that values can be written with this JdbcType's ValueBinder.
* @since 6.2
*/
@Incubating
default void appendWriteExpression(String writeExpression, SqlAppender appender, Dialect dialect) {
appender.append( writeExpression );
}
default boolean isInteger() {
int typeCode = getDdlTypeCode();
return SqlTypes.isIntegral(typeCode)
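
Editor's note: the write-side hook above is what custom types override to have the database convert bound values. A minimal sketch follows; the class name and the "inet" cast target are assumptions for the sake of the example and are not part of this commit.

import org.hibernate.dialect.Dialect;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.descriptor.jdbc.VarcharJdbcType;

// Illustrative sketch only: a VARCHAR-backed type whose bound string should be
// converted by the database on write.
public class InetAsStringJdbcType extends VarcharJdbcType {

	public static final InetAsStringJdbcType INSTANCE = new InetAsStringJdbcType();

	@Override
	public void appendWriteExpression(String writeExpression, SqlAppender appender, Dialect dialect) {
		// Render e.g. "?" as "cast(? as inet)" so parameter markers in
		// INSERT/UPDATE statements are wrapped automatically.
		appender.append( "cast(" );
		appender.append( writeExpression );
		appender.append( " as inet)" );
	}
}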

View File

@ -0,0 +1,170 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.type.descriptor.jdbc;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
/**
* Specialized type mapping for {@code JSON}, stored in a string-based SQL type ({@code VARCHAR}, {@code NVARCHAR}, {@code CLOB} or {@code NCLOB}).
*
* @author Christian Beikov
*/
public class JsonAsStringJdbcType extends JsonJdbcType implements AdjustableJdbcType {
/**
* Singleton access
*/
public static final JsonAsStringJdbcType VARCHAR_INSTANCE = new JsonAsStringJdbcType( SqlTypes.LONG32VARCHAR, null );
public static final JsonAsStringJdbcType NVARCHAR_INSTANCE = new JsonAsStringJdbcType( SqlTypes.LONG32NVARCHAR, null );
public static final JsonAsStringJdbcType CLOB_INSTANCE = new JsonAsStringJdbcType( SqlTypes.CLOB, null );
public static final JsonAsStringJdbcType NCLOB_INSTANCE = new JsonAsStringJdbcType( SqlTypes.NCLOB, null );
private final boolean nationalized;
private final int ddlTypeCode;
protected JsonAsStringJdbcType(int ddlTypeCode, EmbeddableMappingType embeddableMappingType) {
super( embeddableMappingType );
this.ddlTypeCode = ddlTypeCode;
this.nationalized = ddlTypeCode == SqlTypes.LONG32NVARCHAR
|| ddlTypeCode == SqlTypes.NCLOB;
}
@Override
public int getJdbcTypeCode() {
return nationalized ? SqlTypes.NVARCHAR : SqlTypes.VARCHAR;
}
@Override
public int getDdlTypeCode() {
return ddlTypeCode;
}
@Override
public String toString() {
return "JsonAsStringJdbcType";
}
@Override
public JdbcType resolveIndicatedType(JdbcTypeIndicators indicators, JavaType<?> domainJtd) {
// Depending on the size of the column, we might have to adjust the JDBC type code used for DDL.
// In some DBMSs, LOBs can only be compared through special functions, which is handled in the
// SqlAstTranslators, but that requires the correct JDBC type code to be available, which we ensure this way.
if ( getEmbeddableMappingType() == null ) {
if ( needsLob( indicators ) ) {
return indicators.isNationalized() ? NCLOB_INSTANCE : CLOB_INSTANCE;
}
else {
return indicators.isNationalized() ? NVARCHAR_INSTANCE : VARCHAR_INSTANCE;
}
}
else {
if ( needsLob( indicators ) ) {
return new JsonAsStringJdbcType(
indicators.isNationalized() ? SqlTypes.NCLOB : SqlTypes.CLOB,
getEmbeddableMappingType()
);
}
else {
return new JsonAsStringJdbcType(
indicators.isNationalized() ? SqlTypes.LONG32NVARCHAR : SqlTypes.LONG32VARCHAR,
getEmbeddableMappingType()
);
}
}
}
protected boolean needsLob(JdbcTypeIndicators indicators) {
final Dialect dialect = indicators.getTypeConfiguration()
.getServiceRegistry()
.getService( JdbcServices.class )
.getDialect();
final long length = indicators.getColumnLength();
final long maxLength = indicators.isNationalized() ?
dialect.getMaxNVarcharLength() :
dialect.getMaxVarcharLength();
if ( length > maxLength ) {
return true;
}
final DdlTypeRegistry ddlTypeRegistry = indicators.getTypeConfiguration().getDdlTypeRegistry();
final String typeName = ddlTypeRegistry.getTypeName( getDdlTypeCode(), dialect );
return typeName.equals( ddlTypeRegistry.getTypeName( SqlTypes.CLOB, dialect ) )
|| typeName.equals( ddlTypeRegistry.getTypeName( SqlTypes.NCLOB, dialect ) );
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(
EmbeddableMappingType mappingType,
String sqlType,
RuntimeModelCreationContext creationContext) {
return new JsonAsStringJdbcType( ddlTypeCode, mappingType );
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
if ( nationalized ) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
final String json = ( (JsonAsStringJdbcType) getJdbcType() ).toString( value, getJavaType(), options );
st.setNString( index, json );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
final String json = ( (JsonAsStringJdbcType) getJdbcType() ).toString( value, getJavaType(), options );
st.setNString( name, json );
}
};
}
else {
return super.getBinder( javaType );
}
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
if ( nationalized ) {
return new BasicExtractor<>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return fromString( rs.getNString( paramIndex ), getJavaType(), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options)
throws SQLException {
return fromString( statement.getNString( index ), getJavaType(), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
throws SQLException {
return fromString( statement.getNString( name ), getJavaType(), options );
}
};
}
else {
return super.getExtractor( javaType );
}
}
}
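
Editor's note: as a usage sketch (not part of this commit), a contributor for a database without a native JSON column type could expose JSON support through this string-backed descriptor. The contributor class below is hypothetical wiring using the standard TypeContributor callback.

import org.hibernate.boot.model.TypeContributions;
import org.hibernate.boot.model.TypeContributor;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.type.descriptor.jdbc.JsonAsStringJdbcType;

// Hypothetical contributor registering the string-backed JSON descriptor.
public class JsonAsStringTypeContributor implements TypeContributor {
	@Override
	public void contribute(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
		typeContributions.contributeJdbcType( JsonAsStringJdbcType.VARCHAR_INSTANCE );
	}
}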

View File

@ -11,6 +11,8 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.metamodel.mapping.EmbeddableMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.type.SqlTypes;
@ -18,20 +20,30 @@ import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
/**
* Specialized type mapping for {@code SQLXML} and the XML SQL data type.
*
* @author Christian Beikov
*/
public class XmlAsStringJdbcType extends XmlJdbcType {
public class XmlAsStringJdbcType extends XmlJdbcType implements AdjustableJdbcType {
/**
* Singleton access
*/
public static final XmlAsStringJdbcType INSTANCE = new XmlAsStringJdbcType( null );
public static final XmlAsStringJdbcType VARCHAR_INSTANCE = new XmlAsStringJdbcType( SqlTypes.LONG32VARCHAR, null );
public static final XmlAsStringJdbcType NVARCHAR_INSTANCE = new XmlAsStringJdbcType( SqlTypes.LONG32NVARCHAR, null );
public static final XmlAsStringJdbcType CLOB_INSTANCE = new XmlAsStringJdbcType( SqlTypes.CLOB, null );
public static final XmlAsStringJdbcType NCLOB_INSTANCE = new XmlAsStringJdbcType( SqlTypes.NCLOB, null );
private XmlAsStringJdbcType(EmbeddableMappingType embeddableMappingType) {
private final boolean nationalized;
private final int ddlTypeCode;
public XmlAsStringJdbcType(int ddlTypeCode, EmbeddableMappingType embeddableMappingType) {
super( embeddableMappingType );
this.ddlTypeCode = ddlTypeCode;
this.nationalized = ddlTypeCode == SqlTypes.LONG32NVARCHAR
|| ddlTypeCode == SqlTypes.NCLOB;
}
@Override
@ -39,12 +51,12 @@ public class XmlAsStringJdbcType extends XmlJdbcType {
EmbeddableMappingType mappingType,
String sqlType,
RuntimeModelCreationContext creationContext) {
return new XmlAsStringJdbcType( mappingType );
return new XmlAsStringJdbcType( ddlTypeCode, mappingType );
}
@Override
public int getJdbcTypeCode() {
return SqlTypes.VARCHAR;
return nationalized ? SqlTypes.NVARCHAR : SqlTypes.VARCHAR;
}
@Override
@ -52,13 +64,92 @@ public class XmlAsStringJdbcType extends XmlJdbcType {
return SqlTypes.SQLXML;
}
@Override
public int getDdlTypeCode() {
return ddlTypeCode;
}
@Override
public String toString() {
return "XmlAsStringJdbcType";
}
@Override
public JdbcType resolveIndicatedType(JdbcTypeIndicators indicators, JavaType<?> domainJtd) {
// Depending on the size of the column, we might have to adjust the JDBC type code used for DDL.
// In some DBMSs, LOBs can only be compared through special functions, which is handled in the
// SqlAstTranslators, but that requires the correct JDBC type code to be available, which we ensure this way.
if ( getEmbeddableMappingType() == null ) {
if ( needsLob( indicators ) ) {
return indicators.isNationalized() ? NCLOB_INSTANCE : CLOB_INSTANCE;
}
else {
return indicators.isNationalized() ? NVARCHAR_INSTANCE : VARCHAR_INSTANCE;
}
}
else {
if ( needsLob( indicators ) ) {
return new XmlAsStringJdbcType(
indicators.isNationalized() ? SqlTypes.NCLOB : SqlTypes.CLOB,
getEmbeddableMappingType()
);
}
else {
return new XmlAsStringJdbcType(
indicators.isNationalized() ? SqlTypes.LONG32NVARCHAR : SqlTypes.LONG32VARCHAR,
getEmbeddableMappingType()
);
}
}
}
protected boolean needsLob(JdbcTypeIndicators indicators) {
final Dialect dialect = indicators.getTypeConfiguration()
.getServiceRegistry()
.getService( JdbcServices.class )
.getDialect();
final long length = indicators.getColumnLength();
final long maxLength = indicators.isNationalized() ?
dialect.getMaxNVarcharLength() :
dialect.getMaxVarcharLength();
if ( length > maxLength ) {
return true;
}
final DdlTypeRegistry ddlTypeRegistry = indicators.getTypeConfiguration().getDdlTypeRegistry();
final String typeName = ddlTypeRegistry.getTypeName( getDdlTypeCode(), dialect );
return typeName.equals( ddlTypeRegistry.getTypeName( SqlTypes.CLOB, dialect ) )
|| typeName.equals( ddlTypeRegistry.getTypeName( SqlTypes.NCLOB, dialect ) );
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
if ( nationalized ) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
final String xml = ( (XmlAsStringJdbcType) getJdbcType() ).toString(
value,
getJavaType(),
options
);
st.setNString( index, xml );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
final String xml = ( (XmlAsStringJdbcType) getJdbcType() ).toString(
value,
getJavaType(),
options
);
st.setNString( name, xml );
}
};
}
else {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
@ -83,9 +174,43 @@ public class XmlAsStringJdbcType extends XmlJdbcType {
}
};
}
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
if ( nationalized ) {
return new BasicExtractor<>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return getObject( rs.getNString( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options)
throws SQLException {
return getObject( statement.getNString( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
throws SQLException {
return getObject( statement.getNString( name ), options );
}
private X getObject(String xml, WrapperOptions options) throws SQLException {
if ( xml == null ) {
return null;
}
return ( (XmlAsStringJdbcType) getJdbcType() ).fromString(
xml,
getJavaType(),
options
);
}
};
}
else {
return new BasicExtractor<>( javaType, this ) {
@Override
@ -117,3 +242,4 @@ public class XmlAsStringJdbcType extends XmlJdbcType {
};
}
}
}

View File

@ -9,11 +9,14 @@ package org.hibernate.spatial.dialect.cockroachdb;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.PostgreSQLPGObjectJdbcType;
import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.spatial.FunctionKey;
import org.hibernate.spatial.HSMessageLogger;
import org.hibernate.spatial.contributor.ContributorImplementor;
import org.hibernate.spatial.dialect.postgis.PGCastingGeographyJdbcType;
import org.hibernate.spatial.dialect.postgis.PGCastingGeometryJdbcType;
import org.hibernate.spatial.dialect.postgis.PGGeographyJdbcType;
import org.hibernate.spatial.dialect.postgis.PGGeometryJdbcType;
import org.hibernate.spatial.dialect.postgis.PostgisSqmFunctionDescriptors;
@ -29,9 +32,15 @@ public class CockroachDbContributor implements ContributorImplementor {
@Override
public void contributeJdbcTypes(TypeContributions typeContributions) {
HSMessageLogger.SPATIAL_MSG_LOGGER.typeContributions( this.getClass().getCanonicalName() );
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
typeContributions.contributeJdbcType( PGGeometryJdbcType.INSTANCE_WKB_2 );
typeContributions.contributeJdbcType( PGGeographyJdbcType.INSTANCE_WKB_2 );
}
else {
typeContributions.contributeJdbcType( PGCastingGeometryJdbcType.INSTANCE_WKB_2 );
typeContributions.contributeJdbcType( PGCastingGeographyJdbcType.INSTANCE_WKB_2 );
}
}
@Override
public void contributeFunctions(FunctionContributions functionContributions) {

View File

@ -0,0 +1,161 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.spatial.dialect.postgis;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import org.hibernate.dialect.Dialect;
import org.hibernate.spatial.GeometryLiteralFormatter;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.BasicBinder;
import org.hibernate.type.descriptor.jdbc.BasicExtractor;
import org.hibernate.type.descriptor.jdbc.JdbcLiteralFormatter;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.geolatte.geom.ByteBuffer;
import org.geolatte.geom.Geometry;
import org.geolatte.geom.codec.Wkb;
import org.geolatte.geom.codec.WkbDecoder;
import org.geolatte.geom.codec.Wkt;
import org.geolatte.geom.codec.WktDecoder;
import org.geolatte.geom.codec.WktEncoder;
/**
* Base type descriptor for the PostGIS Geometry and Geography types, exchanging values as WKT/WKB strings.
*
* @author Karel Maesen, Geovise BVBA
*/
public abstract class AbstractCastingPostGISJdbcType implements JdbcType {
private final Wkb.Dialect wkbDialect;
AbstractCastingPostGISJdbcType(Wkb.Dialect dialect) {
wkbDialect = dialect;
}
@Override
public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaType) {
return new PGGeometryLiteralFormatter<>( getConstructorFunction(), javaType );
}
@Override
public abstract int getDefaultSqlTypeCode();
protected abstract String getConstructorFunction();
@Override
public void appendWriteExpression(
String writeExpression,
SqlAppender appender,
Dialect dialect) {
appender.append( getConstructorFunction() );
appender.append( '(' );
appender.append( writeExpression );
appender.append( ')' );
}
public Geometry<?> toGeometry(String wkt) {
if ( wkt == null ) {
return null;
}
if ( wkt.startsWith( "00" ) || wkt.startsWith( "01" ) ) {
// hex-encoded WKB: the value starts with the byte-order marker (00 or 01) rather than a WKT keyword
ByteBuffer buffer = ByteBuffer.from( wkt );
final WkbDecoder decoder = Wkb.newDecoder( wkbDialect );
return decoder.decode( buffer );
}
else {
return parseWkt( wkt );
}
}
private static Geometry<?> parseWkt(String pgValue) {
final WktDecoder decoder = Wkt.newDecoder( Wkt.Dialect.POSTGIS_EWKT_1 );
return decoder.decode( pgValue );
}
@Override
public int getJdbcTypeCode() {
return Types.VARCHAR;
}
@Override
public <X> ValueBinder<X> getBinder(final JavaType<X> javaType) {
return new BasicBinder<X>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
st.setString( index, toWkt( value, options ) );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
st.setString( name, toWkt( value, options ) );
}
private String toWkt(X value, WrapperOptions options) throws SQLException {
final WktEncoder encoder = Wkt.newEncoder( Wkt.Dialect.POSTGIS_EWKT_1 );
final Geometry<?> geometry = getJavaType().unwrap( value, Geometry.class, options );
return encoder.encode( geometry );
}
};
}
@Override
public <X> ValueExtractor<X> getExtractor(final JavaType<X> javaType) {
return new BasicExtractor<X>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return getJavaType().wrap( toGeometry( rs.getString( paramIndex ) ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return getJavaType().wrap( toGeometry( statement.getString( index ) ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
throws SQLException {
return getJavaType().wrap( toGeometry( statement.getString( name ) ), options );
}
};
}
static class PGGeometryLiteralFormatter<T> extends GeometryLiteralFormatter<T> {
private final String constructorFunction;
public PGGeometryLiteralFormatter(String constructorFunction, JavaType<T> javaType) {
super( javaType, Wkt.Dialect.POSTGIS_EWKT_1, "" );
this.constructorFunction = constructorFunction;
}
@Override
public void appendJdbcLiteral(SqlAppender appender, T value, Dialect dialect, WrapperOptions wrapperOptions) {
Geometry<?> geom = javaType.unwrap( value, Geometry.class, wrapperOptions );
appender.append( constructorFunction );
appender.appendSql( "('" );
appender.appendSql( Wkt.toWkt( geom, wktDialect ) );
appender.appendSql( "')" );
}
}
}

View File

@ -0,0 +1,38 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.spatial.dialect.postgis;
import org.hibernate.type.SqlTypes;
import org.geolatte.geom.codec.Wkb;
/**
* Type Descriptor for the Postgis Geography type
*
* @author Karel Maesen, Geovise BVBA
*/
public class PGCastingGeographyJdbcType extends AbstractCastingPostGISJdbcType {
// Type descriptor instance using EWKB v2 (postgis versions >= 2.2.2, see: https://trac.osgeo.org/postgis/ticket/3181)
public static final PGCastingGeographyJdbcType INSTANCE_WKB_2 = new PGCastingGeographyJdbcType( Wkb.Dialect.POSTGIS_EWKB_2 );
private PGCastingGeographyJdbcType(Wkb.Dialect dialect) {
super( dialect );
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.GEOGRAPHY;
}
@Override
protected String getConstructorFunction() {
return "st_geogfromtext";
}
}

View File

@ -0,0 +1,38 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.spatial.dialect.postgis;
import org.hibernate.type.SqlTypes;
import org.geolatte.geom.codec.Wkb;
/**
* Type Descriptor for the Postgis Geometry type
*
* @author Karel Maesen, Geovise BVBA
*/
public class PGCastingGeometryJdbcType extends AbstractCastingPostGISJdbcType {
// Type descriptor instance using EWKB v2 (postgis versions >= 2.2.2, see: https://trac.osgeo.org/postgis/ticket/3181)
public static final PGCastingGeometryJdbcType INSTANCE_WKB_2 = new PGCastingGeometryJdbcType( Wkb.Dialect.POSTGIS_EWKB_2 );
private PGCastingGeometryJdbcType(Wkb.Dialect dialect) {
super( dialect );
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.GEOMETRY;
}
@Override
protected String getConstructorFunction() {
return "st_geomfromewkt";
}
}
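
Editor's note: putting the write-side pieces together, the SQL AST translator now routes parameter markers through appendWriteExpression, so a geometry parameter bound via this casting type is rendered with its constructor function around the marker. A quick illustrative check (the PostgreSQL dialect instance is only an example):

import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.spatial.dialect.postgis.PGCastingGeometryJdbcType;

public class CastingWriteExpressionCheck {
	public static void main(String[] args) {
		// wrapWriteExpression delegates to appendWriteExpression, which wraps the
		// marker in the type's constructor function.
		String rendered = PGCastingGeometryJdbcType.INSTANCE_WKB_2
				.wrapWriteExpression( "?", new PostgreSQLDialect() );
		System.out.println( rendered ); // prints: st_geomfromewkt(?)
	}
}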

View File

@ -9,6 +9,7 @@ package org.hibernate.spatial.dialect.postgis;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.dialect.PostgreSQLPGObjectJdbcType;
import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.spatial.HSMessageLogger;
@ -25,9 +26,15 @@ public class PostgisDialectContributor implements ContributorImplementor {
@Override
public void contributeJdbcTypes(TypeContributions typeContributions) {
HSMessageLogger.SPATIAL_MSG_LOGGER.typeContributions( this.getClass().getCanonicalName() );
if ( PostgreSQLPGObjectJdbcType.isUsable() ) {
typeContributions.contributeJdbcType( PGGeometryJdbcType.INSTANCE_WKB_2 );
typeContributions.contributeJdbcType( PGGeographyJdbcType.INSTANCE_WKB_2 );
}
else {
typeContributions.contributeJdbcType( PGCastingGeometryJdbcType.INSTANCE_WKB_2 );
typeContributions.contributeJdbcType( PGCastingGeographyJdbcType.INSTANCE_WKB_2 );
}
}
@Override
public void contributeFunctions(FunctionContributions functionContributions) {