HHH-14744 : Refactor contextual information for SchemaManagementTool to be more easily extended by Hibernate Reactive

HHH-14744 : Restore databases/pgsql/resources/hibernate.properties and gradle/databases.gradle
This commit is contained in:
Gail Badner 2021-06-11 12:09:35 -07:00 committed by Sanne Grinovero
parent a083481c5b
commit 4bb3de09a1
16 changed files with 1469 additions and 1046 deletions

View File

@ -22,4 +22,5 @@ hibernate.cache.region_prefix hibernate.test
hibernate.cache.region.factory_class org.hibernate.testing.cache.CachingRegionFactory
hibernate.service.allow_crawling=false
hibernate.session.events.log=true
hibernate.session.events.log=true

View File

@ -6,10 +6,19 @@
*/
package org.hibernate.dialect;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.util.List;
import org.hibernate.dialect.identity.IdentityColumnSupport;
import org.hibernate.dialect.identity.PostgreSQL10IdentityColumnSupport;
import org.hibernate.engine.jdbc.env.spi.AnsiSqlKeywords;
import org.hibernate.engine.jdbc.env.spi.IdentifierCaseStrategy;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
import org.hibernate.engine.jdbc.env.spi.NameQualifierSupport;
import org.hibernate.tool.schema.extract.internal.SequenceInformationExtractorPostgresSQLDatabaseImpl;
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
/**
* An SQL dialect for Postgres 10 and later.
@ -31,4 +40,40 @@ public class PostgreSQL10Dialect extends PostgreSQL95Dialect {
super.augmentRecognizedTableTypes( tableTypesList );
tableTypesList.add( "PARTITIONED TABLE" );
}
/**
 * Builds the {@link IdentifierHelper} used to normalize identifier casing and
 * recognize reserved words for PostgreSQL.
 * <p>
 * Fix: the {@code @Override} annotation was missing here while every sibling
 * override in this class carries it; added for consistency and so the compiler
 * verifies the superclass signature.
 *
 * @param builder the builder to configure and build
 * @param dbMetaData live JDBC metadata, or {@code null} when no connection is
 *        available (e.g. offline schema tooling)
 * @return the configured helper
 * @throws SQLException if reading the database metadata fails
 */
@Override
public IdentifierHelper buildIdentifierHelper(
		IdentifierHelperBuilder builder,
		DatabaseMetaData dbMetaData) throws SQLException {
	if ( dbMetaData != null ) {
		// A live connection is available: let the driver report casing and reserved words.
		builder.applyIdentifierCasing( dbMetaData );
		builder.applyReservedWords( dbMetaData );
	}
	else {
		// No connection: PostgreSQL folds unquoted identifiers to lower case
		// and preserves the case of quoted identifiers.
		builder.setUnquotedCaseStrategy( IdentifierCaseStrategy.LOWER );
		builder.setQuotedCaseStrategy( IdentifierCaseStrategy.MIXED );
	}
	// Always applied, in addition to whatever the metadata reported above.
	builder.applyReservedWords( AnsiSqlKeywords.INSTANCE.sql2003() );
	builder.applyReservedWords( getKeywords() );
	builder.setNameQualifierSupport( getNameQualifierSupport() );
	return builder.build();
}
/**
 * PostgreSQL qualifies database object names by schema.
 *
 * @return {@link NameQualifierSupport#SCHEMA}, always
 */
@Override
public NameQualifierSupport getNameQualifierSupport() {
	return NameQualifierSupport.SCHEMA;
}
/**
 * Uses the PostgreSQL-specific extractor, which reads the string-typed
 * sequence metadata columns and converts them to numbers itself (needed
 * for drivers, such as Vert.x, that do not perform that conversion).
 *
 * @return the shared {@link SequenceInformationExtractorPostgresSQLDatabaseImpl} instance
 */
@Override
public SequenceInformationExtractor getSequenceInformationExtractor() {
	return SequenceInformationExtractorPostgresSQLDatabaseImpl.INSTANCE;
}
/**
 * Command used to determine the connection's current schema.
 * <p>
 * Fix: the previous command, {@code select current_schema from sys.dummy},
 * targeted SAP HANA's {@code SYS.DUMMY} table — PostgreSQL has no such table
 * and would reject the statement. PostgreSQL exposes the current schema via
 * the {@code current_schema()} function, which needs no FROM clause.
 *
 * @return the SQL command retrieving the current schema name
 */
@Override
public String getCurrentSchemaCommand() {
	return "select current_schema()";
}
}

View File

@ -23,7 +23,7 @@ import org.hibernate.tool.schema.extract.spi.InformationExtractor;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.ImprovedExtractionContextImpl;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
/**
* @author Steve Ebersole
@ -31,7 +31,7 @@ import org.hibernate.tool.schema.internal.exec.ImprovedExtractionContextImpl;
public class DatabaseInformationImpl
implements DatabaseInformation, ExtractionContext.DatabaseObjectAccess {
private final JdbcEnvironment jdbcEnvironment;
private final ImprovedExtractionContextImpl extractionContext;
private final ExtractionContext extractionContext;
private final InformationExtractor extractor;
private final Map<QualifiedSequenceName, SequenceInformation> sequenceInformationMap = new HashMap<QualifiedSequenceName, SequenceInformation>();
@ -40,10 +40,10 @@ public class DatabaseInformationImpl
ServiceRegistry serviceRegistry,
JdbcEnvironment jdbcEnvironment,
DdlTransactionIsolator ddlTransactionIsolator,
Namespace.Name defaultNamespace) throws SQLException {
Namespace.Name defaultNamespace,
SchemaManagementTool tool) throws SQLException {
this.jdbcEnvironment = jdbcEnvironment;
this.extractionContext = new ImprovedExtractionContextImpl(
this.extractionContext = tool.createExtractionContext(
serviceRegistry,
jdbcEnvironment,
ddlTransactionIsolator,
@ -53,7 +53,7 @@ public class DatabaseInformationImpl
);
// todo : make this pluggable
this.extractor = new InformationExtractorJdbcDatabaseMetaDataImpl( extractionContext );
this.extractor = tool.createInformationExtractor( extractionContext );
// because we do not have defined a way to locate sequence info by name
initializeSequences();

View File

@ -36,50 +36,31 @@ public class SequenceInformationExtractorLegacyImpl implements SequenceInformati
return SequenceInformationExtractorNoOpImpl.INSTANCE.extractMetadata( extractionContext );
}
final IdentifierHelper identifierHelper = extractionContext.getJdbcEnvironment().getIdentifierHelper();
final Statement statement = extractionContext.getJdbcConnection().createStatement();
try {
final ResultSet resultSet = statement.executeQuery( lookupSql );
try {
final List<SequenceInformation> sequenceInformationList = new ArrayList<>();
while ( resultSet.next() ) {
sequenceInformationList.add(
new SequenceInformationImpl(
new QualifiedSequenceName(
identifierHelper.toIdentifier(
resultSetCatalogName( resultSet )
),
identifierHelper.toIdentifier(
resultSetSchemaName( resultSet )
),
identifierHelper.toIdentifier(
resultSetSequenceName( resultSet )
)
),
resultSetStartValueSize( resultSet ),
resultSetMinValue( resultSet ),
resultSetMaxValue( resultSet ),
resultSetIncrementValue( resultSet )
)
);
return extractionContext.getQueryResults(
lookupSql,
null,
(ExtractionContext.ResultSetProcessor<Iterable<SequenceInformation>>) resultSet -> {
final IdentifierHelper identifierHelper = extractionContext.getJdbcEnvironment()
.getIdentifierHelper();
final List<SequenceInformation> sequenceInformationList = new ArrayList<>();
while ( resultSet.next() ) {
sequenceInformationList.add(
new SequenceInformationImpl(
new QualifiedSequenceName(
identifierHelper.toIdentifier( resultSetCatalogName( resultSet ) ),
identifierHelper.toIdentifier( resultSetSchemaName( resultSet ) ),
identifierHelper.toIdentifier( resultSetSequenceName( resultSet ) )
),
resultSetStartValueSize( resultSet ),
resultSetMinValue( resultSet ),
resultSetMaxValue( resultSet ),
resultSetIncrementValue( resultSet )
)
);
}
return sequenceInformationList;
}
return sequenceInformationList;
}
finally {
try {
resultSet.close();
}
catch (SQLException ignore) {
}
}
}
finally {
try {
statement.close();
}
catch (SQLException ignore) {
}
}
);
}
protected String sequenceNameColumn() {

View File

@ -8,8 +8,8 @@ package org.hibernate.tool.schema.extract.internal;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.hibernate.boot.model.relational.QualifiedSequenceName;
@ -42,60 +42,49 @@ public class SequenceInformationExtractorMariaDBDatabaseImpl extends SequenceInf
return SequenceInformationExtractorNoOpImpl.INSTANCE.extractMetadata(extractionContext);
}
final IdentifierHelper identifierHelper = extractionContext.getJdbcEnvironment().getIdentifierHelper();
final List<SequenceInformation> sequenceInformationList = new ArrayList<>();
final List<String> sequenceNames = new ArrayList<>();
try (
final Statement statement = extractionContext.getJdbcConnection().createStatement();
final ResultSet resultSet = statement.executeQuery( lookupSql );
) {
final List<String> sequenceNames = extractionContext.getQueryResults( lookupSql, null, resultSet -> {
final List<String> sequences = new ArrayList<>();
while ( resultSet.next() ) {
sequenceNames.add( resultSetSequenceName( resultSet ) );
sequences.add( resultSetSequenceName( resultSet ) );
}
}
return sequences;
});
if ( !sequenceNames.isEmpty() ) {
StringBuilder sequenceInfoQueryBuilder = new StringBuilder();
for ( String sequenceName : sequenceNames ) {
if ( sequenceInfoQueryBuilder.length() > 0 ) {
sequenceInfoQueryBuilder.append( UNION_ALL );
}
sequenceInfoQueryBuilder.append( String.format( SQL_SEQUENCE_QUERY, sequenceName ) );
}
return extractionContext.getQueryResults(
sequenceInfoQueryBuilder.toString(),
null,
(ExtractionContext.ResultSetProcessor<Iterable<SequenceInformation>>) resultSet -> {
final List<SequenceInformation> sequenceInformationList = new ArrayList<>();
final IdentifierHelper identifierHelper = extractionContext.getJdbcEnvironment()
.getIdentifierHelper();
int index = 0;
try (
final Statement statement = extractionContext.getJdbcConnection().createStatement();
final ResultSet resultSet = statement.executeQuery( sequenceInfoQueryBuilder.toString() );
) {
while ( resultSet.next() ) {
SequenceInformation sequenceInformation = new SequenceInformationImpl(
new QualifiedSequenceName(
null,
null,
identifierHelper.toIdentifier(
resultSetSequenceName(resultSet)
)
),
resultSetStartValueSize(resultSet),
resultSetMinValue(resultSet),
resultSetMaxValue(resultSet),
resultSetIncrementValue(resultSet)
);
sequenceInformationList.add(sequenceInformation);
}
}
while ( resultSet.next() ) {
SequenceInformation sequenceInformation = new SequenceInformationImpl(
new QualifiedSequenceName(
null,
null,
identifierHelper.toIdentifier( resultSetSequenceName(resultSet) )
),
resultSetStartValueSize(resultSet),
resultSetMinValue(resultSet),
resultSetMaxValue(resultSet),
resultSetIncrementValue(resultSet)
);
sequenceInformationList.add(sequenceInformation);
}
return sequenceInformationList;
});
}
return sequenceInformationList;
return Collections.emptyList();
}
protected String resultSetSequenceName(ResultSet resultSet) throws SQLException {

View File

@ -0,0 +1,40 @@
package org.hibernate.tool.schema.extract.internal;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
* PostgreSQL stores the sequence metadata as strings. PostgreSQL's JDBC driver does the
* conversion automatically, but, unfortunately Vert.x driver does not do this conversion.
*
* This class is intended to make this functionality work with both the JDBC and Vert.X
* drivers.
*
* @author Gail Badner
*/
public class SequenceInformationExtractorPostgresSQLDatabaseImpl extends SequenceInformationExtractorLegacyImpl {
//Singleton access
public static final SequenceInformationExtractorPostgresSQLDatabaseImpl INSTANCE = new SequenceInformationExtractorPostgresSQLDatabaseImpl();
protected Long resultSetStartValueSize(ResultSet resultSet) throws SQLException {
return convertStringValueToLong( resultSet, sequenceStartValueColumn() );
}
protected Long resultSetMinValue(ResultSet resultSet) throws SQLException {
return convertStringValueToLong( resultSet, sequenceMinValueColumn() );
}
protected Long resultSetMaxValue(ResultSet resultSet) throws SQLException {
return convertStringValueToLong( resultSet, sequenceMaxValueColumn() );
}
protected Long resultSetIncrementValue(ResultSet resultSet) throws SQLException {
return convertStringValueToLong( resultSet, sequenceIncrementColumn() );
}
private Long convertStringValueToLong(ResultSet resultSet, String columnLabel) throws SQLException {
// column value is of type character_data so get it as a String
final String stringValue = resultSet.getString( columnLabel );
return stringValue != null ? Long.valueOf( stringValue ) : null;
}
}

View File

@ -8,6 +8,9 @@ package org.hibernate.tool.schema.extract.spi;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.hibernate.Incubating;
import org.hibernate.boot.model.naming.Identifier;
@ -21,6 +24,7 @@ import org.hibernate.service.ServiceRegistry;
* well as to delegates needed in performing extraction.
*
* @author Steve Ebersole
* @author Gail Badner
*/
@Incubating
public interface ExtractionContext {
@ -29,9 +33,31 @@ public interface ExtractionContext {
Connection getJdbcConnection();
DatabaseMetaData getJdbcDatabaseMetaData();
/**
 * Executes {@code queryString} as a {@link PreparedStatement}, binds any given
 * positional parameters (1-based JDBC positions, in array order), and hands the
 * resulting {@link ResultSet} to {@code resultSetProcessor}, returning whatever
 * the processor produces. The statement and result set are both closed before
 * this method returns.
 *
 * @param queryString the SQL to execute
 * @param positionalParameters parameter values to bind, or {@code null} for none
 * @param resultSetProcessor consumes the result set and produces the return value
 * @return the processor's result
 * @throws SQLException on any JDBC failure
 */
@Incubating
default <T> T getQueryResults(
		String queryString,
		Object[] positionalParameters,
		ResultSetProcessor<T> resultSetProcessor) throws SQLException {
	try ( PreparedStatement preparedStatement = getJdbcConnection().prepareStatement( queryString ) ) {
		if ( positionalParameters != null ) {
			int jdbcPosition = 1;
			for ( Object parameter : positionalParameters ) {
				preparedStatement.setObject( jdbcPosition++, parameter );
			}
		}
		try ( ResultSet results = preparedStatement.executeQuery() ) {
			return resultSetProcessor.process( results );
		}
	}
}
Identifier getDefaultCatalog();
Identifier getDefaultSchema();
/**
 * Produces a value of type {@code T} from a {@link ResultSet}.
 * <p>
 * As used by {@link #getQueryResults}, the caller owns the result set's
 * lifecycle and closes it after {@link #process} returns, so implementations
 * need not close it themselves.
 * <p>
 * Fix: annotated {@code @FunctionalInterface} — the interface has exactly one
 * abstract method and is used as a lambda target (see the sequence extractors),
 * so the annotation lets the compiler guard that contract.
 *
 * @param <T> the type of the processing result
 */
@Incubating
@FunctionalInterface
interface ResultSetProcessor<T> {
	/**
	 * Processes the result set into a value.
	 *
	 * @param resultSet the result set to read
	 * @return the processing result
	 * @throws SQLException on any JDBC failure while reading
	 */
	T process(ResultSet resultSet) throws SQLException;
}
/**
* In conjunction with {@link #getDatabaseObjectAccess()} provides access to
* information about known database objects to the extractor.

View File

@ -96,7 +96,8 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation(
tool.getServiceRegistry(),
ddlTransactionIsolator,
metadata.getDatabase().getDefaultNamespace().getName()
metadata.getDatabase().getDefaultNamespace().getName(),
tool
);
final GenerationTarget[] targets = tool.buildGenerationTargets(

View File

@ -57,11 +57,11 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final DdlTransactionIsolator isolator = tool.getDdlTransactionIsolator( jdbcContext );
final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation(
tool.getServiceRegistry(),
isolator,
metadata.getDatabase().getDefaultNamespace().getName()
metadata.getDatabase().getDefaultNamespace().getName(),
tool
);
try {

View File

@ -32,6 +32,7 @@ import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromUrl;
import org.hibernate.tool.schema.internal.exec.ScriptTargetOutputToFile;
import org.hibernate.tool.schema.internal.exec.ScriptTargetOutputToUrl;
import org.hibernate.tool.schema.internal.exec.ScriptTargetOutputToWriter;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
@ -175,14 +176,16 @@ public class Helper {
public static DatabaseInformation buildDatabaseInformation(
ServiceRegistry serviceRegistry,
DdlTransactionIsolator ddlTransactionIsolator,
Namespace.Name defaultNamespace) {
Namespace.Name defaultNamespace,
SchemaManagementTool tool) {
final JdbcEnvironment jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );
try {
return new DatabaseInformationImpl(
serviceRegistry,
jdbcEnvironment,
ddlTransactionIsolator,
defaultNamespace
defaultNamespace,
tool
);
}
catch (SQLException e) {

View File

@ -93,7 +93,7 @@ public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv
return new IndividuallySchemaValidatorImpl( this, getSchemaFilterProvider( options ).getValidateFilter() );
}
}
private SchemaFilterProvider getSchemaFilterProvider(Map options) {
final Object configuredOption = (options == null)
? null
@ -176,7 +176,10 @@ public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv
}
if ( targetDescriptor.getTargetTypes().contains( TargetType.DATABASE ) ) {
targets[index] = new GenerationTargetToDatabase( ddlTransactionIsolator, false );
targets[index] = customTarget == null
? new GenerationTargetToDatabase( ddlTransactionIsolator, false )
: customTarget;
index++;
}
return targets;

View File

@ -7,10 +7,21 @@
package org.hibernate.tool.schema.spi;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;
import org.hibernate.Incubating;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
import org.hibernate.service.Service;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.extract.internal.InformationExtractorJdbcDatabaseMetaDataImpl;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.extract.spi.InformationExtractor;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.ImprovedExtractionContextImpl;
/**
* Contract for schema management tool integration.
@ -32,4 +43,25 @@ public interface SchemaManagementTool extends Service {
* @param generationTarget the custom instance to use.
*/
void setCustomDatabaseGenerationTarget(GenerationTarget generationTarget);
/**
 * Creates the {@link ExtractionContext} to be used when extracting information
 * about the underlying database.
 * <p>
 * The default implementation returns an {@link ImprovedExtractionContextImpl};
 * integrators (e.g. Hibernate Reactive, per HHH-14744) may override this to
 * supply a context backed by a different access strategy.
 *
 * @param serviceRegistry the service registry in effect
 * @param jdbcEnvironment descriptor of the JDBC environment
 * @param ddlTransactionIsolator isolator providing the connection used for extraction
 * @param defaultCatalog the default catalog to assume when none is specified
 * @param defaultSchema the default schema to assume when none is specified
 * @param databaseObjectAccess access to already-known database object information
 * @return the extraction context
 */
default ExtractionContext createExtractionContext(
		ServiceRegistry serviceRegistry,
		JdbcEnvironment jdbcEnvironment,
		DdlTransactionIsolator ddlTransactionIsolator,
		Identifier defaultCatalog,
		Identifier defaultSchema,
		ExtractionContext.DatabaseObjectAccess databaseObjectAccess) {
	return new ImprovedExtractionContextImpl(
			serviceRegistry,
			jdbcEnvironment,
			ddlTransactionIsolator,
			defaultCatalog,
			defaultSchema,
			databaseObjectAccess
	);
}
/**
 * Creates the {@link InformationExtractor} used to query the database for
 * schema information, bound to the given extraction context.
 * <p>
 * The default implementation reads via JDBC {@code DatabaseMetaData}
 * ({@link InformationExtractorJdbcDatabaseMetaDataImpl}); integrators may
 * override to extract the information differently.
 *
 * @param extractionContext the context the extractor operates within
 * @return the information extractor
 */
default InformationExtractor createInformationExtractor(ExtractionContext extractionContext) {
	return new InformationExtractorJdbcDatabaseMetaDataImpl( extractionContext );
}
}

View File

@ -17,6 +17,7 @@ import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.Environment;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.env.internal.JdbcEnvironmentInitiator;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.resource.transaction.spi.DdlTransactionIsolator;
import org.hibernate.tool.schema.extract.internal.DatabaseInformationImpl;
@ -24,7 +25,9 @@ import org.hibernate.tool.schema.extract.internal.ExtractionContextImpl;
import org.hibernate.tool.schema.extract.internal.InformationExtractorJdbcDatabaseMetaDataImpl;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.ExtractionContext;
import org.hibernate.tool.schema.internal.exec.ImprovedExtractionContextImpl;
import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.junit.After;
import org.junit.Test;
@ -122,8 +125,10 @@ public class TestExtraPhysicalTableTypes {
ssr,
database.getJdbcEnvironment(),
ddlTransactionIsolator,
database.getDefaultNamespace().getName()
database.getDefaultNamespace().getName(),
database.getServiceRegistry().getService( SchemaManagementTool.class )
);
ExtractionContextImpl extractionContext = new ExtractionContextImpl(
ssr,
database.getJdbcEnvironment(),

View File

@ -21,6 +21,7 @@ import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.Inheritance;
@ -163,6 +164,7 @@ public class SchemaUpdateTest {
@Table(name = "`testentity`")
public static class LowercaseTableNameEntity {
@Id
@GeneratedValue
long id;
String field1;
@ -173,6 +175,7 @@ public class SchemaUpdateTest {
@Entity(name = "TestEntity1")
public static class TestEntity {
@Id
@GeneratedValue
@Column(name = "`Id`")
long id;
String field1;
@ -192,6 +195,7 @@ public class SchemaUpdateTest {
@Table(name = "`TESTENTITY`")
public static class UppercaseTableNameEntity {
@Id
@GeneratedValue
long id;
String field1;
@ -207,6 +211,7 @@ public class SchemaUpdateTest {
@Table(name = "`TESTentity`", indexes = {@Index(name = "index1", columnList = "`FieLd1`"), @Index(name = "Index2", columnList = "`FIELD_2`")})
public static class MixedCaseTableNameEntity {
@Id
@GeneratedValue
long id;
@Column(name = "`FieLd1`")
String field1;
@ -224,6 +229,8 @@ public class SchemaUpdateTest {
@Entity(name = "Match")
public static class Match {
@Id
@GeneratedValue
long id;
String match;
@ -236,6 +243,8 @@ public class SchemaUpdateTest {
@Inheritance(strategy = InheritanceType.JOINED)
public static class InheritanceRootEntity {
@Id
@GeneratedValue
protected Long id;
}