HHH-14469 - Support schema-tooling on sub-sets of the relational model known to Hibernate

- @BootstrapServiceRegistry
- Support for filtering at schema-tooling level
This commit is contained in:
Steve Ebersole 2021-02-25 15:14:59 -06:00
parent 7b7597f40e
commit ddf434df7e
92 changed files with 2036 additions and 320 deletions

View File

@ -7,6 +7,7 @@
package org.hibernate.boot;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.function.Consumer;
@ -192,4 +193,9 @@ public interface Metadata extends Mapping {
java.util.Collection<Table> collectTableMappings();
Map<String, SqmFunctionDescriptor> getSqlFunctionMap();
/**
* All of the known model contributors
*/
Set<String> getContributors();
}

View File

@ -54,14 +54,14 @@ import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.boot.model.source.internal.ImplicitColumnNamingSecondPass;
import org.hibernate.boot.model.source.spi.LocalMetadataBuildingContext;
import org.hibernate.boot.spi.BootstrapContext;
import org.hibernate.boot.spi.InFlightMetadataCollector;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.MetadataBuildingOptions;
import org.hibernate.boot.query.NamedHqlQueryDefinition;
import org.hibernate.boot.query.NamedNativeQueryDefinition;
import org.hibernate.boot.query.NamedProcedureCallDefinition;
import org.hibernate.boot.query.NamedResultSetMappingDescriptor;
import org.hibernate.boot.spi.BootstrapContext;
import org.hibernate.boot.spi.InFlightMetadataCollector;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.MetadataBuildingOptions;
import org.hibernate.boot.spi.NaturalIdUniqueKeyBinder;
import org.hibernate.cfg.AnnotatedClassType;
import org.hibernate.cfg.AvailableSettings;
@ -231,6 +231,11 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
return sqlFunctionMap;
}
@Override
public Set<String> getContributors() {
	// Contributor tracking is not supported on the in-flight collector; the
	// contributor set is only computable on the fully-built Metadata (see
	// MetadataImpl#getContributors), after all tables/sequences/bindings exist.
	throw new UnsupportedOperationException();
}
@Override
public void validate() throws MappingException {
// nothing to do
@ -744,7 +749,8 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
String catalogName,
String name,
String subselectFragment,
boolean isAbstract) {
boolean isAbstract,
MetadataBuildingContext buildingContext) {
final Namespace namespace = getDatabase().locateNamespace(
getDatabase().toIdentifier( catalogName ),
getDatabase().toIdentifier( schemaName )
@ -761,17 +767,21 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
}
if ( subselectFragment != null ) {
return new Table( namespace, logicalName, subselectFragment, isAbstract );
return new Table( buildingContext.getCurrentContributorName(), namespace, logicalName, subselectFragment, isAbstract );
}
else {
Table table = namespace.locateTable( logicalName );
if ( table != null ) {
final Table existing = namespace.locateTable( logicalName );
if ( existing != null ) {
if ( !isAbstract ) {
table.setAbstract( false );
existing.setAbstract( false );
}
return table;
return existing;
}
return namespace.createTable( logicalName, isAbstract );
return namespace.createTable(
logicalName,
(physicalName) -> new Table( buildingContext.getCurrentContributorName(), namespace, physicalName, isAbstract )
);
}
}
@ -782,7 +792,8 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
String name,
boolean isAbstract,
String subselectFragment,
Table includedTable) throws DuplicateMappingException {
Table includedTable,
MetadataBuildingContext buildingContext) throws DuplicateMappingException {
final Namespace namespace = getDatabase().locateNamespace(
getDatabase().toIdentifier( catalogName ),
getDatabase().toIdentifier( schemaName )
@ -799,7 +810,17 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
}
if ( subselectFragment != null ) {
return new DenormalizedTable( namespace, logicalName, subselectFragment, isAbstract, includedTable );
return namespace.createDenormalizedTable(
logicalName,
(physicalName) -> new DenormalizedTable(
buildingContext.getCurrentContributorName(),
namespace,
logicalName,
subselectFragment,
isAbstract,
includedTable
)
);
}
else {
Table table = namespace.locateTable( logicalName );
@ -807,7 +828,16 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
throw new DuplicateMappingException( DuplicateMappingException.Type.TABLE, logicalName.toString() );
}
else {
table = namespace.createDenormalizedTable( logicalName, isAbstract, includedTable );
table = namespace.createDenormalizedTable(
logicalName,
(physicalTableName) -> new DenormalizedTable(
buildingContext.getCurrentContributorName(),
namespace,
physicalTableName,
isAbstract,
includedTable
)
);
}
return table;
}

View File

@ -16,9 +16,10 @@ import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.MetadataBuildingOptions;
/**
* @author Steve Ebersole
* Root MetadataBuildingContext
*/
public class MetadataBuildingContextRootImpl implements MetadataBuildingContext {
private final String contributor;
private final BootstrapContext bootstrapContext;
private final MetadataBuildingOptions options;
private final MappingDefaults mappingDefaults;
@ -27,9 +28,11 @@ public class MetadataBuildingContextRootImpl implements MetadataBuildingContext
private final TypeDefinitionRegistryStandardImpl typeDefinitionRegistry;
public MetadataBuildingContextRootImpl(
String contributor,
BootstrapContext bootstrapContext,
MetadataBuildingOptions options,
InFlightMetadataCollector metadataCollector) {
this.contributor = contributor;
this.bootstrapContext = bootstrapContext;
this.options = options;
this.mappingDefaults = options.getMappingDefaults();
@ -77,4 +80,9 @@ public class MetadataBuildingContextRootImpl implements MetadataBuildingContext
public TypeDefinitionRegistryStandardImpl getTypeDefinitionRegistry() {
return typeDefinitionRegistry;
}
@Override
public String getCurrentContributorName() {
	// The contributor name ("orm", "envers", ...) injected via the constructor;
	// fixed for the lifetime of this root building context.
	return contributor;
}
}

View File

@ -26,6 +26,7 @@ import org.hibernate.boot.model.IdentifierGeneratorDefinition;
import org.hibernate.boot.model.TypeDefinition;
import org.hibernate.boot.model.relational.Database;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.BootstrapContext;
import org.hibernate.boot.spi.MetadataBuildingOptions;
@ -310,6 +311,27 @@ public class MetadataImpl implements MetadataImplementor, Serializable {
return sqlFunctionMap;
}
@Override
public Set<String> getContributors() {
	// Union of the contributor names seen on the entity bindings plus every
	// table and sequence registered with the database's namespaces.
	final Set<String> result = new HashSet<>();
	entityBindingMap.values().forEach( binding -> result.add( binding.getContributor() ) );
	for ( Namespace namespace : database.getNamespaces() ) {
		namespace.getTables().forEach( table -> result.add( table.getContributor() ) );
		namespace.getSequences().forEach( sequence -> result.add( sequence.getContributor() ) );
	}
	return result;
}
@Override
public java.util.Collection<Table> collectTableMappings() {
ArrayList<Table> tables = new ArrayList<>();

View File

@ -29,6 +29,7 @@ import org.hibernate.boot.model.source.spi.MetadataSourceProcessor;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.AdditionalJaxbMappingProducer;
import org.hibernate.boot.spi.BootstrapContext;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.MetadataBuildingOptions;
import org.hibernate.boot.spi.MetadataContributor;
import org.hibernate.boot.spi.MetadataImplementor;
@ -133,6 +134,7 @@ public class MetadataBuildingProcess {
final ClassLoaderService classLoaderService = options.getServiceRegistry().getService( ClassLoaderService.class );
final MetadataBuildingContextRootImpl rootMetadataBuildingContext = new MetadataBuildingContextRootImpl(
"orm",
bootstrapContext,
options,
metadataCollector
@ -290,7 +292,7 @@ public class MetadataBuildingProcess {
metadataCollector.processSecondPasses( rootMetadataBuildingContext );
if ( options.isXmlMappingEnabled() ) {
Iterable<AdditionalJaxbMappingProducer> producers = classLoaderService.loadJavaServices( AdditionalJaxbMappingProducer.class );
final Iterable<AdditionalJaxbMappingProducer> producers = classLoaderService.loadJavaServices( AdditionalJaxbMappingProducer.class );
if ( producers != null ) {
final EntityHierarchyBuilder hierarchyBuilder = new EntityHierarchyBuilder();
// final MappingBinder mappingBinder = new MappingBinder( true );

View File

@ -0,0 +1,15 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.boot.model.relational;
import org.hibernate.mapping.Contributable;
/**
 * {@link Contributable} specialization for exportable database objects
 * (tables and sequences) so that schema tooling can filter the relational
 * model by the contributor (ORM, Envers, etc) that registered each object.
 *
 * @see Contributable
 * @see Exportable
 */
public interface ContributableDatabaseObject extends Contributable, Exportable {
}

View File

@ -10,6 +10,7 @@ import java.util.Collection;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import java.util.function.Function;
import org.hibernate.HibernateException;
import org.hibernate.boot.model.naming.Identifier;
@ -33,8 +34,8 @@ public class Namespace {
private final Name name;
private final Name physicalName;
private Map<Identifier, Table> tables = new TreeMap<>();
private Map<Identifier, Sequence> sequences = new TreeMap<>();
private final Map<Identifier, Table> tables = new TreeMap<>();
private final Map<Identifier, Sequence> sequences = new TreeMap<>();
public Namespace(PhysicalNamingStrategy physicalNamingStrategy, JdbcEnvironment jdbcEnvironment, Name name) {
this.physicalNamingStrategy = physicalNamingStrategy;
@ -89,28 +90,29 @@ public class Namespace {
*
* @return the created table.
*/
public Table createTable(Identifier logicalTableName, boolean isAbstract) {
public Table createTable(Identifier logicalTableName, Function<Identifier,Table> creator) {
final Table existing = tables.get( logicalTableName );
if ( existing != null ) {
return existing;
}
final Identifier physicalTableName = physicalNamingStrategy.toPhysicalTableName( logicalTableName, jdbcEnvironment );
Table table = new Table( this, physicalTableName, isAbstract );
final Table table = creator.apply( physicalTableName );
tables.put( logicalTableName, table );
return table;
}
public DenormalizedTable createDenormalizedTable(Identifier logicalTableName, boolean isAbstract, Table includedTable) {
public DenormalizedTable createDenormalizedTable(Identifier logicalTableName, Function<Identifier,DenormalizedTable> creator) {
final Table existing = tables.get( logicalTableName );
if ( existing != null ) {
// for now, assume the existing table is already a DenormalizedTable
return (DenormalizedTable) existing;
}
final Identifier physicalTableName = physicalNamingStrategy.toPhysicalTableName( logicalTableName, jdbcEnvironment );
DenormalizedTable table = new DenormalizedTable( this, physicalTableName, isAbstract, includedTable );
final DenormalizedTable table = creator.apply( physicalTableName );
tables.put( logicalTableName, table );
return table;
}
@ -118,21 +120,15 @@ public class Namespace {
return sequences.get( name );
}
public Sequence createSequence(Identifier logicalName, int initialValue, int increment) {
public Sequence createSequence(Identifier logicalName, Function<Identifier,Sequence> creator) {
if ( sequences.containsKey( logicalName ) ) {
throw new HibernateException( "Sequence was already registered with that name [" + logicalName.toString() + "]" );
}
final Identifier physicalName = physicalNamingStrategy.toPhysicalSequenceName( logicalName, jdbcEnvironment );
Sequence sequence = new Sequence(
this.physicalName.getCatalog(),
this.physicalName.getSchema(),
physicalName,
initialValue,
increment
);
final Sequence sequence = creator.apply( physicalName );
sequences.put( logicalName, sequence );
return sequence;
}

View File

@ -14,7 +14,7 @@ import org.hibernate.boot.model.naming.Identifier;
*
* @author Steve Ebersole
*/
public class Sequence implements Exportable {
public class Sequence implements ContributableDatabaseObject {
public static class Name extends QualifiedNameParser.NameParts {
public Name(
Identifier catalogIdentifier,
@ -26,21 +26,33 @@ public class Sequence implements Exportable {
private final QualifiedSequenceName name;
private final String exportIdentifier;
private final String contributor;
private int initialValue = 1;
private int incrementSize = 1;
public Sequence(Identifier catalogName, Identifier schemaName, Identifier sequenceName) {
this.name = new QualifiedSequenceName( catalogName, schemaName, sequenceName );
public Sequence(
String contributor,
Identifier catalogName,
Identifier schemaName,
Identifier sequenceName) {
this.contributor = contributor;
this.name = new QualifiedSequenceName(
catalogName,
schemaName,
sequenceName
);
this.exportIdentifier = name.render();
}
public Sequence(
String contributor,
Identifier catalogName,
Identifier schemaName,
Identifier sequenceName,
int initialValue,
int incrementSize) {
this( catalogName, schemaName, sequenceName );
this( contributor, catalogName, schemaName, sequenceName );
this.initialValue = initialValue;
this.incrementSize = incrementSize;
}
@ -54,6 +66,11 @@ public class Sequence implements Exportable {
return exportIdentifier;
}
@Override
public String getContributor() {
	// Contributable - the name of the subsystem (e.g. "orm") that contributed
	// this sequence; captured in the constructor.
	return contributor;
}
public int getInitialValue() {
return initialValue;
}

View File

@ -96,7 +96,10 @@ public class EntityHierarchyBuilder {
final RootEntitySourceImpl rootEntitySource = new RootEntitySourceImpl( mappingDocument, jaxbRootEntity );
entitySourceByNameMap.put( rootEntitySource.getEntityNamingSource().getEntityName(), rootEntitySource );
final EntityHierarchySourceImpl hierarchy = new EntityHierarchySourceImpl( rootEntitySource );
final EntityHierarchySourceImpl hierarchy = new EntityHierarchySourceImpl(
rootEntitySource,
mappingDocument
);
entityHierarchyList.add( hierarchy );
linkAnyWaiting( mappingDocument, rootEntitySource );

View File

@ -44,6 +44,7 @@ import org.hibernate.internal.util.StringHelper;
*/
public class EntityHierarchySourceImpl implements EntityHierarchySource {
private final RootEntitySourceImpl rootEntitySource;
private final MappingDocument rootEntityMappingDocument;
private final IdentifierSource identifierSource;
private final VersionAttributeSource versionAttributeSource;
@ -57,8 +58,11 @@ public class EntityHierarchySourceImpl implements EntityHierarchySource {
private Set<String> collectedEntityNames = new HashSet<>();
public EntityHierarchySourceImpl(RootEntitySourceImpl rootEntitySource) {
public EntityHierarchySourceImpl(
RootEntitySourceImpl rootEntitySource,
MappingDocument rootEntityMappingDocument) {
this.rootEntitySource = rootEntitySource;
this.rootEntityMappingDocument = rootEntityMappingDocument;
this.rootEntitySource.injectHierarchy( this );
this.identifierSource = interpretIdentifierSource( rootEntitySource );
@ -74,6 +78,10 @@ public class EntityHierarchySourceImpl implements EntityHierarchySource {
collectedEntityNames.add( rootEntitySource.getEntityNamingSource().getEntityName() );
}
/**
 * The mapping document that defined this hierarchy's root entity; used as
 * the building context when binding the root entity (see
 * ModelBinder#bindEntityHierarchy), which in turn determines the contributor.
 */
public MappingDocument getRootEntityMappingDocument() {
	return rootEntityMappingDocument;
}
private static IdentifierSource interpretIdentifierSource(RootEntitySourceImpl rootEntitySource) {
final JaxbHbmSimpleIdType simpleId = rootEntitySource.jaxbEntityMapping().getId();
final JaxbHbmCompositeIdType compositeId = rootEntitySource.jaxbEntityMapping().getCompositeId();

View File

@ -55,6 +55,7 @@ public class HbmMetadataSourceProcessorImpl implements MetadataSourceProcessor {
}
final MappingDocument mappingDocument = new MappingDocument(
"orm",
(JaxbHbmHibernateMapping) xmlBinding.getRoot(),
xmlBinding.getOrigin(),
rootBuildingContext

View File

@ -54,11 +54,14 @@ public class MappingDocument implements HbmLocalMetadataBuildingContext, Metadat
private final TypeDefinitionRegistryStandardImpl typeDefinitionRegistry;
private final String contributor;
public MappingDocument(
String contributor,
JaxbHbmHibernateMapping documentRoot,
Origin origin,
MetadataBuildingContext rootBuildingContext) {
this.contributor = contributor;
this.documentRoot = documentRoot;
this.origin = origin;
this.rootBuildingContext = rootBuildingContext;
@ -163,6 +166,11 @@ public class MappingDocument implements HbmLocalMetadataBuildingContext, Metadat
return typeDefinitionRegistry;
}
@Override
public String getCurrentContributorName() {
	// Contributor name supplied at construction (e.g. "orm" for hbm.xml
	// processing - see HbmMetadataSourceProcessorImpl).
	return contributor;
}
@Override
public void prepare() {
// nothing to do here

View File

@ -198,7 +198,7 @@ public class ModelBinder {
}
public void bindEntityHierarchy(EntityHierarchySourceImpl hierarchySource) {
final RootClass rootEntityDescriptor = new RootClass( metadataBuildingContext );
final RootClass rootEntityDescriptor = new RootClass( hierarchySource.getRootEntityMappingDocument() );
bindRootEntity( hierarchySource, rootEntityDescriptor );
hierarchySource.getRoot()
.getLocalMetadataBuildingContext()
@ -1764,12 +1764,15 @@ public class ModelBinder {
Table secondaryTable;
final Identifier logicalTableName;
if ( TableSource.class.isInstance( secondaryTableSource.getTableSource() ) ) {
if ( secondaryTableSource.getTableSource() instanceof TableSource ) {
final TableSource tableSource = (TableSource) secondaryTableSource.getTableSource();
logicalTableName = database.toIdentifier( tableSource.getExplicitTableName() );
secondaryTable = namespace.locateTable( logicalTableName );
if ( secondaryTable == null ) {
secondaryTable = namespace.createTable( logicalTableName, false );
secondaryTable = namespace.createTable(
logicalTableName,
(identifier) -> new Table( mappingDocument.getCurrentContributorName(), namespace, identifier, false )
);
}
else {
secondaryTable.setAbstract( false );
@ -1780,6 +1783,7 @@ public class ModelBinder {
else {
final InLineViewSource inLineViewSource = (InLineViewSource) secondaryTableSource.getTableSource();
secondaryTable = new Table(
metadataBuildingContext.getCurrentContributorName(),
namespace,
inLineViewSource.getSelectStatement(),
false
@ -2951,13 +2955,26 @@ public class ModelBinder {
}
if ( denormalizedSuperTable == null ) {
table = namespace.createTable( logicalTableName, isAbstract );
table = namespace.createTable(
logicalTableName,
(identifier) -> new Table(
mappingDocument.getCurrentContributorName(),
namespace,
identifier,
isAbstract
)
);
}
else {
table = namespace.createDenormalizedTable(
logicalTableName,
isAbstract,
denormalizedSuperTable
(physicalTableName) -> new DenormalizedTable(
mappingDocument.getCurrentContributorName(),
namespace,
physicalTableName,
isAbstract,
denormalizedSuperTable
)
);
}
}
@ -2966,10 +2983,16 @@ public class ModelBinder {
subselect = inLineViewSource.getSelectStatement();
logicalTableName = database.toIdentifier( inLineViewSource.getLogicalName() );
if ( denormalizedSuperTable == null ) {
table = new Table( namespace, subselect, isAbstract );
table = new Table( mappingDocument.getCurrentContributorName(), namespace, subselect, isAbstract );
}
else {
table = new DenormalizedTable( namespace, subselect, isAbstract, denormalizedSuperTable );
table = new DenormalizedTable(
mappingDocument.getCurrentContributorName(),
namespace,
subselect,
isAbstract,
denormalizedSuperTable
);
}
table.setName( logicalTableName.render() );
}
@ -3312,10 +3335,19 @@ public class ModelBinder {
.determineCollectionTableName( implicitNamingSource );
}
collectionTable = namespace.createTable( logicalName, false );
collectionTable = namespace.createTable(
logicalName,
(identifier) -> new Table(
metadataBuildingContext.getCurrentContributorName(),
namespace,
identifier,
false
)
);
}
else {
collectionTable = new Table(
metadataBuildingContext.getCurrentContributorName(),
namespace,
( (InLineViewSource) tableSpecSource ).getSelectStatement(),
false

View File

@ -239,4 +239,9 @@ public abstract class AbstractDelegatingMetadata implements MetadataImplementor
public NamedObjectRepository buildNamedQueryRepository(SessionFactoryImplementor sessionFactory) {
return delegate().buildNamedQueryRepository( sessionFactory );
}
@Override
public Set<String> getContributors() {
	// Use the delegate() accessor for consistency with the other delegating
	// methods in this class (e.g. buildNamedQueryRepository just above),
	// rather than touching the backing field directly.
	return delegate().getContributors();
}
}

View File

@ -115,7 +115,13 @@ public interface InFlightMetadataCollector extends Mapping, MetadataImplementor
*
* @return The created table metadata, or the existing reference.
*/
Table addTable(String schema, String catalog, String name, String subselect, boolean isAbstract);
Table addTable(
String schema,
String catalog,
String name,
String subselect,
boolean isAbstract,
MetadataBuildingContext buildingContext);
/**
* Adds a 'denormalized table' to this repository.
@ -126,7 +132,7 @@ public interface InFlightMetadataCollector extends Mapping, MetadataImplementor
* @param isAbstract Is the table abstract (i.e. not really existing in the DB)?
* @param subselect A select statement which defines a logical table, much
* like a DB view.
* @param includedTable ???
* @param includedTable The "common" table
*
* @return The created table metadata.
*
@ -138,7 +144,8 @@ public interface InFlightMetadataCollector extends Mapping, MetadataImplementor
String name,
boolean isAbstract,
String subselect,
Table includedTable) throws DuplicateMappingException;
Table includedTable,
MetadataBuildingContext buildingContext) throws DuplicateMappingException;
/**
* Adds metadata for a named query to this repository.

View File

@ -66,4 +66,9 @@ public interface MetadataBuildingContext {
}
TypeDefinitionRegistry getTypeDefinitionRegistry();
/**
* The name of the contributor whose mappings we are currently processing
*/
String getCurrentContributorName();
}

View File

@ -491,7 +491,8 @@ public class TableBinder {
logicalName.render(),
isAbstract,
subselect,
denormalizedSuperTableXref.getPrimaryTable()
denormalizedSuperTableXref.getPrimaryTable(),
buildingContext
);
}
else {
@ -500,7 +501,8 @@ public class TableBinder {
catalog,
logicalName.render(),
subselect,
isAbstract
isAbstract,
buildingContext
);
}

View File

@ -47,6 +47,11 @@ public interface IdentifierGenerator {
*/
String GENERATOR_NAME = "GENERATOR_NAME";
/**
* The contributor that contributed this generator
*/
String CONTRIBUTOR_NAME = "CONTRIBUTOR";
/**
* Generate a new identifier.
*

View File

@ -92,6 +92,8 @@ public class MultipleHiLoPerTableGenerator implements PersistentIdentifierGenera
private static final String DEFAULT_PK_COLUMN = "sequence_name";
private static final String DEFAULT_VALUE_COLUMN = "sequence_next_hi_value";
private String contributor;
private QualifiedName qualifiedTableName;
private String tableName;
private String segmentColumnName;
@ -273,6 +275,11 @@ public class MultipleHiLoPerTableGenerator implements PersistentIdentifierGenera
if ( maxLo >= 1 ) {
hiloOptimizer = new LegacyHiLoAlgorithmOptimizer( returnClass, maxLo );
}
contributor = params.getProperty( CONTRIBUTOR_NAME );
if ( contributor == null ) {
contributor = "orm";
}
}
protected QualifiedName determineGeneratorTableName(Properties params, JdbcEnvironment jdbcEnvironment) {
@ -316,7 +323,10 @@ public class MultipleHiLoPerTableGenerator implements PersistentIdentifierGenera
Table table = namespace.locateTable( qualifiedTableName.getObjectName() );
if ( table == null ) {
table = namespace.createTable( qualifiedTableName.getObjectName(), false );
table = namespace.createTable(
qualifiedTableName.getObjectName(),
(identifier) -> new Table( contributor, namespace, identifier, false )
);
// todo : not sure the best solution here. do we add the columns if missing? other?
table.setPrimaryKey( new PrimaryKey( table ) );

View File

@ -65,6 +65,8 @@ public class SequenceGenerator
@Deprecated
public static final String PARAMETERS = "parameters";
private String contributor;
private QualifiedName logicalQualifiedSequenceName;
private String sequenceName;
private Type identifierType;
@ -103,6 +105,13 @@ public class SequenceGenerator
"org.hibernate.id.enhanced.SequenceStyleGenerator generator instead."
);
}
contributor = determineContributor( params );
}
/**
 * Resolve the contributor name from the generator config, defaulting to
 * "orm" when {@code CONTRIBUTOR_NAME} was not set by the caller.
 */
private String determineContributor(Properties params) {
	// Properties#getProperty(String,String) returns the default when the key
	// is absent - equivalent to the explicit null-check, but more direct.
	return params.getProperty( CONTRIBUTOR_NAME, "orm" );
}
@Override
@ -181,8 +190,14 @@ public class SequenceGenerator
else {
sequence = namespace.createSequence(
logicalQualifiedSequenceName.getObjectName(),
1,
1
(physicalName) -> new Sequence(
contributor,
namespace.getPhysicalName().getCatalog(),
namespace.getPhysicalName().getSchema(),
physicalName,
1,
1
)
);
}

View File

@ -36,6 +36,7 @@ public class SequenceStructure implements DatabaseStructure {
SequenceStructure.class.getName()
);
private final String contributor;
private final QualifiedName logicalQualifiedSequenceName;
private final int initialValue;
private final int incrementSize;
@ -48,10 +49,12 @@ public class SequenceStructure implements DatabaseStructure {
public SequenceStructure(
JdbcEnvironment jdbcEnvironment,
String contributor,
QualifiedName qualifiedSequenceName,
int initialValue,
int incrementSize,
Class numberType) {
this.contributor = contributor;
this.logicalQualifiedSequenceName = qualifiedSequenceName;
this.initialValue = initialValue;
@ -179,7 +182,17 @@ public class SequenceStructure implements DatabaseStructure {
sequence.validate( initialValue, sourceIncrementSize );
}
else {
sequence = namespace.createSequence( logicalQualifiedSequenceName.getObjectName(), initialValue, sourceIncrementSize );
sequence = namespace.createSequence(
logicalQualifiedSequenceName.getObjectName(),
(physicalName) -> new Sequence(
contributor,
namespace.getPhysicalName().getCatalog(),
namespace.getPhysicalName().getSchema(),
physicalName,
initialValue,
sourceIncrementSize
)
);
}
this.sequenceName = database.getJdbcEnvironment().getQualifiedObjectNameFormatter().format(

View File

@ -501,7 +501,14 @@ public class SequenceStyleGenerator
QualifiedName sequenceName,
int initialValue,
int incrementSize) {
return new SequenceStructure( jdbcEnvironment, sequenceName, initialValue, incrementSize, type.getReturnedClass() );
return new SequenceStructure(
jdbcEnvironment,
determineContributor( params ),
sequenceName,
initialValue,
incrementSize,
type.getReturnedClass()
);
}
@SuppressWarnings("WeakerAccess")
@ -513,7 +520,23 @@ public class SequenceStyleGenerator
int initialValue,
int incrementSize) {
final Identifier valueColumnName = determineValueColumnName( params, jdbcEnvironment );
return new TableStructure( jdbcEnvironment, sequenceName, valueColumnName, initialValue, incrementSize, type.getReturnedClass() );
final String contributor = determineContributor( params );
return new TableStructure(
jdbcEnvironment,
contributor,
sequenceName,
valueColumnName,
initialValue,
incrementSize,
type.getReturnedClass()
);
}
/**
 * Resolve the contributor name from the generator config, defaulting to
 * "orm" when {@link IdentifierGenerator#CONTRIBUTOR_NAME} was not set.
 */
private String determineContributor(Properties params) {
	// Properties#getProperty(String,String) returns the default when the key
	// is absent - equivalent to the explicit null-check, but more direct.
	return params.getProperty( IdentifierGenerator.CONTRIBUTOR_NAME, "orm" );
}

View File

@ -250,6 +250,8 @@ public class TableGenerator implements PersistentIdentifierGenerator, Configurab
private Optimizer optimizer;
private long accessCount;
private String contributor;
@Override
public Object generatorKey() {
return qualifiedTableName.render();
@ -386,6 +388,11 @@ public class TableGenerator implements PersistentIdentifierGenerator, Configurab
incrementSize,
optimizerInitialValue
);
contributor = params.getProperty( CONTRIBUTOR_NAME );
if ( contributor == null ) {
contributor = "orm";
}
}
/**
@ -735,7 +742,10 @@ public class TableGenerator implements PersistentIdentifierGenerator, Configurab
Table table = namespace.locateTable( qualifiedTableName.getObjectName() );
if ( table == null ) {
table = namespace.createTable( qualifiedTableName.getObjectName(), false );
table = namespace.createTable(
qualifiedTableName.getObjectName(),
(identifier) -> new Table( contributor, namespace, identifier, false )
);
// todo : not sure the best solution here. do we add the columns if missing? other?
final Column segmentColumn = new ExportableColumn(

View File

@ -55,6 +55,8 @@ public class TableStructure implements DatabaseStructure {
private final int incrementSize;
private final Class numberType;
private String contributor;
private String tableNameText;
private String valueColumnNameText;
@ -64,13 +66,16 @@ public class TableStructure implements DatabaseStructure {
private boolean applyIncrementSizeToSourceValues;
private int accessCounter;
public TableStructure(
JdbcEnvironment jdbcEnvironment,
String contributor,
QualifiedName qualifiedTableName,
Identifier valueColumnNameIdentifier,
int initialValue,
int incrementSize,
Class numberType) {
this.contributor = contributor;
this.logicalQualifiedTableName = qualifiedTableName;
this.logicalValueColumnNameIdentifier = valueColumnNameIdentifier;
@ -251,7 +256,10 @@ public class TableStructure implements DatabaseStructure {
Table table = namespace.locateTable( logicalQualifiedTableName.getObjectName() );
boolean tableCreated = false;
if ( table == null ) {
table = namespace.createTable( logicalQualifiedTableName.getObjectName(), false );
table = namespace.createTable(
logicalQualifiedTableName.getObjectName(),
(identifier) -> new Table( contributor, namespace, identifier, false )
);
tableCreated = true;
}

View File

@ -7,6 +7,7 @@
package org.hibernate.id.factory;
import java.util.Properties;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.id.IdentifierGenerator;
import org.hibernate.type.Type;
@ -22,7 +23,7 @@ public interface IdentifierGeneratorFactory {
*
* @return the dialect
*/
public Dialect getDialect();
Dialect getDialect();
/**
* Allow injection of the dialect to use.
@ -33,7 +34,7 @@ public interface IdentifierGeneratorFactory {
* ctor injected.
*/
@Deprecated
public void setDialect(Dialect dialect);
void setDialect(Dialect dialect);
/**
* Given a strategy, retrieve the appropriate identifier generator instance.
@ -44,7 +45,7 @@ public interface IdentifierGeneratorFactory {
*
* @return The appropriate generator instance.
*/
public IdentifierGenerator createIdentifierGenerator(String strategy, Type type, Properties config);
IdentifierGenerator createIdentifierGenerator(String strategy, Type type, Properties config);
/**
* Retrieve the class that will be used as the {@link IdentifierGenerator} for the given strategy.
@ -52,5 +53,5 @@ public interface IdentifierGeneratorFactory {
* @param strategy The strategy
* @return The generator class.
*/
public Class getIdentifierGeneratorClass(String strategy);
Class getIdentifierGeneratorClass(String strategy);
}

View File

@ -0,0 +1,15 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.mapping;
/**
 * Part of the mapping model that is associated with a contributor - the
 * subsystem that contributed the mapping: ORM, Envers, Search, etc.
 *
 * @author Steve Ebersole
 */
public interface Contributable {
	/**
	 * The name of the contributor which contributed this mapping
	 */
	String getContributor();
}

View File

@ -22,30 +22,36 @@ public class DenormalizedTable extends Table {
private final Table includedTable;
public DenormalizedTable(Table includedTable) {
this.includedTable = includedTable;
includedTable.setHasDenormalizedTables();
}
public DenormalizedTable(Namespace namespace, Identifier physicalTableName, boolean isAbstract, Table includedTable) {
super( namespace, physicalTableName, isAbstract );
public DenormalizedTable(
String contributor,
Namespace namespace,
Identifier physicalTableName,
boolean isAbstract,
Table includedTable) {
super( contributor, namespace, physicalTableName, isAbstract );
this.includedTable = includedTable;
includedTable.setHasDenormalizedTables();
}
public DenormalizedTable(
String contributor,
Namespace namespace,
Identifier physicalTableName,
String subselectFragment,
boolean isAbstract,
Table includedTable) {
super( namespace, physicalTableName, subselectFragment, isAbstract );
super( contributor, namespace, physicalTableName, subselectFragment, isAbstract );
this.includedTable = includedTable;
includedTable.setHasDenormalizedTables();
}
public DenormalizedTable(Namespace namespace, String subselect, boolean isAbstract, Table includedTable) {
super( namespace, subselect, isAbstract );
public DenormalizedTable(
String contributor,
Namespace namespace,
String subselect,
boolean isAbstract,
Table includedTable) {
super( contributor, namespace, subselect, isAbstract );
this.includedTable = includedTable;
includedTable.setHasDenormalizedTables();
}

View File

@ -35,13 +35,14 @@ import org.hibernate.sql.Alias;
*
* @author Gavin King
*/
public abstract class PersistentClass implements AttributeContainer, Serializable, Filterable, MetaAttributable {
public abstract class PersistentClass implements AttributeContainer, Serializable, Filterable, MetaAttributable, Contributable {
private static final Alias PK_ALIAS = new Alias( 15, "PK" );
public static final String NULL_DISCRIMINATOR_MAPPING = "null";
public static final String NOT_NULL_DISCRIMINATOR_MAPPING = "not null";
private final MetadataBuildingContext metadataBuildingContext;
private final String contributor;
private String entityName;
@ -95,6 +96,11 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
public PersistentClass(MetadataBuildingContext metadataBuildingContext) {
this.metadataBuildingContext = metadataBuildingContext;
this.contributor = metadataBuildingContext.getCurrentContributorName();
}
public String getContributor() {
return contributor;
}
public ServiceRegistry getServiceRegistry() {

View File

@ -334,6 +334,11 @@ public abstract class SimpleValue implements KeyValue {
AvailableSettings.PREFER_POOLED_VALUES_LO,
cs.getSetting( AvailableSettings.PREFER_POOLED_VALUES_LO, StandardConverters.BOOLEAN, false )
);
params.put(
IdentifierGenerator.CONTRIBUTOR_NAME,
buildingContext.getCurrentContributorName()
);
if ( cs.getSettings().get( AvailableSettings.PREFERRED_POOLED_OPTIMIZER ) != null ) {
params.put(
AvailableSettings.PREFERRED_POOLED_OPTIMIZER,
@ -342,7 +347,11 @@ public abstract class SimpleValue implements KeyValue {
}
identifierGeneratorFactory.setDialect( dialect );
identifierGenerator = identifierGeneratorFactory.createIdentifierGenerator( identifierGeneratorStrategy, getType(), params );
identifierGenerator = identifierGeneratorFactory.createIdentifierGenerator(
identifierGeneratorStrategy,
getType(),
params
);
return identifierGenerator;
}

View File

@ -21,6 +21,7 @@ import org.hibernate.HibernateException;
import org.hibernate.MappingException;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.ContributableDatabaseObject;
import org.hibernate.boot.model.relational.Exportable;
import org.hibernate.boot.model.relational.InitCommand;
import org.hibernate.boot.model.relational.Namespace;
@ -42,10 +43,12 @@ import org.jboss.logging.Logger;
* @author Gavin King
*/
@SuppressWarnings("deprecation")
public class Table implements RelationalModel, Serializable, Exportable {
public class Table implements RelationalModel, Serializable, ContributableDatabaseObject {
private static final Logger log = Logger.getLogger( Table.class );
private static final Column[] EMPTY_COLUMN_ARRAY = new Column[0];
private final String contributor;
private Identifier catalog;
private Identifier schema;
private Identifier name;
@ -70,16 +73,24 @@ public class Table implements RelationalModel, Serializable, Exportable {
private List<InitCommand> initCommands;
public Table() {
this( "orm" );
}
public Table(String name) {
public Table(String contributor) {
this( contributor, null );
}
public Table(String contributor, String name) {
this.contributor = contributor;
setName( name );
}
public Table(
String contributor,
Namespace namespace,
Identifier physicalTableName,
boolean isAbstract) {
this.contributor = contributor;
this.catalog = namespace.getPhysicalName().getCatalog();
this.schema = namespace.getPhysicalName().getSchema();
this.name = physicalTableName;
@ -87,17 +98,12 @@ public class Table implements RelationalModel, Serializable, Exportable {
}
public Table(
Identifier catalog,
Identifier schema,
String contributor,
Namespace namespace,
Identifier physicalTableName,
String subselect,
boolean isAbstract) {
this.catalog = catalog;
this.schema = schema;
this.name = physicalTableName;
this.isAbstract = isAbstract;
}
public Table(Namespace namespace, Identifier physicalTableName, String subselect, boolean isAbstract) {
this.contributor = contributor;
this.catalog = namespace.getPhysicalName().getCatalog();
this.schema = namespace.getPhysicalName().getSchema();
this.name = physicalTableName;
@ -105,13 +111,19 @@ public class Table implements RelationalModel, Serializable, Exportable {
this.isAbstract = isAbstract;
}
public Table(Namespace namespace, String subselect, boolean isAbstract) {
public Table(String contributor, Namespace namespace, String subselect, boolean isAbstract) {
this.contributor = contributor;
this.catalog = namespace.getPhysicalName().getCatalog();
this.schema = namespace.getPhysicalName().getSchema();
this.subselect = subselect;
this.isAbstract = isAbstract;
}
@Override
public String getContributor() {
return contributor;
}
/**
* @deprecated Should use {@link QualifiedObjectNameFormatter#format} on QualifiedObjectNameFormatter
* obtained from {@link org.hibernate.engine.jdbc.env.spi.JdbcEnvironment}

View File

@ -52,6 +52,11 @@ public interface EntityMappingType extends ManagedMappingType, EntityValuedModel
*/
EntityPersister getEntityPersister();
default String getContributor() {
// todo (6.0) : needed for the HHH-14470 half related to HHH-14469
return "orm";
}
default EntityRepresentationStrategy getRepresentationStrategy() {
return getEntityPersister().getRepresentationStrategy();
}

View File

@ -12,13 +12,14 @@ import java.util.function.Function;
import org.hibernate.boot.model.relational.Exportable;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Contributable;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.persister.entity.Joinable;
/**
* @author Steve Ebersole
*/
public class IdTable implements Exportable {
public class IdTable implements Exportable, Contributable {
private final EntityMappingType entityDescriptor;
private final String qualifiedTableName;
@ -77,9 +78,10 @@ public class IdTable implements Exportable {
}
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Exportable
@Override
public String getContributor() {
return entityDescriptor.getContributor();
}
@Override
public String getExportIdentifier() {

View File

@ -39,6 +39,7 @@ import org.hibernate.tool.schema.internal.ExceptionHandlerCollectingImpl;
import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl;
import org.hibernate.tool.schema.internal.Helper;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaManagementException;
@ -60,7 +61,7 @@ import org.hibernate.tool.schema.spi.TargetDescriptor;
public class SchemaExport {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( SchemaExport.class );
public static enum Type {
public enum Type {
CREATE( Action.CREATE ),
DROP( Action.DROP ),
NONE( Action.NONE ),
@ -81,7 +82,7 @@ public class SchemaExport {
}
}
public static enum Action {
public enum Action {
/**
* None - duh :P
*/
@ -228,7 +229,6 @@ public class SchemaExport {
execute( targetTypes, action, metadata, ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry() );
}
@SuppressWarnings("unchecked")
public void execute(EnumSet<TargetType> targetTypes, Action action, Metadata metadata, ServiceRegistry serviceRegistry) {
if ( action == Action.NONE ) {
LOG.debug( "Skipping SchemaExport as Action.NONE was passed" );
@ -255,7 +255,7 @@ public class SchemaExport {
Metadata metadata,
ServiceRegistry serviceRegistry,
TargetDescriptor targetDescriptor) {
Map config = new HashMap( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
Map<String,Object> config = new HashMap<>( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
config.put( AvailableSettings.HBM2DDL_DELIMITER, delimiter );
config.put( AvailableSettings.FORMAT_SQL, format );
config.put( AvailableSettings.HBM2DDL_IMPORT_FILES, importFiles );
@ -287,6 +287,7 @@ public class SchemaExport {
tool.getSchemaDropper( config ).doDrop(
metadata,
executionOptions,
ContributableMatcher.ALL,
sourceDescriptor,
targetDescriptor
);
@ -296,6 +297,7 @@ public class SchemaExport {
tool.getSchemaCreator( config ).doCreation(
metadata,
executionOptions,
ContributableMatcher.ALL,
sourceDescriptor,
targetDescriptor
);
@ -396,7 +398,7 @@ public class SchemaExport {
private static MetadataImplementor buildMetadata(
CommandLineArgs parsedArgs,
StandardServiceRegistry serviceRegistry) throws Exception {
StandardServiceRegistry serviceRegistry) {
final MetadataSources metadataSources = new MetadataSources( serviceRegistry );
for ( String filename : parsedArgs.hbmXmlFiles ) {

View File

@ -35,6 +35,7 @@ import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerCollectingImpl;
import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
@ -90,7 +91,7 @@ public class SchemaUpdate {
final TargetDescriptor targetDescriptor = SchemaExport.buildTargetDescriptor( targetTypes, outputFile, serviceRegistry );
try {
tool.getSchemaMigrator( config ).doMigration( metadata, executionOptions, targetDescriptor );
tool.getSchemaMigrator( config ).doMigration( metadata, executionOptions, ContributableMatcher.ALL, targetDescriptor );
}
finally {
if ( exceptionHandler instanceof ExceptionHandlerCollectingImpl ) {

View File

@ -31,6 +31,7 @@ import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.log.DeprecationLogger;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
@ -61,7 +62,7 @@ public class SchemaValidator {
ExceptionHandlerHaltImpl.INSTANCE
);
tool.getSchemaValidator( config ).doValidation( metadata, executionOptions );
tool.getSchemaValidator( config ).doValidation( metadata, executionOptions, ContributableMatcher.ALL );
}
public static void main(String[] args) {

View File

@ -103,7 +103,7 @@ public enum Action {
return NONE;
}
if ( Action.class.isInstance( value ) ) {
if ( value instanceof Action ) {
return (Action) value;
}
@ -152,7 +152,7 @@ public enum Action {
return NONE;
}
if ( Action.class.isInstance( value ) ) {
if ( value instanceof Action ) {
return hbm2ddlSetting( (Action) value );
}

View File

@ -47,6 +47,7 @@ import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.tool.schema.spi.SchemaFilter;
@ -89,7 +90,11 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
}
@Override
public void doMigration(Metadata metadata, ExecutionOptions options, TargetDescriptor targetDescriptor) {
public void doMigration(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
TargetDescriptor targetDescriptor) {
if ( !targetDescriptor.getTargetTypes().isEmpty() ) {
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final DdlTransactionIsolator ddlTransactionIsolator = tool.getDdlTransactionIsolator( jdbcContext );
@ -112,7 +117,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
}
try {
performMigration( metadata, databaseInformation, options, jdbcContext.getDialect(), targets );
performMigration( metadata, databaseInformation, options, contributableInclusionFilter, jdbcContext.getDialect(), targets );
}
finally {
for ( GenerationTarget target : targets ) {
@ -144,6 +149,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
Metadata metadata,
DatabaseInformation existingDatabase,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
Formatter formatter,
Set<String> exportIdentifiers,
@ -156,6 +162,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
Metadata metadata,
DatabaseInformation existingDatabase,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
GenerationTarget... targets) {
final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
@ -209,6 +216,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
metadata,
existingDatabase,
options,
contributableInclusionFilter,
dialect,
formatter,
exportIdentifiers,
@ -219,8 +227,11 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
targets
);
tablesInformation.put( namespace, nameSpaceTablesInformation );
if ( schemaFilter.includeNamespace( namespace ) ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
for ( Sequence sequence : namespace.getSequences() ) {
if ( ! contributableInclusionFilter.matches( sequence ) ) {
continue;
}
checkExportIdentifier( sequence, exportIdentifiers );
final SequenceInformation sequenceInformation = existingDatabase.getSequenceInformation( sequence.getName() );
if ( sequenceInformation == null ) {
@ -241,14 +252,19 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
//NOTE : Foreign keys must be created *after* all tables of all namespaces for cross namespace fks. see HHH-10420
for ( Namespace namespace : database.getNamespaces() ) {
if ( schemaFilter.includeNamespace( namespace ) ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
final NameSpaceTablesInformation nameSpaceTablesInformation = tablesInformation.get( namespace );
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) ) {
final TableInformation tableInformation = nameSpaceTablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyForeignKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
}
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( table ) ) {
continue;
}
final TableInformation tableInformation = nameSpaceTablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyForeignKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
}
}
}

View File

@ -23,6 +23,7 @@ import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
@ -53,7 +54,10 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
}
@Override
public void doValidation(Metadata metadata, ExecutionOptions options) {
public void doValidation(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter) {
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final DdlTransactionIsolator isolator = tool.getDdlTransactionIsolator( jdbcContext );
@ -65,7 +69,7 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
);
try {
performValidation( metadata, databaseInformation, options, jdbcContext.getDialect() );
performValidation( metadata, databaseInformation, options, contributableInclusionFilter, jdbcContext.getDialect() );
}
finally {
try {
@ -83,22 +87,27 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
Metadata metadata,
DatabaseInformation databaseInformation,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( schemaFilter.includeNamespace( namespace ) ) {
validateTables( metadata, databaseInformation, options, dialect, namespace );
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
validateTables( metadata, databaseInformation, options, contributableInclusionFilter, dialect, namespace );
}
}
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( schemaFilter.includeNamespace( namespace ) ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
for ( Sequence sequence : namespace.getSequences() ) {
if ( schemaFilter.includeSequence( sequence ) ) {
final SequenceInformation sequenceInformation = databaseInformation.getSequenceInformation(
sequence.getName()
);
validateSequence( sequence, sequenceInformation );
if ( ! options.getSchemaFilter().includeSequence( sequence ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( sequence ) ) {
continue;
}
final SequenceInformation sequenceInformation = databaseInformation.getSequenceInformation( sequence.getName() );
validateSequence( sequence, sequenceInformation );
}
}
}
@ -108,6 +117,7 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
Metadata metadata,
DatabaseInformation databaseInformation,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect, Namespace namespace);
protected void validateTable(

View File

@ -18,6 +18,7 @@ import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
@ -40,6 +41,7 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
Metadata metadata,
DatabaseInformation existingDatabase,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
Formatter formatter,
Set<String> exportIdentifiers,
@ -50,7 +52,7 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
final NameSpaceTablesInformation tablesInformation =
new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() );
if ( schemaFilter.includeNamespace( namespace ) ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
createSchemaAndCatalog(
existingDatabase,
options,
@ -62,9 +64,12 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
namespace,
targets
);
final NameSpaceTablesInformation tables = existingDatabase.getTablesInformation( namespace );
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
if ( options.getSchemaFilter().includeTable( table )
&& table.isPhysicalTable()
&& contributableInclusionFilter.matches( table ) ) {
checkExportIdentifier( table, exportIdentifiers );
final TableInformation tableInformation = tables.getTableInformation( table );
if ( tableInformation == null ) {
@ -78,7 +83,9 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
}
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
if ( options.getSchemaFilter().includeTable( table )
&& table.isPhysicalTable()
&& contributableInclusionFilter.matches( table ) ) {
final TableInformation tableInformation = tablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets );

View File

@ -12,6 +12,7 @@ import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
@ -34,11 +35,14 @@ public class GroupedSchemaValidatorImpl extends AbstractSchemaValidator {
Metadata metadata,
DatabaseInformation databaseInformation,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect, Namespace namespace) {
final NameSpaceTablesInformation tables = databaseInformation.getTablesInformation( namespace );
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
if ( options.getSchemaFilter().includeTable( table )
&& table.isPhysicalTable()
&& contributableInclusionFilter.matches( table ) ) {
validateTable(
table,
tables.getTableInformation( table ),

View File

@ -18,6 +18,7 @@ import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
@ -40,6 +41,7 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
Metadata metadata,
DatabaseInformation existingDatabase,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
Formatter formatter,
Set<String> exportIdentifiers,
@ -51,7 +53,7 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
final NameSpaceTablesInformation tablesInformation =
new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() );
if ( schemaFilter.includeNamespace( namespace ) ) {
if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
createSchemaAndCatalog(
existingDatabase,
options,
@ -64,7 +66,9 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
targets
);
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
if ( options.getSchemaFilter().includeTable( table )
&& table.isPhysicalTable()
&& contributableInclusionFilter.matches( table ) ) {
checkExportIdentifier( table, exportIdentifiers );
final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
if ( tableInformation == null ) {
@ -78,7 +82,9 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
}
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
if ( options.getSchemaFilter().includeTable( table )
&& table.isPhysicalTable()
&& contributableInclusionFilter.matches( table ) ) {
final TableInformation tableInformation = tablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets );

View File

@ -12,6 +12,7 @@ import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
@ -34,10 +35,13 @@ public class IndividuallySchemaValidatorImpl extends AbstractSchemaValidator {
Metadata metadata,
DatabaseInformation databaseInformation,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
Namespace namespace) {
for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) {
if ( options.getSchemaFilter().includeTable( table )
&& table.isPhysicalTable()
&& contributableInclusionFilter.matches( table ) ) {
final TableInformation tableInformation = databaseInformation.getTableInformation(
table.getQualifiedTableName()
);

View File

@ -15,6 +15,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.hibernate.Internal;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
@ -48,6 +49,7 @@ import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromUrl;
import org.hibernate.tool.schema.internal.exec.ScriptSourceInputNonExistentImpl;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaCreator;
@ -105,6 +107,7 @@ public class SchemaCreatorImpl implements SchemaCreator {
public void doCreation(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor) {
if ( targetDescriptor.getTargetTypes().isEmpty() ) {
@ -119,13 +122,15 @@ public class SchemaCreatorImpl implements SchemaCreator {
true
);
doCreation( metadata, jdbcContext.getDialect(), options, sourceDescriptor, targets );
doCreation( metadata, jdbcContext.getDialect(), options, contributableInclusionFilter, sourceDescriptor, targets );
}
@Internal
public void doCreation(
Metadata metadata,
Dialect dialect,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
for ( GenerationTarget target : targets ) {
@ -133,7 +138,7 @@ public class SchemaCreatorImpl implements SchemaCreator {
}
try {
performCreation( metadata, dialect, options, sourceDescriptor, targets );
performCreation( metadata, dialect, options, contributableInclusionFilter, sourceDescriptor, targets );
}
finally {
for ( GenerationTarget target : targets ) {
@ -151,6 +156,7 @@ public class SchemaCreatorImpl implements SchemaCreator {
Metadata metadata,
Dialect dialect,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
final SqlScriptCommandExtractor commandExtractor = tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class );
@ -164,17 +170,17 @@ public class SchemaCreatorImpl implements SchemaCreator {
break;
}
case METADATA: {
createFromMetadata( metadata, options, dialect, formatter, targets );
createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
break;
}
case METADATA_THEN_SCRIPT: {
createFromMetadata( metadata, options, dialect, formatter, targets );
createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
break;
}
case SCRIPT_THEN_METADATA: {
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
createFromMetadata( metadata, options, dialect, formatter, targets );
createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
}
}
@ -197,12 +203,31 @@ public class SchemaCreatorImpl implements SchemaCreator {
}
}
@Internal
public void createFromMetadata(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
Formatter formatter,
GenerationTarget... targets) {
createFromMetadata(
metadata,
options,
(contributed) -> true,
dialect,
formatter,
targets
);
}
@Internal
public void createFromMetadata(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionMatcher,
Dialect dialect,
Formatter formatter,
GenerationTarget... targets) {
boolean tryToCreateCatalogs = false;
boolean tryToCreateSchemas = false;
if ( options.shouldManageNamespaces() ) {
@ -223,7 +248,7 @@ public class SchemaCreatorImpl implements SchemaCreator {
Set<Identifier> exportedCatalogs = new HashSet<>();
for ( Namespace namespace : database.getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace ) ) {
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
@ -276,16 +301,22 @@ public class SchemaCreatorImpl implements SchemaCreator {
// then, create all schema objects (tables, sequences, constraints, etc) in each schema
for ( Namespace namespace : database.getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace ) ) {
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
// sequences
for ( Sequence sequence : namespace.getSequences() ) {
if ( !schemaFilter.includeSequence( sequence ) ) {
if ( ! options.getSchemaFilter().includeSequence( sequence ) ) {
continue;
}
if ( ! contributableInclusionMatcher.matches( sequence ) ) {
continue;
}
checkExportIdentifier( sequence, exportIdentifiers );
applySqlStrings(
dialect.getSequenceExporter().getSqlCreateStrings(
sequence,
@ -307,10 +338,17 @@ public class SchemaCreatorImpl implements SchemaCreator {
if ( !table.isPhysicalTable() ){
continue;
}
if ( !schemaFilter.includeTable( table ) ) {
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionMatcher.matches( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
applySqlStrings(
dialect.getTableExporter().getSqlCreateStrings( table, metadata ),
formatter,
@ -324,9 +362,14 @@ public class SchemaCreatorImpl implements SchemaCreator {
if ( !table.isPhysicalTable() ){
continue;
}
if ( !schemaFilter.includeTable( table ) ) {
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionMatcher.matches( table ) ) {
continue;
}
// indexes
final Iterator indexItr = table.getIndexIterator();
while ( indexItr.hasNext() ) {
@ -359,14 +402,19 @@ public class SchemaCreatorImpl implements SchemaCreator {
for ( Namespace namespace : database.getNamespaces() ) {
// NOTE : Foreign keys must be created *after* unique keys for numerous DBs. See HHH-8390
if ( !schemaFilter.includeNamespace( namespace ) ) {
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
for ( Table table : namespace.getTables() ) {
if ( !schemaFilter.includeTable( table ) ) {
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionMatcher.matches( table ) ) {
continue;
}
// foreign keys
final Iterator fkItr = table.getForeignKeyIterator();
while ( fkItr.hasNext() ) {
@ -540,6 +588,11 @@ public class SchemaCreatorImpl implements SchemaCreator {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerHaltImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return schemaFilter;
}
};
createFromMetadata( metadata, options, dialect, FormatStyle.NONE.getFormatter(), target );
@ -547,7 +600,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
return target.commands;
}
/**
* Intended for use from tests
*/
@Internal
public void doCreation(
Metadata metadata,
final boolean manageNamespaces,
@ -562,6 +618,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
);
}
/**
* Intended for use from tests
*/
@Internal
public void doCreation(
Metadata metadata,
final ServiceRegistry serviceRegistry,
@ -586,7 +646,13 @@ public class SchemaCreatorImpl implements SchemaCreator {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return schemaFilter;
}
},
(contributed) -> true,
new SourceDescriptor() {
@Override
public SourceType getSourceType() {

View File

@ -15,6 +15,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.hibernate.Internal;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
@ -46,6 +47,7 @@ import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase;
import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.DelayedDropAction;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
@ -100,6 +102,7 @@ public class SchemaDropperImpl implements SchemaDropper {
public void doDrop(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor) {
@ -110,21 +113,39 @@ public class SchemaDropperImpl implements SchemaDropper {
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final GenerationTarget[] targets = tool.buildGenerationTargets( targetDescriptor, jdbcContext, options.getConfigurationValues(), true );
doDrop( metadata, options, jdbcContext.getDialect(), sourceDescriptor, targets );
doDrop( metadata, options, contributableInclusionFilter, jdbcContext.getDialect(), sourceDescriptor, targets );
}
/**
* For use from testing
*/
@Internal
public void doDrop(
Metadata metadata,
ExecutionOptions options,
Dialect dialect,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
doDrop( metadata, options, (contributed) -> true, dialect, sourceDescriptor, targets );
}
/**
* For use from testing
*/
@Internal
public void doDrop(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
for ( GenerationTarget target : targets ) {
target.prepare();
}
try {
performDrop( metadata, options, dialect, sourceDescriptor, targets );
performDrop( metadata, options, contributableInclusionFilter, dialect, sourceDescriptor, targets );
}
finally {
for ( GenerationTarget target : targets ) {
@ -141,6 +162,7 @@ public class SchemaDropperImpl implements SchemaDropper {
private void performDrop(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
@ -152,15 +174,15 @@ public class SchemaDropperImpl implements SchemaDropper {
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
}
else if ( sourceDescriptor.getSourceType() == SourceType.METADATA ) {
dropFromMetadata( metadata, options, dialect, formatter, targets );
dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
}
else if ( sourceDescriptor.getSourceType() == SourceType.METADATA_THEN_SCRIPT ) {
dropFromMetadata( metadata, options, dialect, formatter, targets );
dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
}
else {
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
dropFromMetadata( metadata, options, dialect, formatter, targets );
dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
}
}
@ -182,6 +204,7 @@ public class SchemaDropperImpl implements SchemaDropper {
private void dropFromMetadata(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
Formatter formatter,
GenerationTarget... targets) {
@ -221,30 +244,38 @@ public class SchemaDropperImpl implements SchemaDropper {
for ( Namespace namespace : database.getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace ) ) {
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
// we need to drop all constraints/indexes prior to dropping the tables
applyConstraintDropping( namespace, metadata, formatter, options, targets );
applyConstraintDropping( namespace, metadata, formatter, options, contributableInclusionFilter, targets );
// now it's safe to drop the tables
for ( Table table : namespace.getTables() ) {
if ( !table.isPhysicalTable() ) {
if ( ! table.isPhysicalTable() ) {
continue;
}
if ( !schemaFilter.includeTable( table ) ) {
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
applySqlStrings( dialect.getTableExporter().getSqlDropStrings( table, metadata ), formatter, options,targets );
}
for ( Sequence sequence : namespace.getSequences() ) {
if ( !schemaFilter.includeSequence( sequence ) ) {
if ( ! options.getSchemaFilter().includeSequence( sequence ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( sequence ) ) {
continue;
}
checkExportIdentifier( sequence, exportIdentifiers );
applySqlStrings( dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata ), formatter, options, targets );
}
}
@ -270,7 +301,7 @@ public class SchemaDropperImpl implements SchemaDropper {
for ( Namespace namespace : database.getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace ) ) {
if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue;
}
@ -309,6 +340,7 @@ public class SchemaDropperImpl implements SchemaDropper {
Metadata metadata,
Formatter formatter,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
GenerationTarget... targets) {
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
@ -320,7 +352,10 @@ public class SchemaDropperImpl implements SchemaDropper {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( !schemaFilter.includeTable( table ) ) {
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( table ) ) {
continue;
}
@ -408,9 +443,14 @@ public class SchemaDropperImpl implements SchemaDropper {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerHaltImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return schemaFilter;
}
};
dropFromMetadata( metadata, options, dialect, FormatStyle.NONE.getFormatter(), target );
dropFromMetadata( metadata, options, (contributed) -> true, dialect, FormatStyle.NONE.getFormatter(), target );
return target.commands;
}
@ -419,9 +459,13 @@ public class SchemaDropperImpl implements SchemaDropper {
public DelayedDropAction buildDelayedAction(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor) {
final JournalingGenerationTarget target = new JournalingGenerationTarget();
doDrop( metadata, options, tool.getServiceRegistry().getService( JdbcEnvironment.class ).getDialect(), sourceDescriptor, target );
final Dialect dialect = tool.getServiceRegistry().getService( JdbcEnvironment.class ).getDialect();
doDrop( metadata, options, contributableInclusionFilter, dialect, sourceDescriptor, target );
return new DelayedDropActionImpl( target.commands, tool.getCustomDatabaseGenerationTarget() );
}
@ -475,7 +519,13 @@ public class SchemaDropperImpl implements SchemaDropper {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return schemaFilter;
}
},
(contributed) -> true,
serviceRegistry.getService( JdbcEnvironment.class ).getDialect(),
new SourceDescriptor() {
@Override

View File

@ -0,0 +1,30 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.tool.schema.spi;
import org.hibernate.boot.model.relational.ContributableDatabaseObject;
/**
 * Matcher used by schema tooling to decide whether a given table or sequence
 * should be included in an operation, based on the object's
 * {@link ContributableDatabaseObject#getContributor() contributor} name.
 */
@FunctionalInterface
public interface ContributableMatcher {
	/**
	 * Matches everything - every contributable is included
	 */
	ContributableMatcher ALL = contributed -> true;

	/**
	 * Matches nothing - every contributable is excluded
	 */
	ContributableMatcher NONE = contributed -> false;

	/**
	 * Does the given `contributed` match this matcher?
	 *
	 * @param contributed the database object being checked
	 *
	 * @return {@code true} if the object should be included in the operation
	 */
	boolean matches(ContributableDatabaseObject contributed);
}

View File

@ -9,6 +9,7 @@ package org.hibernate.tool.schema.spi;
import java.util.Map;
import org.hibernate.Incubating;
import org.hibernate.boot.model.relational.Exportable;
/**
* Parameter object representing options for schema management tool execution
@ -18,6 +19,10 @@ import org.hibernate.Incubating;
@Incubating
public interface ExecutionOptions {
Map getConfigurationValues();
boolean shouldManageNamespaces();
ExceptionHandler getExceptionHandler();
SchemaFilter getSchemaFilter();
}

View File

@ -14,9 +14,7 @@ import org.hibernate.boot.Metadata;
* <p/>
* The actual contract here is kind of convoluted with the design
* idea of allowing this to work in ORM (JDBC) as well as in non-JDBC
* environments (OGM, e.g.) simultaneously. ExecutionContext allows
*
* @author Steve Ebersole
* environments (OGM, e.g.) simultaneously.
*/
@Incubating
public interface SchemaCreator {
@ -25,8 +23,14 @@ public interface SchemaCreator {
*
* @param metadata Represents the schema to be created.
* @param options Options for executing the creation
* @param contributableInclusionFilter Filter for Contributable instances to use
* @param sourceDescriptor description of the source(s) of creation commands
* @param targetDescriptor description of the target(s) for the creation commands
*/
void doCreation(Metadata metadata, ExecutionOptions options, SourceDescriptor sourceDescriptor, TargetDescriptor targetDescriptor);
void doCreation(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor);
}

View File

@ -11,8 +11,6 @@ import org.hibernate.boot.Metadata;
/**
* Service delegate for handling schema dropping.
*
* @author Steve Ebersole
*/
@Incubating
public interface SchemaDropper {
@ -21,10 +19,16 @@ public interface SchemaDropper {
*
* @param metadata Represents the schema to be dropped.
* @param options Options for executing the drop
* @param contributableInclusionFilter Filter for Contributable instances to use
* @param sourceDescriptor description of the source(s) of drop commands
* @param targetDescriptor description of the target(s) for the drop commands
*/
void doDrop(Metadata metadata, ExecutionOptions options, SourceDescriptor sourceDescriptor, TargetDescriptor targetDescriptor);
void doDrop(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor);
/**
* Build a delayed Runnable for performing schema dropping. This implicitly
@ -32,9 +36,14 @@ public interface SchemaDropper {
*
* @param metadata The metadata to drop
* @param options The drop options
* @param contributableInclusionFilter Filter for Contributable instances to use
* @param sourceDescriptor For access to the {@link SourceDescriptor#getScriptSourceInput()}
*
* @return The Runnable
*/
DelayedDropAction buildDelayedAction(Metadata metadata, ExecutionOptions options, SourceDescriptor sourceDescriptor);
DelayedDropAction buildDelayedAction(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor);
}

View File

@ -51,4 +51,24 @@ public interface SchemaFilter {
*/
boolean includeSequence(Sequence sequence);
/**
 * Matches everything - a filter which includes every namespace,
 * table and sequence it is asked about
 */
SchemaFilter ALL = new SchemaFilter() {
	@Override
	public boolean includeNamespace( Namespace namespace ) {
		// all namespaces are included
		return true;
	}

	@Override
	public boolean includeTable( Table table ) {
		// all tables are included
		return true;
	}

	@Override
	public boolean includeSequence( Sequence sequence ) {
		// all sequences are included
		return true;
	}
};
}

View File

@ -7,8 +7,13 @@
package org.hibernate.tool.schema.spi;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.hibernate.Internal;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.cfg.AvailableSettings;
@ -18,6 +23,7 @@ import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.Action;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.DefaultSchemaFilter;
import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.internal.Helper;
@ -48,48 +54,124 @@ public class SchemaManagementToolCoordinator {
public static void process(
final Metadata metadata,
final ServiceRegistry serviceRegistry,
final Map configurationValues,
final Map<?,?> configurationValues,
DelayedDropRegistry delayedDropRegistry) {
final ActionGrouping actions = ActionGrouping.interpret( configurationValues );
final Set<ActionGrouping> groupings = ActionGrouping.interpret( metadata, configurationValues );
if ( actions.getDatabaseAction() == Action.NONE && actions.getScriptAction() == Action.NONE ) {
if ( groupings.isEmpty() ) {
// no actions specified
log.debug( "No actions specified; doing nothing" );
log.debug( "No actions found; doing nothing" );
return;
}
Map<Action,Set<String>> databaseActionMap = null;
Map<Action,Set<String>> scriptActionMap = null;
for ( ActionGrouping grouping : groupings ) {
// for database action
if ( grouping.databaseAction != Action.NONE ) {
final Set<String> contributors;
if ( databaseActionMap == null ) {
databaseActionMap = new HashMap<>();
contributors = new HashSet<>();
databaseActionMap.put( grouping.databaseAction, contributors );
}
else {
contributors = databaseActionMap.computeIfAbsent(
grouping.databaseAction,
action -> new HashSet<>()
);
}
contributors.add( grouping.contributor );
}
// for script action
if ( grouping.scriptAction != Action.NONE ) {
final Set<String> contributors;
if ( scriptActionMap == null ) {
scriptActionMap = new HashMap<>();
contributors = new HashSet<>();
scriptActionMap.put( grouping.scriptAction, contributors );
}
else {
contributors = scriptActionMap.computeIfAbsent(
grouping.scriptAction,
action -> new HashSet<>()
);
}
contributors.add( grouping.contributor );
}
}
final SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class );
final ConfigurationService configService = serviceRegistry.getService( ConfigurationService.class );
boolean haltOnError = configService.getSetting( AvailableSettings.HBM2DDL_HALT_ON_ERROR, StandardConverters.BOOLEAN, false);
final boolean haltOnError = configService.getSetting(
AvailableSettings.HBM2DDL_HALT_ON_ERROR,
StandardConverters.BOOLEAN,
false
);
final ExceptionHandler exceptionHandler = haltOnError ? ExceptionHandlerHaltImpl.INSTANCE : ExceptionHandlerLoggedImpl.INSTANCE;
final ExecutionOptions executionOptions = buildExecutionOptions(
configurationValues,
haltOnError ? ExceptionHandlerHaltImpl.INSTANCE :
ExceptionHandlerLoggedImpl.INSTANCE
exceptionHandler
);
performScriptAction( actions.getScriptAction(), metadata, tool, serviceRegistry, executionOptions );
performDatabaseAction( actions.getDatabaseAction(), metadata, tool, serviceRegistry, executionOptions );
if ( databaseActionMap != null ) {
databaseActionMap.forEach(
(action, contributors) -> {
if ( actions.getDatabaseAction() == Action.CREATE_DROP ) {
//noinspection unchecked
delayedDropRegistry.registerOnCloseAction(
tool.getSchemaDropper( configurationValues ).buildDelayedAction(
metadata,
executionOptions,
buildDatabaseTargetDescriptor(
configurationValues,
DropSettingSelector.INSTANCE,
serviceRegistry
)
)
performDatabaseAction(
action,
metadata,
tool,
serviceRegistry,
executionOptions,
(exportable) -> contributors.contains( exportable.getContributor() )
);
if ( action == Action.CREATE_DROP ) {
delayedDropRegistry.registerOnCloseAction(
tool.getSchemaDropper( configurationValues ).buildDelayedAction(
metadata,
executionOptions,
(exportable) -> contributors.contains( exportable.getContributor() ),
buildDatabaseTargetDescriptor(
configurationValues,
DropSettingSelector.INSTANCE,
serviceRegistry
)
)
);
}
}
);
}
if ( scriptActionMap != null ) {
scriptActionMap.forEach(
(action, contributors) -> {
performScriptAction( action, metadata, tool, serviceRegistry, executionOptions );
}
);
}
}
public static ExecutionOptions buildExecutionOptions(
final Map configurationValues,
final Map<?,?> configurationValues,
final ExceptionHandler exceptionHandler) {
return buildExecutionOptions(
configurationValues,
DefaultSchemaFilter.INSTANCE,
exceptionHandler
);
}
public static ExecutionOptions buildExecutionOptions(
final Map<?,?> configurationValues,
final SchemaFilter schemaFilter,
final ExceptionHandler exceptionHandler) {
return new ExecutionOptions() {
@Override
@ -98,7 +180,7 @@ public class SchemaManagementToolCoordinator {
}
@Override
public Map getConfigurationValues() {
public Map<?,?> getConfigurationValues() {
return configurationValues;
}
@ -106,16 +188,21 @@ public class SchemaManagementToolCoordinator {
public ExceptionHandler getExceptionHandler() {
return exceptionHandler;
}
@Override
public SchemaFilter getSchemaFilter() {
return schemaFilter;
}
};
}
@SuppressWarnings("unchecked")
private static void performDatabaseAction(
final Action action,
Metadata metadata,
SchemaManagementTool tool,
ServiceRegistry serviceRegistry,
final ExecutionOptions executionOptions) {
final ExecutionOptions executionOptions,
ContributableMatcher contributableInclusionFilter) {
// IMPL NOTE : JPA binds source and target info..
@ -130,6 +217,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
metadata,
executionOptions,
contributableInclusionFilter,
createDescriptor,
createDescriptor
);
@ -145,6 +233,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
metadata,
executionOptions,
contributableInclusionFilter,
dropDescriptor,
dropDescriptor
);
@ -156,6 +245,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
metadata,
executionOptions,
contributableInclusionFilter,
createDescriptor,
createDescriptor
);
@ -170,6 +260,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
metadata,
executionOptions,
contributableInclusionFilter,
dropDescriptor,
dropDescriptor
);
@ -184,6 +275,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaMigrator( executionOptions.getConfigurationValues() ).doMigration(
metadata,
executionOptions,
contributableInclusionFilter,
migrateDescriptor
);
break;
@ -191,7 +283,8 @@ public class SchemaManagementToolCoordinator {
case VALIDATE: {
tool.getSchemaValidator( executionOptions.getConfigurationValues() ).doValidation(
metadata,
executionOptions
executionOptions,
contributableInclusionFilter
);
break;
}
@ -199,7 +292,7 @@ public class SchemaManagementToolCoordinator {
}
private static JpaTargetAndSourceDescriptor buildDatabaseTargetDescriptor(
Map configurationValues,
Map<?,?> configurationValues,
SettingSelector settingSelector,
ServiceRegistry serviceRegistry) {
final Object scriptSourceSetting = settingSelector.getScriptSourceSetting( configurationValues );
@ -215,8 +308,8 @@ public class SchemaManagementToolCoordinator {
);
}
final ScriptSourceInput scriptSourceInput = includesScripts ?
Helper.interpretScriptSourceSetting(
final ScriptSourceInput scriptSourceInput = includesScripts
? Helper.interpretScriptSourceSetting(
scriptSourceSetting,
serviceRegistry.getService( ClassLoaderService.class ),
(String) configurationValues.get( AvailableSettings.HBM2DDL_CHARSET_NAME )
@ -246,7 +339,6 @@ public class SchemaManagementToolCoordinator {
};
}
@SuppressWarnings("unchecked")
private static void performScriptAction(
Action scriptAction,
Metadata metadata,
@ -263,6 +355,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
metadata,
executionOptions,
(contributed) -> true,
createDescriptor,
createDescriptor
);
@ -278,6 +371,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
metadata,
executionOptions,
(contributed) -> true,
dropDescriptor,
dropDescriptor
);
@ -289,6 +383,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
metadata,
executionOptions,
(contributed) -> true,
createDescriptor,
createDescriptor
);
@ -303,6 +398,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
metadata,
executionOptions,
(contributed) -> true,
dropDescriptor,
dropDescriptor
);
@ -317,6 +413,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaMigrator( executionOptions.getConfigurationValues() ).doMigration(
metadata,
executionOptions,
(contributed) -> true,
migrateDescriptor
);
break;
@ -328,7 +425,7 @@ public class SchemaManagementToolCoordinator {
}
private static JpaTargetAndSourceDescriptor buildScriptTargetDescriptor(
Map configurationValues,
Map<?,?> configurationValues,
SettingSelector settingSelector,
ServiceRegistry serviceRegistry) {
final Object scriptSourceSetting = settingSelector.getScriptSourceSetting( configurationValues );
@ -381,9 +478,9 @@ public class SchemaManagementToolCoordinator {
private interface SettingSelector {
Object getSourceTypeSetting(Map configurationValues);
Object getScriptSourceSetting(Map configurationValues);
Object getScriptTargetSetting(Map configurationValues);
Object getSourceTypeSetting(Map<?,?> configurationValues);
Object getScriptSourceSetting(Map<?,?> configurationValues);
Object getScriptTargetSetting(Map<?,?> configurationValues);
}
private static class CreateSettingSelector implements SettingSelector {
@ -393,17 +490,17 @@ public class SchemaManagementToolCoordinator {
public static final CreateSettingSelector INSTANCE = new CreateSettingSelector();
@Override
public Object getSourceTypeSetting(Map configurationValues) {
public Object getSourceTypeSetting(Map<?,?> configurationValues) {
return configurationValues.get( HBM2DDL_CREATE_SOURCE );
}
@Override
public Object getScriptSourceSetting(Map configurationValues) {
public Object getScriptSourceSetting(Map<?,?> configurationValues) {
return configurationValues.get( HBM2DDL_CREATE_SCRIPT_SOURCE );
}
@Override
public Object getScriptTargetSetting(Map configurationValues) {
public Object getScriptTargetSetting(Map<?,?> configurationValues) {
return configurationValues.get( HBM2DDL_SCRIPTS_CREATE_TARGET );
}
}
@ -415,17 +512,17 @@ public class SchemaManagementToolCoordinator {
public static final DropSettingSelector INSTANCE = new DropSettingSelector();
@Override
public Object getSourceTypeSetting(Map configurationValues) {
public Object getSourceTypeSetting(Map<?,?> configurationValues) {
return configurationValues.get( HBM2DDL_DROP_SOURCE );
}
@Override
public Object getScriptSourceSetting(Map configurationValues) {
public Object getScriptSourceSetting(Map<?,?> configurationValues) {
return configurationValues.get( HBM2DDL_DROP_SCRIPT_SOURCE );
}
@Override
public Object getScriptTargetSetting(Map configurationValues) {
public Object getScriptTargetSetting(Map<?,?> configurationValues) {
return configurationValues.get( HBM2DDL_SCRIPTS_DROP_TARGET );
}
}
@ -440,19 +537,19 @@ public class SchemaManagementToolCoordinator {
// for now we reuse the CREATE settings where applicable
@Override
public Object getSourceTypeSetting(Map configurationValues) {
public Object getSourceTypeSetting(Map<?,?> configurationValues) {
// for now, don't allow script source
return SourceType.METADATA;
}
@Override
public Object getScriptSourceSetting(Map configurationValues) {
public Object getScriptSourceSetting(Map<?,?> configurationValues) {
// for now, don't allow script source
return null;
}
@Override
public Object getScriptTargetSetting(Map configurationValues) {
public Object getScriptTargetSetting(Map<?,?> configurationValues) {
// for now, reuse the CREATE script target setting
return configurationValues.get( HBM2DDL_SCRIPTS_CREATE_TARGET );
}
@ -464,14 +561,20 @@ public class SchemaManagementToolCoordinator {
* simultaneously
*/
public static class ActionGrouping {
private final String contributor;
private final Action databaseAction;
private final Action scriptAction;
public ActionGrouping(Action databaseAction, Action scriptAction) {
public ActionGrouping(String contributor, Action databaseAction, Action scriptAction) {
this.contributor = contributor;
this.databaseAction = databaseAction;
this.scriptAction = scriptAction;
}
public String getContributor() {
return contributor;
}
public Action getDatabaseAction() {
return databaseAction;
}
@ -480,6 +583,10 @@ public class SchemaManagementToolCoordinator {
return scriptAction;
}
/**
* For test use
*/
@Internal
public static ActionGrouping interpret(Map configurationValues) {
// interpret the JPA settings first
Action databaseAction = Action.interpretJpaSetting( configurationValues.get( HBM2DDL_DATABASE_ACTION ) );
@ -493,7 +600,69 @@ public class SchemaManagementToolCoordinator {
}
}
return new ActionGrouping( databaseAction, scriptAction );
return new ActionGrouping( "orm", databaseAction, scriptAction );
}
/**
 * Determine the schema-tooling actions to perform, per contributor.
 * <p/>
 * The base (non-contributor-specific) settings are read first; then, for each
 * contributor known to the {@code metadata}, contributor-qualified settings
 * (e.g. {@code hibernate.hbm2ddl.auto.<contributor>}) override the base values.
 * The legacy export action ({@code HBM2DDL_AUTO}) is only consulted when neither
 * JPA-style action applies.
 *
 * @param metadata the boot model, used to discover the known contributors
 * @param configurationValues the configuration settings
 *
 * @return one grouping per contributor that has at least one non-NONE action;
 *         contributors with nothing to do are omitted
 */
public static Set<ActionGrouping> interpret(Metadata metadata, Map<?,?> configurationValues) {
	// these represent the base (non-contributor-specific) values
	final Action rootDatabaseAction = Action.interpretJpaSetting( configurationValues.get( HBM2DDL_DATABASE_ACTION ) );
	final Action rootScriptAction = Action.interpretJpaSetting( configurationValues.get( HBM2DDL_SCRIPTS_ACTION ) );
	final Action rootExportAction = Action.interpretHbm2ddlSetting( configurationValues.get( HBM2DDL_AUTO ) );

	final Set<String> contributors = metadata.getContributors();
	final Set<ActionGrouping> groupings = new HashSet<>( contributors.size() );

	// for each contributor, look for specific tooling config values
	for ( String contributor : contributors ) {
		final Object contributorDatabaseActionSetting = configurationValues.get( HBM2DDL_DATABASE_ACTION + "." + contributor );
		final Object contributorScriptActionSetting = configurationValues.get( HBM2DDL_SCRIPTS_ACTION + "." + contributor );
		final Object contributorExportActionSetting = configurationValues.get( HBM2DDL_AUTO + "." + contributor );

		final Action contributorDatabaseAction = contributorDatabaseActionSetting == null
				? rootDatabaseAction
				: Action.interpretJpaSetting( contributorDatabaseActionSetting );
		final Action contributorScriptAction = contributorScriptActionSetting == null
				? rootScriptAction
				: Action.interpretJpaSetting( contributorScriptActionSetting );
		// `HBM2DDL_AUTO` values ("update", "create-drop", ...) are legacy hbm2ddl
		// values, not JPA values - interpret them with the hbm2ddl interpreter,
		// matching how `rootExportAction` is interpreted above.  Previously this
		// used `interpretJpaSetting`, which does not understand e.g. "update".
		final Action contributorExportAction = contributorExportActionSetting == null
				? rootExportAction
				: Action.interpretHbm2ddlSetting( contributorExportActionSetting );

		Action databaseAction = contributorDatabaseAction;
		if ( databaseAction == Action.NONE && contributorScriptAction == Action.NONE ) {
			// neither JPA-style action applies; fall back to the legacy export action
			if ( contributorExportAction != Action.NONE ) {
				databaseAction = contributorExportAction;
			}
			if ( databaseAction == Action.NONE ) {
				log.debugf( "No schema actions specified for contributor `%s`; doing nothing", contributor );
				continue;
			}
		}

		groupings.add( new ActionGrouping( contributor, databaseAction, contributorScriptAction ) );
	}

	return groupings;
}
@Override
public boolean equals(Object other) {
	// identity short-circuit
	if ( other == this ) {
		return true;
	}
	// strict class check (not instanceof), matching the original contract
	if ( other == null || other.getClass() != getClass() ) {
		return false;
	}
	final ActionGrouping that = (ActionGrouping) other;
	// all three components must agree
	return databaseAction == that.databaseAction
			&& scriptAction == that.scriptAction
			&& contributor.equals( that.contributor );
}
@Override
public int hashCode() {
	// hash only the contributor: objects equal per equals() necessarily share
	// a contributor, so this stays consistent with equals() while remaining cheap
	return Objects.hash( contributor );
}
}
}

View File

@ -21,7 +21,12 @@ public interface SchemaMigrator {
*
* @param metadata Represents the schema to be altered.
* @param options Options for executing the alteration
* @param contributableInclusionFilter Filter for Contributable instances to use
* @param targetDescriptor description of the target(s) for the alteration commands
*/
void doMigration(Metadata metadata, ExecutionOptions options, TargetDescriptor targetDescriptor);
void doMigration(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
TargetDescriptor targetDescriptor);
}

View File

@ -11,8 +11,6 @@ import org.hibernate.boot.Metadata;
/**
* Service delegate for handling schema validations
*
* @author Steve Ebersole
*/
@Incubating
public interface SchemaValidator {
@ -21,6 +19,7 @@ public interface SchemaValidator {
*
* @param metadata Represents the schema to be validated
* @param options Options for executing the validation
* @param contributableInclusionFilter Filter for Contributable instances to use
*/
void doValidation(Metadata metadata, ExecutionOptions options);
void doValidation(Metadata metadata, ExecutionOptions options, ContributableMatcher contributableInclusionFilter);
}

View File

@ -11,6 +11,7 @@ import java.util.Properties;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.tool.schema.Action;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator.ActionGrouping;
import org.junit.Test;
@ -26,7 +27,7 @@ public class SchemaToolingAutoActionTests {
final Properties props = new Properties();
props.put( AvailableSettings.HBM2DDL_AUTO, Action.CREATE_DROP );
final SchemaManagementToolCoordinator.ActionGrouping actionGrouping = SchemaManagementToolCoordinator.ActionGrouping.interpret( props );
final ActionGrouping actionGrouping = ActionGrouping.interpret( props );
assertThat( actionGrouping.getDatabaseAction(), is( Action.CREATE_DROP ) );

View File

@ -175,6 +175,10 @@ public class GeneratedValueTests extends BaseUnitTestCase {
.getDefaultNamespace()
.locateSequence( Identifier.toIdentifier( "my_db_sequence" ) );
assertThat( sequence, notNullValue() );
assertThat( sequence.getName().getSequenceName().getText(), is( "my_db_sequence" ) );
assertThat( sequence.getInitialValue(), is( 100 ) );
assertThat( sequence.getIncrementSize(), is( 500 ) );
final String[] sqlCreateStrings = new H2Dialect().getSequenceExporter().getSqlCreateStrings(
sequence,
bootModel
@ -320,7 +324,7 @@ public class GeneratedValueTests extends BaseUnitTestCase {
@Entity
public static class ExplicitSequenceGeneratorImplicitNameEntity {
/**
* This entity does not have explicit {@link SequenceGenerator} defined
* This entity does have explicit {@link SequenceGenerator} defined
*/
@Id
@GeneratedValue( strategy = GenerationType.SEQUENCE, generator = "my_db_sequence" )

View File

@ -23,6 +23,7 @@ import org.hibernate.service.spi.SessionFactoryServiceRegistry;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
@ -41,7 +42,7 @@ import static org.junit.jupiter.api.Assertions.fail;
"org/hibernate/orm/test/keymanytoone/bidir/component/EagerMapping.hbm.xml"
})
@SessionFactory(generateStatistics = true)
@ServiceRegistry(integrators = EagerKeyManyToOneTest.CustomLoadIntegrator.class)
@BootstrapServiceRegistry( integrators = EagerKeyManyToOneTest.CustomLoadIntegrator.class )
public class EagerKeyManyToOneTest {
public static class CustomLoadIntegrator implements Integrator {

View File

@ -0,0 +1,23 @@
<?xml version="1.0"?>
<!--
~ Hibernate, Relational Persistence for Idiomatic Java
~
~ License: GNU Lesser General Public License (LGPL), version 2.1 or later
~ See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
-->
<!DOCTYPE hibernate-mapping PUBLIC
"-//Hibernate/Hibernate Mapping DTD 3.0//EN"
"http://www.hibernate.org/dtd/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
	<!-- Dynamic-model entity (entity-name only, no POJO class) used to exercise
	     contributor-based schema filtering; presumably loaded as a classpath
	     resource by BasicContributorTests.Contributor — confirm against that test. -->
	<class entity-name="DynamicEntity">
		<id name="id" type="integer"/>
		<natural-id>
			<property name="referenceCode" type="string"/>
		</natural-id>
		<property name="data" type="string"/>
	</class>
</hibernate-mapping>

View File

@ -0,0 +1,260 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.orm.test.metamodel.contributed;
import java.io.InputStream;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import org.hibernate.boot.jaxb.Origin;
import org.hibernate.boot.jaxb.SourceType;
import org.hibernate.boot.jaxb.hbm.spi.JaxbHbmHibernateMapping;
import org.hibernate.boot.jaxb.internal.MappingBinder;
import org.hibernate.boot.jaxb.spi.Binding;
import org.hibernate.boot.model.source.internal.hbm.MappingDocument;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.AdditionalJaxbMappingProducer;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.internal.SchemaDropperImpl;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.SourceDescriptor;
import org.hibernate.testing.hamcrest.CaseInsensitiveContainsMatcher;
import org.hibernate.testing.hamcrest.CaseInsensitiveStartsWithMatcher;
import org.hibernate.testing.hamcrest.CollectionElementMatcher;
import org.hibernate.testing.orm.JournalingGenerationTarget;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry.JavaService;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.DomainModelScope;
import org.junit.jupiter.api.Test;
import org.jboss.jandex.IndexView;
import org.hamcrest.Matchers;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.not;
/**
 * Tests for schema-tooling on sub-sets of the relational model, filtered by
 * "contributor" (HHH-14469).  The domain model mixes an annotated entity
 * ({@link MainEntity}, table {@code main_table}) with a dynamic entity
 * ({@code DynamicEntity}) supplied by {@link Contributor} through the
 * {@link AdditionalJaxbMappingProducer} java-service, then verifies that
 * schema create/drop commands can be restricted to either subset via the
 * contributable-inclusion filter argument.
 *
 * @author Steve Ebersole
 */
@BootstrapServiceRegistry(
javaServices = @JavaService( role = AdditionalJaxbMappingProducer.class, impl = BasicContributorTests.Contributor.class )
)
@DomainModel( annotatedClasses = BasicContributorTests.MainEntity.class )
public class BasicContributorTests {
@Test
public void testContributorFiltering(DomainModelScope scope) {
final MetadataImplementor metadata = scope.getDomainModel();
// one annotated entity + one contributed dynamic entity
assertThat( metadata.getEntityBindings().size(), Matchers.is( 2 ) );
final StandardServiceRegistry serviceRegistry = metadata
.getMetadataBuildingOptions()
.getServiceRegistry();
final Map settings = serviceRegistry.getService( ConfigurationService.class ).getSettings();
// minimal ExecutionOptions: no namespace management, log-and-continue exceptions
ExecutionOptions options = new ExecutionOptions() {
@Override
public Map getConfigurationValues() {
return settings;
}
@Override
public boolean shouldManageNamespaces() {
return false;
}
@Override
public ExceptionHandler getExceptionHandler() {
return Throwable::printStackTrace;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
};
final SchemaManagementTool schemaManagementTool = serviceRegistry.getService( SchemaManagementTool.class );
// source the schema commands from the in-memory Metadata, not a script
final SourceDescriptor sourceDescriptor = new SourceDescriptor() {
@Override
public org.hibernate.tool.schema.SourceType getSourceType() {
return org.hibernate.tool.schema.SourceType.METADATA;
}
@Override
public ScriptSourceInput getScriptSourceInput() {
return null;
}
};
testDropping( metadata, settings, schemaManagementTool, sourceDescriptor, options );
testCreating( metadata, settings, schemaManagementTool, sourceDescriptor, options );
}
/**
 * Runs schema creation three times — unfiltered, "orm"-only, "test"-only —
 * capturing the generated DDL and asserting which tables appear.
 */
private void testCreating(
MetadataImplementor metadata,
Map settings,
SchemaManagementTool schemaManagementTool,
SourceDescriptor sourceDescriptor,
ExecutionOptions options) {
final SchemaCreatorImpl schemaCreator = (SchemaCreatorImpl) schemaManagementTool.getSchemaCreator( settings );
final Dialect dialect = new H2Dialect();
// JournalingGenerationTarget records the DDL commands instead of executing them
final JournalingGenerationTarget targetDescriptor = new JournalingGenerationTarget();
// first, unfiltered
targetDescriptor.clear();
schemaCreator.doCreation( metadata, dialect, options, contributed -> true, sourceDescriptor, targetDescriptor );
// unfiltered creation emits DDL for both contributors' tables
assertThat(
targetDescriptor.getCommands(),
CollectionElementMatcher.hasAllOf(
CaseInsensitiveContainsMatcher.contains( "main_table" ),
CaseInsensitiveContainsMatcher.contains( "DynamicEntity" )
)
);
// filter by `orm`
targetDescriptor.clear();
schemaCreator.doCreation( metadata, dialect, options, contributed -> "orm".equals( contributed.getContributor() ), sourceDescriptor, targetDescriptor );
// "orm" filter must exclude the contributed DynamicEntity
assertThat(
targetDescriptor.getCommands(),
CollectionElementMatcher.hasAllOf( not( CaseInsensitiveContainsMatcher.contains( "DynamicEntity" ) ) )
);
// filter by `test`
targetDescriptor.clear();
schemaCreator.doCreation( metadata, dialect, options, contributed -> "test".equals( contributed.getContributor() ), sourceDescriptor, targetDescriptor );
// "test" filter must exclude the annotated entity's table
assertThat(
targetDescriptor.getCommands(),
CollectionElementMatcher.hasAllOf( not( CaseInsensitiveContainsMatcher.contains( "main_table" ) ) )
);
}
/**
 * Mirror of {@link #testCreating} for schema dropping: unfiltered,
 * "orm"-only, and "test"-only drops, asserting which DROP TABLE
 * statements are journaled.
 */
private void testDropping(
MetadataImplementor metadata,
Map settings,
SchemaManagementTool schemaManagementTool,
SourceDescriptor sourceDescriptor, ExecutionOptions options) {
final SchemaDropperImpl schemaDropper = (SchemaDropperImpl) schemaManagementTool.getSchemaDropper( settings );
final JournalingGenerationTarget targetDescriptor = new JournalingGenerationTarget();
final Dialect dialect = new H2Dialect();
// first, unfiltered
targetDescriptor.clear();
schemaDropper.doDrop( metadata, options, contributed -> true, dialect, sourceDescriptor, targetDescriptor );
assertThat(
targetDescriptor.getCommands(),
CollectionElementMatcher.hasAllOf(
CaseInsensitiveStartsWithMatcher.startsWith( "drop table main_table" ),
CaseInsensitiveStartsWithMatcher.startsWith( "drop table DynamicEntity" )
)
);
// filter by `orm`
targetDescriptor.clear();
schemaDropper.doDrop( metadata, options, contributed -> "orm".equals( contributed.getContributor() ), dialect, sourceDescriptor, targetDescriptor );
assertThat(
targetDescriptor.getCommands(),
CollectionElementMatcher.hasAllOf( not( CaseInsensitiveStartsWithMatcher.startsWith( "drop table DynamicEntity" ) ) )
);
// filter by `test`
targetDescriptor.clear();
schemaDropper.doDrop( metadata, options, contributed -> "test".equals( contributed.getContributor() ), dialect, sourceDescriptor, targetDescriptor );
assertThat(
targetDescriptor.getCommands(),
CollectionElementMatcher.hasAllOf( not( CaseInsensitiveStartsWithMatcher.startsWith( "drop table main_table" ) ) )
);
}
/**
 * Plain annotated entity; implicitly belongs to the default "orm" contributor
 * (presumably — confirm against InFlightMetadataCollector's contributor handling).
 */
@Entity( name = "MainEntity" )
@Table( name = "main_table" )
static class MainEntity {
@Id
private Integer id;
String name;
// no-arg constructor, presumably required for Hibernate instantiation
private MainEntity() {
}
public MainEntity(Integer id, String name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
/**
 * {@link AdditionalJaxbMappingProducer} java-service that contributes the
 * {@code DynamicEntity} hbm.xml mapping under the contributor name "test".
 */
public static class Contributor implements AdditionalJaxbMappingProducer {
// public no-arg constructor required for java-service instantiation
public Contributor() {
}
@Override
public Collection<MappingDocument> produceAdditionalMappings(
MetadataImplementor metadata,
IndexView jandexIndex,
MappingBinder mappingBinder,
MetadataBuildingContext buildingContext) {
return Collections.singletonList( createMappingDocument( mappingBinder, buildingContext ) );
}
// Loads the companion BasicContributorTests.hbm.xml resource and binds it
// into a MappingDocument; the first MappingDocument argument is presumably
// the contributor name ("test") — confirm against MappingDocument's ctor.
private MappingDocument createMappingDocument(MappingBinder mappingBinder, MetadataBuildingContext buildingContext) {
final Origin origin = new Origin( SourceType.OTHER, "test" );
final ClassLoaderService classLoaderService = buildingContext.getBootstrapContext()
.getServiceRegistry()
.getService( ClassLoaderService.class );
final InputStream inputStream = classLoaderService.locateResourceStream( "org/hibernate/orm/test/metamodel/contributed/BasicContributorTests.hbm.xml" );
final Binding<JaxbHbmHibernateMapping> jaxbBinding = mappingBinder.bind( inputStream, origin );
final JaxbHbmHibernateMapping jaxbRoot = jaxbBinding.getRoot();
return new MappingDocument(
"test",
jaxbRoot,
origin,
buildingContext
);
}
}
}

View File

@ -0,0 +1,56 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.orm.test.metamodel.contributed;
import org.hibernate.boot.spi.AdditionalJaxbMappingProducer;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.RuntimeMetamodels;
import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.NotImplementedYet;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.nullValue;
/**
 * Verifies that an entity contributed by a non-"orm" contributor
 * ({@code DynamicEntity}, supplied by {@link BasicContributorTests.Contributor})
 * is hidden from both the JPA and the mapping runtime metamodels.
 * Marked {@code @NotImplementedYet}: the hiding behavior does not exist yet.
 *
 * @author Steve Ebersole
 */
@BootstrapServiceRegistry(
javaServices = @BootstrapServiceRegistry.JavaService( role = AdditionalJaxbMappingProducer.class, impl = BasicContributorTests.Contributor.class )
)
@ServiceRegistry(
settings = @Setting(
name = AvailableSettings.JPA_METAMODEL_POPULATION,
value = "ignoreUnsupported"
)
)
@DomainModel( annotatedClasses = BasicContributorTests.MainEntity.class )
@SessionFactory
public class EntityHidingTests {
@Test
@NotImplementedYet( reason = "Contributed entity hiding is not yet implemented", strict = false )
public void testModel(SessionFactoryScope scope) {
final RuntimeMetamodels metamodels = scope.getSessionFactory().getRuntimeMetamodels();
// the contributed entity should not be exposed through the JPA metamodel...
final EntityDomainType<Object> jpaDescriptor = metamodels.getJpaMetamodel().entity( "DynamicEntity" );
assertThat( jpaDescriptor, nullValue() );
// ...nor through the mapping metamodel
final EntityPersister persister = metamodels.getMappingMetamodel().findEntityDescriptor( "DynamicEntity" );
assertThat( persister, nullValue() );
}
}

View File

@ -24,9 +24,11 @@ import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.internal.script.MultiLineSqlScriptExtracter;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
import org.hibernate.tool.schema.spi.SourceDescriptor;
@ -87,6 +89,7 @@ public class StatementsWithoutTerminalCharsImportFileTest extends BaseUnitTestCa
schemaCreator.doCreation(
buildMappings( ssr ),
this,
ContributableMatcher.ALL,
SourceDescriptorImpl.INSTANCE,
TargetDescriptorImpl.INSTANCE
);
@ -119,6 +122,11 @@ public class StatementsWithoutTerminalCharsImportFileTest extends BaseUnitTestCa
return ExceptionHandlerLoggedImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
private static class SourceDescriptorImpl implements SourceDescriptor {
/**
* Singleton access

View File

@ -60,7 +60,7 @@ public class ValueVisitorTest extends BaseUnitTestCase {
final MetadataImplementor metadata =
(MetadataImplementor) new MetadataSources( serviceRegistry )
.buildMetadata();
final Table tbl = new Table();
final Table tbl = new Table( "orm" );
final RootClass rootClass = new RootClass( metadataBuildingContext );
ValueVisitor vv = new ValueVisitorValidator();

View File

@ -85,7 +85,7 @@ public class ExportIdentifierTest extends BaseUnitTestCase {
Set<String> exportIdentifierSet) {
for ( Namespace namespace : namespaces ) {
final Table table = new Table( namespace, Identifier.toIdentifier( name ), false );
final Table table = new Table( "orm", namespace, Identifier.toIdentifier( name ), false );
addExportIdentifier( table, exportIdentifierList, exportIdentifierSet );
final ForeignKey foreignKey = new ForeignKey();
@ -118,6 +118,7 @@ public class ExportIdentifierTest extends BaseUnitTestCase {
for ( Namespace namespace : namespaces ) {
addExportIdentifier(
new Sequence(
"orm",
namespace.getName().getCatalog(),
namespace.getName().getSchema(),
Identifier.toIdentifier( name )
@ -138,7 +139,7 @@ public class ExportIdentifierTest extends BaseUnitTestCase {
namespace,
"create",
"drop",
Collections.<String>emptySet()
Collections.emptySet()
),
exportIdentifierList,
exportIdentifierSet
@ -158,7 +159,7 @@ public class ExportIdentifierTest extends BaseUnitTestCase {
namespace,
"create",
"drop",
Collections.<String>emptySet()
Collections.emptySet()
),
exportIdentifierList,
exportIdentifierSet

View File

@ -22,9 +22,11 @@ import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaDropper;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
@ -58,8 +60,13 @@ public class SchemaDropTest extends BaseUnitTestCase implements ExecutionOptions
@Test
public void testDropSequence() {
getSchemaDropper()
.doDrop( metadata, this, getSourceDescriptor(), getTargetDescriptor() );
getSchemaDropper().doDrop(
metadata,
this,
ContributableMatcher.ALL,
getSourceDescriptor(),
getTargetDescriptor()
);
}
private SchemaDropper getSchemaDropper() {
@ -109,6 +116,11 @@ public class SchemaDropTest extends BaseUnitTestCase implements ExecutionOptions
return this;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
@Override
public void handleException(CommandAcceptanceException exception) {
throw exception;

View File

@ -25,8 +25,10 @@ import org.hibernate.mapping.Table;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
import org.hibernate.tool.schema.spi.TargetDescriptor;
@ -66,6 +68,7 @@ public class SchemaUpdateTableBackedSequenceTest extends BaseUnitTestCase {
TableStructure tableStructure = new TableStructure(
database.getJdbcEnvironment(),
"orm",
new QualifiedTableName( null, null, Identifier.toIdentifier( "test_seq" ) ),
Identifier.toIdentifier( "nextval" ),
20,
@ -98,7 +101,13 @@ public class SchemaUpdateTableBackedSequenceTest extends BaseUnitTestCase {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
},
ContributableMatcher.ALL,
new TargetDescriptor() {
@Override
public EnumSet<TargetType> getTargetTypes() {

View File

@ -35,8 +35,10 @@ import org.hibernate.tool.schema.internal.SchemaDropperImpl;
import org.hibernate.tool.schema.internal.IndividuallySchemaMigratorImpl;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToStdout;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
@ -155,17 +157,24 @@ public class CrossSchemaForeignKeyGenerationTest extends BaseUnitTestCase {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
};
new IndividuallySchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration(
metadata,
options,
ContributableMatcher.ALL,
TargetDescriptorImpl.INSTANCE
);
new IndividuallySchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration(
metadata,
options,
ContributableMatcher.ALL,
TargetDescriptorImpl.INSTANCE
);
@ -216,11 +225,17 @@ public class CrossSchemaForeignKeyGenerationTest extends BaseUnitTestCase {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
};
new GroupedSchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration(
metadata,
options,
ContributableMatcher.ALL,
TargetDescriptorImpl.INSTANCE
);

View File

@ -31,8 +31,10 @@ import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.internal.HibernateSchemaManagementTool;
import org.hibernate.tool.schema.internal.IndividuallySchemaMigratorImpl;
import org.hibernate.tool.schema.internal.exec.ScriptTargetOutputToFile;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
import org.hibernate.tool.schema.spi.TargetDescriptor;
@ -86,6 +88,11 @@ public class UniqueConstraintDropTest {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
};
}
@ -102,6 +109,7 @@ public class UniqueConstraintDropTest {
.doMigration(
metadata,
options,
ContributableMatcher.ALL,
new TargetDescriptorImpl()
);

View File

@ -25,8 +25,10 @@ import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
@ -94,7 +96,8 @@ public class LongVarcharValidationTest implements ExecutionOptions {
private void doValidation(MetadataImplementor metadata) {
ssr.getService( SchemaManagementTool.class ).getSchemaValidator( null ).doValidation(
metadata,
this
this,
ContributableMatcher.ALL
);
}
@ -102,6 +105,7 @@ public class LongVarcharValidationTest implements ExecutionOptions {
ssr.getService( SchemaManagementTool.class ).getSchemaCreator( null ).doCreation(
metadata,
this,
ContributableMatcher.ALL,
new SourceDescriptor() {
@Override
public SourceType getSourceType() {
@ -131,6 +135,7 @@ public class LongVarcharValidationTest implements ExecutionOptions {
ssr.getService( SchemaManagementTool.class ).getSchemaDropper( null ).doDrop(
metadata,
this,
ContributableMatcher.ALL,
new SourceDescriptor() {
@Override
public SourceType getSourceType() {
@ -179,4 +184,9 @@ public class LongVarcharValidationTest implements ExecutionOptions {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
}

View File

@ -26,8 +26,10 @@ import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput;
@ -35,6 +37,7 @@ import org.hibernate.tool.schema.spi.SourceDescriptor;
import org.hibernate.tool.schema.spi.TargetDescriptor;
import org.hibernate.testing.TestForIssue;
import org.hibernate.test.legacy.S;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -102,13 +105,14 @@ public class NumericValidationTest implements ExecutionOptions {
private void doValidation() {
ssr.getService( SchemaManagementTool.class ).getSchemaValidator( null )
.doValidation( metadata, this );
.doValidation( metadata, this, ContributableMatcher.ALL );
}
private void createSchema() {
ssr.getService( SchemaManagementTool.class ).getSchemaCreator( null ).doCreation(
metadata,
this,
ContributableMatcher.ALL,
new SourceDescriptor() {
@Override
public SourceType getSourceType() {
@ -162,4 +166,9 @@ public class NumericValidationTest implements ExecutionOptions {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
}

View File

@ -75,6 +75,10 @@ public class StoredProcedureResultSetMappingTest extends BaseUnitTestCase {
}
public static class ProcedureDefinition implements AuxiliaryDatabaseObject {
public ProcedureDefinition() {
}
@Override
public boolean appliesToDialect(Dialect dialect) {
return true;

View File

@ -12,6 +12,7 @@ import java.util.Map;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
/**
* @author Steve Ebersole
@ -38,6 +39,11 @@ public class ExecutionOptionsTestImpl implements ExecutionOptions, ExceptionHand
return this;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
@Override
public void handleException(CommandAcceptanceException exception) {
throw exception;

View File

@ -10,6 +10,7 @@ import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.tool.schema.internal.DefaultSchemaFilter;
import org.hibernate.tool.schema.internal.GroupedSchemaValidatorImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.TestForIssue;
@ -23,6 +24,6 @@ public class GroupedSchemaValidatorImplTest extends IndividuallySchemaValidatorI
@Override
protected void getSchemaValidator(MetadataImplementor metadata) {
new GroupedSchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE )
.doValidation( metadata, executionOptions );
.doValidation( metadata, executionOptions, ContributableMatcher.ALL );
}
}

View File

@ -36,8 +36,10 @@ import org.hibernate.tool.schema.internal.IndividuallySchemaValidatorImpl;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.internal.SchemaDropperImpl;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.SchemaValidator;
@ -114,6 +116,11 @@ public class IndividuallySchemaValidatorImplConnectionTest extends BaseUnitTestC
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
};
}
@ -171,7 +178,7 @@ public class IndividuallySchemaValidatorImplConnectionTest extends BaseUnitTestC
SchemaValidator schemaValidator = new IndividuallySchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE );
assertFalse( connection.getAutoCommit() );
schemaValidator.doValidation( metadata, executionOptions );
schemaValidator.doValidation( metadata, executionOptions, ContributableMatcher.ALL );
assertFalse( connection.getAutoCommit() );
}
finally {

View File

@ -33,8 +33,10 @@ import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.internal.SchemaDropperImpl;
import org.hibernate.tool.schema.internal.IndividuallySchemaValidatorImpl;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
@ -96,6 +98,11 @@ public class IndividuallySchemaValidatorImplTest extends BaseUnitTestCase {
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
};
}
@ -250,7 +257,7 @@ public class IndividuallySchemaValidatorImplTest extends BaseUnitTestCase {
protected void getSchemaValidator(MetadataImplementor metadata) {
new IndividuallySchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE )
.doValidation( metadata, executionOptions );
.doValidation( metadata, executionOptions, ContributableMatcher.ALL );
}
protected Properties properties() {

View File

@ -6,7 +6,6 @@
*/
package org.hibernate.test.tool.schema;
import java.sql.SQLSyntaxErrorException;
import java.util.Collections;
import java.util.EnumSet;
import javax.persistence.Entity;
@ -24,6 +23,7 @@ import org.hibernate.resource.transaction.spi.TransactionCoordinatorBuilder;
import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaDropper;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
@ -95,6 +95,7 @@ public class SchemaToolTransactionHandlingTest extends BaseUnitTestCase {
schemaDropper.doDrop(
mappings,
ExecutionOptionsTestImpl.INSTANCE,
ContributableMatcher.ALL,
SourceDescriptorImpl.INSTANCE,
TargetDescriptorImpl.INSTANCE
);
@ -104,6 +105,7 @@ public class SchemaToolTransactionHandlingTest extends BaseUnitTestCase {
schemaCreator.doCreation(
mappings,
ExecutionOptionsTestImpl.INSTANCE,
ContributableMatcher.ALL,
SourceDescriptorImpl.INSTANCE,
TargetDescriptorImpl.INSTANCE
);
@ -113,6 +115,7 @@ public class SchemaToolTransactionHandlingTest extends BaseUnitTestCase {
schemaDropper.doDrop(
mappings,
ExecutionOptionsTestImpl.INSTANCE,
ContributableMatcher.ALL,
SourceDescriptorImpl.INSTANCE,
TargetDescriptorImpl.INSTANCE
);
@ -166,6 +169,7 @@ public class SchemaToolTransactionHandlingTest extends BaseUnitTestCase {
smt.getSchemaCreator( Collections.emptyMap() ).doCreation(
mappings,
ExecutionOptionsTestImpl.INSTANCE,
ContributableMatcher.ALL,
SourceDescriptorImpl.INSTANCE,
TargetDescriptorImpl.INSTANCE
);
@ -177,7 +181,8 @@ public class SchemaToolTransactionHandlingTest extends BaseUnitTestCase {
try {
smt.getSchemaValidator( Collections.emptyMap() ).doValidation(
mappings,
ExecutionOptionsTestImpl.INSTANCE
ExecutionOptionsTestImpl.INSTANCE,
ContributableMatcher.ALL
);
}
finally {
@ -185,6 +190,7 @@ public class SchemaToolTransactionHandlingTest extends BaseUnitTestCase {
smt.getSchemaDropper( Collections.emptyMap() ).doDrop(
mappings,
ExecutionOptionsTestImpl.INSTANCE,
ContributableMatcher.ALL,
SourceDescriptorImpl.INSTANCE,
TargetDescriptorImpl.INSTANCE
);

View File

@ -30,6 +30,7 @@ import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.AbstractSchemaMigrator;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.junit.Assert;
import org.junit.Test;
@ -53,10 +54,19 @@ public class CheckForExistingForeignKeyTest {
* Needed implementation. Not used in test.
*/
@Override
protected NameSpaceTablesInformation performTablesMigration(Metadata metadata, DatabaseInformation existingDatabase, ExecutionOptions options,
protected NameSpaceTablesInformation performTablesMigration(
Metadata metadata,
DatabaseInformation existingDatabase,
ExecutionOptions options,
ContributableMatcher inclusionFilter,
Dialect dialect,
Formatter formatter, Set<String> exportIdentifiers, boolean tryToCreateCatalogs, boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs, Namespace namespace, GenerationTarget[] targets) {
Formatter formatter,
Set<String> exportIdentifiers,
boolean tryToCreateCatalogs,
boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs,
Namespace namespace,
GenerationTarget[] targets) {
return null;
}
}
@ -193,7 +203,7 @@ public class CheckForExistingForeignKeyTest {
ForeignKey foreignKey = new ForeignKey();
foreignKey.setName( "objectId2id" );
foreignKey.addColumn( new Column( "id" ) );
foreignKey.setReferencedTable( new Table( "table2" ) );
foreignKey.setReferencedTable( new Table( "orm", "table2" ) );
InformationExtractor informationExtractor = Mockito.mock( InformationExtractor.class );
IdentifierHelper identifierHelper = new IdentifierHelperImpl();
@ -230,7 +240,7 @@ public class CheckForExistingForeignKeyTest {
ForeignKey foreignKey = new ForeignKey();
foreignKey.setName( "objectId2id_1" );
foreignKey.addColumn( new Column( "id" ) );
foreignKey.setReferencedTable( new Table( "table2" ) );
foreignKey.setReferencedTable( new Table( "orm", "table2" ) );
InformationExtractor informationExtractor = Mockito.mock( InformationExtractor.class );
IdentifierHelper identifierHelper = new IdentifierHelperImpl();
@ -267,7 +277,7 @@ public class CheckForExistingForeignKeyTest {
ForeignKey foreignKey = new ForeignKey();
foreignKey.setName( "objectId2id_1" ); // Make sure the match is not successful based on key name
foreignKey.addColumn( new Column( "id" ) );
foreignKey.setReferencedTable( new Table( "table2" ) );
foreignKey.setReferencedTable( new Table( "orm", "table2" ) );
Name schemaName = new Name( new Identifier( "-", false ), new Identifier( "-", false ) );
InformationExtractor informationExtractor = Mockito.mock( InformationExtractor.class );
@ -305,7 +315,7 @@ public class CheckForExistingForeignKeyTest {
ForeignKey foreignKey = new ForeignKey();
foreignKey.setName( "objectId2id_1" ); // Make sure the match is not successful based on key name
foreignKey.addColumn( new Column( "id" ) );
foreignKey.setReferencedTable( new Table( "table2" ) );
foreignKey.setReferencedTable( new Table( "orm", "table2" ) );
Name schemaName = new Name( new Identifier( "-", false ), new Identifier( "-", false ) );
InformationExtractor informationExtractor = Mockito.mock( InformationExtractor.class );

View File

@ -65,29 +65,26 @@ public class AdditionalJaxbMappingProducerImpl implements AdditionalJaxbMappingP
// atm we do not have distinct origin info for envers
final Origin origin = new Origin( SourceType.OTHER, "envers" );
final MappingCollector mappingCollector = new MappingCollector() {
@Override
public void addDocument(Document document) throws DocumentException {
logXml( document );
final MappingCollector mappingCollector = (document) -> {
logXml( document );
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
final Writer w = new BufferedWriter( new OutputStreamWriter( baos, "UTF-8" ) );
final XMLWriter xw = new XMLWriter( w, new OutputFormat( " ", true ) );
xw.write( document );
w.flush();
}
catch (IOException e) {
throw new HibernateException( "Unable to bind Envers-generated XML", e );
}
ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream( baos.toByteArray() );
BufferedInputStream bufferedInputStream = new BufferedInputStream( byteArrayInputStream );
final Binding<JaxbHbmHibernateMapping> jaxbBinding = mappingBinder.bind( bufferedInputStream, origin );
final JaxbHbmHibernateMapping jaxbRoot = jaxbBinding.getRoot();
additionalMappingDocuments.add( new MappingDocument( jaxbRoot, origin, buildingContext ) );
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
final Writer w = new BufferedWriter( new OutputStreamWriter( baos, "UTF-8" ) );
final XMLWriter xw = new XMLWriter( w, new OutputFormat( " ", true ) );
xw.write( document );
w.flush();
}
catch (IOException e) {
throw new HibernateException( "Unable to bind Envers-generated XML", e );
}
ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream( baos.toByteArray() );
BufferedInputStream bufferedInputStream = new BufferedInputStream( byteArrayInputStream );
final Binding<JaxbHbmHibernateMapping> jaxbBinding = mappingBinder.bind( bufferedInputStream, origin );
final JaxbHbmHibernateMapping jaxbRoot = jaxbBinding.getRoot();
additionalMappingDocuments.add( new MappingDocument( "envers", jaxbRoot, origin, buildingContext ) );
};
enversService.initialize( metadata, mappingCollector );

View File

@ -24,15 +24,15 @@ public class OrderedSequenceStructure extends SequenceStructure {
private static final String ORDER = " ORDER";
private AuxiliaryDatabaseObject sequenceObject;
private final AuxiliaryDatabaseObject sequenceObject;
public OrderedSequenceStructure(
JdbcEnvironment jdbcEnvironment,
QualifiedName qualifiedSequenceName,
int initialValue,
int incrementSize,
Class numberType) {
super( jdbcEnvironment, qualifiedSequenceName, initialValue, incrementSize, numberType );
Class<?> numberType) {
super( jdbcEnvironment, "envers", qualifiedSequenceName, initialValue, incrementSize, numberType );
this.sequenceObject = new OrderedSequence();
}
@ -83,6 +83,7 @@ public class OrderedSequenceStructure extends SequenceStructure {
getSourceIncrementSize()
);
//noinspection deprecation
if ( dialect instanceof Oracle8iDialect ) {
for ( int i = 0; i < createStrings.length; ++i ) {
createStrings[ i ] = createStrings[ i ] + ORDER;
@ -94,7 +95,7 @@ public class OrderedSequenceStructure extends SequenceStructure {
@Override
public String[] sqlDropStrings(Dialect dialect) {
return dialect.getDropSequenceStrings( getName() );
return dialect.getSequenceSupport().getDropSequenceStrings( getName() );
}
}
}

View File

@ -51,6 +51,7 @@ public class ExportIdentifierTest extends BaseUnitTestCase {
for ( Namespace namespace : database.getNamespaces() ) {
final SequenceStructure sequenceStructure = new SequenceStructure(
ssr.getService( JdbcEnvironment.class ),
"envers",
new QualifiedNameImpl(
namespace.getName(),
Identifier.toIdentifier( "aSequence" )

View File

@ -0,0 +1,97 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.orm.test.envers;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.envers.Audited;
import org.hibernate.envers.DefaultRevisionEntity;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.DomainModelScope;
import org.junit.jupiter.api.Test;
import org.hamcrest.Matchers;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
/**
* @author Steve Ebersole
*/
@DomainModel(
		annotatedClasses = ModelContributorSmokeTests.SimpleEntity.class
)
public class ModelContributorSmokeTests {
	@Test
	public void simpleModelContributorTest(DomainModelScope scope) {
		final MetadataImplementor metadata = scope.getDomainModel();

		// expecting 3 entity bindings:
		//		1) SimpleEntity (the "orm" contribution)
		//		2) Envers' DefaultRevisionEntity
		//		3) Envers' "shadow" of the domain entity (SimpleEntity_AUD)
		assertThat( metadata.getEntityBindings().size(), is( 3 ) );

		checkModel( metadata.getEntityBinding( SimpleEntity.class.getName() ), "orm" );
		checkModel( metadata.getEntityBinding( DefaultRevisionEntity.class.getName() ), "envers" );
		checkModel( metadata.getEntityBinding( SimpleEntity.class.getName() + "_AUD" ), "envers" );
	}

	// verify that both the entity binding and its root table report the expected contributor
	private void checkModel(PersistentClass entityBinding, String expectedContributor) {
		assertThat( entityBinding.getContributor(), is( expectedContributor ) );
		assertThat( entityBinding.getRootTable().getContributor(), is( expectedContributor ) );
	}

	@Entity( name = "SimpleEntity" )
	@Table( name = "simple" )
	@Audited
	public static class SimpleEntity {
		@Id
		private Integer id;
		String name;

		public SimpleEntity() {
		}

		public SimpleEntity(Integer id, String name) {
			this.id = id;
			this.name = name;
		}

		public Integer getId() {
			return id;
		}

		private void setId(Integer id) {
			this.id = id;
		}

		public String getName() {
			return name;
		}

		public void setName(String name) {
			this.name = name;
		}
	}
}

View File

@ -0,0 +1,94 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.orm.test.envers;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.envers.Audited;
import org.hibernate.envers.DefaultRevisionEntity;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.metamodel.RuntimeMetamodels;
import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.DomainModelScope;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
/**
* @author Steve Ebersole
*/
@ServiceRegistry(
		settings = @Setting(
				name = AvailableSettings.JPA_METAMODEL_POPULATION,
				value = "ignoreUnsupported"
		)
)
@DomainModel(
		annotatedClasses = RuntimeModelSmokeTests.SimpleEntity.class
)
@SessionFactory
public class RuntimeModelSmokeTests {
	public static final String FULL_NAME = "org.hibernate.orm.test.envers.RuntimeModelSmokeTests$SimpleEntity_AUD";
	public static final String SIMPLE_NAME = "SimpleEntity_AUD";

	@Test
	public void basicTest(SessionFactoryScope scope) {
		final RuntimeMetamodels runtimeMetamodels = scope.getSessionFactory().getRuntimeMetamodels();

		// the Envers audit entity should exist in the mapping (relational) metamodel...
		final EntityPersister mappingType = runtimeMetamodels.getMappingMetamodel().findEntityDescriptor( FULL_NAME );
		assertThat( mappingType, notNullValue() );

		// ... and in the JPA metamodel.
		final EntityDomainType<Object> jpaType = runtimeMetamodels.getJpaMetamodel().entity( SIMPLE_NAME );
		// fix: this previously re-asserted `mappingType`, leaving `jpaType` unchecked
		assertThat( jpaType, notNullValue() );
	}

	@Entity( name = "SimpleEntity" )
	@Table( name = "simple" )
	@Audited
	public static class SimpleEntity {
		@Id
		private Integer id;
		String name;

		public SimpleEntity() {
		}

		public SimpleEntity(Integer id, String name) {
			this.id = id;
			this.name = name;
		}

		public Integer getId() {
			return id;
		}

		private void setId(Integer id) {
			this.id = id;
		}

		public String getName() {
			return name;
		}

		public void setName(String name) {
			this.name = name;
		}
	}
}

View File

@ -0,0 +1,73 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.boot;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
/**
* @author Steve Ebersole
*/
/**
 * ClassLoaderService implementation which exposes additional, programmatically
 * supplied Java "services" alongside those discoverable normally.
 *
 * @author Steve Ebersole
 */
public class ExtraJavaServicesClassLoaderService extends ClassLoaderServiceImpl {
	private final List<JavaServiceDescriptor<?>> extraJavaServices;

	public ExtraJavaServicesClassLoaderService(List<JavaServiceDescriptor<?>> extraJavaServices) {
		this.extraJavaServices = extraJavaServices;
	}

	@Override
	public <S> Collection<S> loadJavaServices(Class<S> serviceContract) {
		// start with the services discovered normally, then layer in the extras
		final List<S> combined = new ArrayList<>( super.loadJavaServices( serviceContract ) );
		applyExtraJavaServices( serviceContract, combined );
		return combined;
	}

	// instantiate (via no-arg constructor) every extra descriptor whose role satisfies the contract
	private <S> void applyExtraJavaServices(Class<S> serviceContract, List<S> services) {
		for ( JavaServiceDescriptor<?> descriptor : extraJavaServices ) {
			if ( !serviceContract.isAssignableFrom( descriptor.role ) ) {
				continue;
			}

			try {
				final Object serviceInstance = descriptor.impl.getDeclaredConstructor().newInstance();
				//noinspection unchecked
				services.add( (S) serviceInstance );
			}
			catch (NoSuchMethodException | IllegalAccessException e) {
				throw new RuntimeException( "Unable to access constructor for specified 'extra' Java service : " + descriptor.impl.getName(), e );
			}
			catch (InstantiationException | InvocationTargetException e) {
				throw new RuntimeException( "Unable to instantiate specified 'extra' Java service : " + descriptor.impl.getName(), e );
			}
		}
	}

	/**
	 * Pairing of a service role (contract) and its implementation class.
	 */
	public static class JavaServiceDescriptor<ROLE> {
		private final Class<ROLE> role;
		private final Class<? extends ROLE> impl;

		public JavaServiceDescriptor(Class<ROLE> role, Class<? extends ROLE> impl) {
			this.role = role;
			this.impl = impl;
		}

		public Class<ROLE> getRole() {
			return role;
		}

		public Class<? extends ROLE> getImpl() {
			return impl;
		}
	}
}

View File

@ -85,4 +85,9 @@ public class MetadataBuildingContextTestingImpl implements MetadataBuildingConte
public TypeDefinitionRegistryStandardImpl getTypeDefinitionRegistry() {
return typeDefinitionRegistry;
}
@Override
public String getCurrentContributorName() {
	// the testing context always acts on behalf of the "orm" (Hibernate core) contributor
	return "orm";
}
}

View File

@ -0,0 +1,40 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.hamcrest;
import java.util.Locale;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
/**
* @author Steve Ebersole
*/
/**
 * Hamcrest matcher asserting that a String contains the expected fragment,
 * ignoring case (and leading/trailing whitespace of the examined string).
 *
 * @author Steve Ebersole
 */
public class CaseInsensitiveContainsMatcher extends TypeSafeMatcher<String> {
	private final String match;

	public CaseInsensitiveContainsMatcher(String match) {
		this.match = match.toLowerCase( Locale.ROOT );
	}

	public static Matcher<String> contains(String expected) {
		// the constructor already lower-cases the expected value; no need to
		// pre-normalize here (matches the sibling CaseInsensitiveStartsWithMatcher)
		return new CaseInsensitiveContainsMatcher( expected );
	}

	@Override
	protected boolean matchesSafely(String string) {
		final String normalized = string.toLowerCase( Locale.ROOT ).trim();
		return normalized.contains( match );
	}

	@Override
	public void describeTo(Description description) {
		description.appendText( "contains (case insensitive)" ).appendValue( match );
	}
}

View File

@ -0,0 +1,39 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.hamcrest;
import java.util.Locale;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
/**
* @author Steve Ebersole
*/
/**
 * Hamcrest matcher asserting that a String starts with the expected prefix,
 * ignoring case (and leading/trailing whitespace of the examined string).
 *
 * @author Steve Ebersole
 */
public class CaseInsensitiveStartsWithMatcher extends TypeSafeMatcher<String> {
	private final String match;

	public CaseInsensitiveStartsWithMatcher(String match) {
		this.match = match.toLowerCase( Locale.ROOT );
	}

	public static Matcher<String> startsWith(String expected) {
		return new CaseInsensitiveStartsWithMatcher( expected );
	}

	@Override
	protected boolean matchesSafely(String string) {
		return string.toLowerCase( Locale.ROOT ).trim().startsWith( match );
	}

	@Override
	public void describeTo(Description description) {
		description.appendText( "starts with (case insensitive)" ).appendValue( match );
	}
}

View File

@ -0,0 +1,53 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.hamcrest;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
/**
* @author Steve Ebersole
*/
/**
 * Hamcrest matcher asserting that a Collection contains, for each supplied
 * element matcher, at least one element satisfying it (order-independent).
 *
 * @author Steve Ebersole
 */
public class CollectionElementMatcher<E,C extends Collection<E>> extends BaseMatcher<C> {
	@SafeVarargs
	public static <T> Matcher<Collection<T>> hasAllOf(Matcher<T>... elementMatchers) {
		return new CollectionElementMatcher<>( elementMatchers );
	}

	private final List<Matcher<E>> elementMatchers;

	@SafeVarargs
	public CollectionElementMatcher(Matcher<E>... elementMatchers) {
		this.elementMatchers = Arrays.asList( elementMatchers );
	}

	@Override
	public boolean matches(Object o) {
		// Matcher contract: a non-Collection argument is a mismatch, not an error
		// (previously this was an `assert` followed by an unchecked raw-type cast)
		if ( !( o instanceof Collection ) ) {
			return false;
		}
		final Collection<?> collection = (Collection<?>) o;

		// every element-matcher must be satisfied by at least one collection element
		outer: for ( Matcher<E> valueMatcher : elementMatchers ) {
			for ( Object value : collection ) {
				if ( valueMatcher.matches( value ) ) {
					continue outer;
				}
			}
			return false;
		}
		return true;
	}

	@Override
	public void describeTo(Description description) {
		description.appendText( "contained" );
	}
}

View File

@ -6,6 +6,7 @@
*/
package org.hibernate.testing.junit5;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
import javax.persistence.SharedCacheMode;
@ -24,6 +25,7 @@ import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.mapping.RootClass;
import org.hibernate.tool.schema.Action;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator.ActionGrouping;
import org.junit.jupiter.api.AfterEach;
@ -72,10 +74,11 @@ public abstract class SessionFactoryBasedFunctionalTest
}
catch (Exception e) {
StandardServiceRegistryBuilder.destroy( ssr );
SchemaManagementToolCoordinator.ActionGrouping actions = SchemaManagementToolCoordinator.ActionGrouping.interpret(
ssrBuilder.getSettings() );
if ( ( exportSchema() || actions.getDatabaseAction() != Action.NONE ) && metadata != null ) {
dropDatabase( );
if ( exportSchema() && metadata != null ) {
final Set<ActionGrouping> groupings = ActionGrouping.interpret( metadata, ssrBuilder.getSettings() );
if ( ! groupings.isEmpty() ) {
dropDatabase();
}
}
throw e;
}

View File

@ -0,0 +1,39 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.orm;
import java.util.ArrayList;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
/**
* @author Steve Ebersole
*/
/**
 * GenerationTarget implementation which simply records ("journals") each DDL
 * command it is handed, for later inspection by tests.
 *
 * @author Steve Ebersole
 */
public class JournalingGenerationTarget implements GenerationTarget {
	private final ArrayList<String> commands = new ArrayList<>();

	@Override
	public void prepare() {
		// nothing to prepare
	}

	@Override
	public void accept(String command) {
		commands.add( command );
	}

	@Override
	public void release() {
		// nothing to release
	}

	/**
	 * The commands recorded so far, in the order received (live view).
	 */
	public ArrayList<String> getCommands() {
		return commands;
	}

	/**
	 * Forget all commands recorded so far.
	 */
	public void clear() {
		commands.clear();
	}
}

View File

@ -0,0 +1,42 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.orm.junit;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.hibernate.integrator.spi.Integrator;
/**
 * Used to define the bootstrap ServiceRegistry to be used for testing.
 */
@Inherited
@Target( ElementType.TYPE )
@Retention( RetentionPolicy.RUNTIME )
@ServiceRegistryFunctionalTesting
public @interface BootstrapServiceRegistry {
	/**
	 * Integrator implementations to apply to the bootstrap registry being built;
	 * each must define a no-arg constructor.
	 */
	Class<? extends Integrator>[] integrators() default {};

	/**
	 * Extra Java services to make discoverable through the registry's ClassLoaderService.
	 */
	JavaService[] javaServices() default {};

	/**
	 * Describes a Java service by its role (contract) and implementation.
	 */
	@interface JavaService {
		/**
		 * The service role/contract. Logically `?` is `T`
		 */
		Class<?> role();

		/**
		 * The service implementation. Logically `?` is `S extends T`
		 */
		Class<?> impl();
	}
}

View File

@ -0,0 +1,17 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.orm.junit;
import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
/**
 * Producer of BootstrapServiceRegistry
 */
public interface BootstrapServiceRegistryProducer {
	/**
	 * Produce the bootstrap registry to use, given the builder prepared by the
	 * test framework (typically by configuring it and calling {@code build()}).
	 */
	BootstrapServiceRegistry produceServiceRegistry(BootstrapServiceRegistryBuilder builder);
}

View File

@ -11,6 +11,7 @@ import java.lang.reflect.InvocationTargetException;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
import javax.persistence.EntityManager;
@ -209,10 +210,8 @@ public class EntityManagerFactoryExtension
MetadataImplementor model) {
final Map<String, Object> baseProperties = sessionFactory.getProperties();
final ActionGrouping actions = ActionGrouping.interpret( baseProperties );
// if there are explicit setting for auto schema tooling then skip the annotation
if ( actions.getDatabaseAction() != Action.NONE || actions.getScriptAction() != Action.NONE ) {
final Set<ActionGrouping> groupings = ActionGrouping.interpret( model, baseProperties );
if ( ! groupings.isEmpty() ) {
// the properties contained explicit settings for auto schema tooling - skip the annotation
return;
}

View File

@ -14,7 +14,6 @@ import java.lang.annotation.Target;
import org.hibernate.boot.registry.StandardServiceInitiator;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.integrator.spi.Integrator;
import org.hibernate.service.spi.ServiceContributor;
/**
@ -67,18 +66,11 @@ import org.hibernate.service.spi.ServiceContributor;
@Retention( RetentionPolicy.RUNTIME )
@ServiceRegistryFunctionalTesting
//@TestInstance( TestInstance.Lifecycle.PER_CLASS )
//
//@ExtendWith( FailureExpectedExtension.class )
//@ExtendWith( ServiceRegistryExtension.class )
//@ExtendWith( ServiceRegistryParameterResolver.class )
public @interface ServiceRegistry {
Class<? extends ServiceContributor>[] serviceContributors() default {};
Class<? extends StandardServiceInitiator>[] initiators() default {};
Class<? extends Integrator>[] integrators() default {};
Service[] services() default {};
Setting[] settings() default {};

View File

@ -6,6 +6,8 @@
*/
package org.hibernate.testing.orm.junit;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
@ -17,6 +19,8 @@ import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.integrator.spi.Integrator;
import org.hibernate.service.spi.ServiceContributor;
import org.hibernate.testing.boot.ExtraJavaServicesClassLoaderService;
import org.hibernate.testing.boot.ExtraJavaServicesClassLoaderService.JavaServiceDescriptor;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.TestExecutionExceptionHandler;
@ -61,35 +65,53 @@ public class ServiceRegistryExtension
final ServiceRegistryScopeImpl scope = new ServiceRegistryScopeImpl( );
log.debugf( "Creating ServiceRegistryScope - %s", context.getDisplayName() );
final ServiceRegistryProducer producer;
final BootstrapServiceRegistryProducer bsrProducer;
if ( testInstance instanceof ServiceRegistryProducer ) {
producer = (ServiceRegistryProducer) testInstance;
final Optional<BootstrapServiceRegistry> bsrAnnWrapper = AnnotationSupport.findAnnotation(
context.getElement().get(),
BootstrapServiceRegistry.class
);
if ( bsrAnnWrapper.isPresent() ) {
bsrProducer = bsrBuilder -> {
final BootstrapServiceRegistry bsrAnn = bsrAnnWrapper.get();
configureJavaServices( bsrAnn, bsrBuilder );
configureIntegrators( bsrAnn, bsrBuilder );
return bsrBuilder.enableAutoClose().build();
};
}
else {
producer = ssrb -> {
bsrProducer = BootstrapServiceRegistryBuilder::build;
}
final ServiceRegistryProducer ssrProducer;
if ( testInstance instanceof ServiceRegistryProducer ) {
ssrProducer = (ServiceRegistryProducer) testInstance;
}
else {
ssrProducer = ssrb -> {
if ( !context.getElement().isPresent() ) {
throw new RuntimeException( "Unable to determine how to handle given ExtensionContext : " + context.getDisplayName() );
}
final Optional<ServiceRegistry> serviceRegistryAnnWrapper = AnnotationSupport.findAnnotation(
final Optional<ServiceRegistry> ssrAnnWrapper = AnnotationSupport.findAnnotation(
context.getElement().get(),
ServiceRegistry.class
);
if ( serviceRegistryAnnWrapper.isPresent() ) {
final ServiceRegistry serviceRegistryAnn = serviceRegistryAnnWrapper.get();
if ( ssrAnnWrapper.isPresent() ) {
final ServiceRegistry serviceRegistryAnn = ssrAnnWrapper.get();
configureServices( serviceRegistryAnn, ssrb );
configureIntegrators(serviceRegistryAnn, scope);
}
return ssrb.build();
};
}
scope.createRegistry(producer);
scope.createRegistry( bsrProducer, ssrProducer );
locateExtensionStore( testInstance, context ).put( REGISTRY_KEY, scope );
@ -103,11 +125,52 @@ public class ServiceRegistryExtension
}
private static void configureIntegrators(
ServiceRegistry serviceRegistryAnn,
final ServiceRegistryScopeImpl serviceRegistryScope) {
for ( Class<? extends Integrator> integrator : serviceRegistryAnn.integrators() ) {
serviceRegistryScope.applyIntegrator( integrator );
BootstrapServiceRegistry bsrAnn,
final BootstrapServiceRegistryBuilder bsrBuilder) {
final Class<? extends Integrator>[] integrators = bsrAnn.integrators();
if ( integrators.length == 0 ) {
return;
}
for ( Class<? extends Integrator> integratorImpl : integrators ) {
assert integratorImpl != null;
try {
final Constructor<? extends Integrator> constructor = integratorImpl.getDeclaredConstructor();
final Integrator integrator = constructor.newInstance();
bsrBuilder.applyIntegrator( integrator );
}
catch (NoSuchMethodException e) {
throw new IllegalArgumentException( "Could not find no-arg constructor for Integrator : " + integratorImpl.getName(), e );
}
catch (IllegalAccessException e) {
throw new IllegalArgumentException( "Unable to access no-arg constructor for Integrator : " + integratorImpl.getName(), e );
}
catch (InstantiationException | InvocationTargetException e) {
throw new IllegalArgumentException( "Unable to instantiate Integrator : " + integratorImpl.getName(), e );
}
}
}
private static void configureJavaServices(BootstrapServiceRegistry bsrAnn, BootstrapServiceRegistryBuilder bsrBuilder) {
final BootstrapServiceRegistry.JavaService[] javaServiceAnns = bsrAnn.javaServices();
if ( javaServiceAnns.length == 0 ) {
return;
}
final List<JavaServiceDescriptor<?>> javaServiceDescriptors = new ArrayList<>( javaServiceAnns.length );
for ( int i = 0; i < javaServiceAnns.length; i++ ) {
final BootstrapServiceRegistry.JavaService javaServiceAnn = javaServiceAnns[ i ];
javaServiceDescriptors.add(
new JavaServiceDescriptor(
javaServiceAnn.role(),
javaServiceAnn.impl()
)
);
}
final ExtraJavaServicesClassLoaderService cls = new ExtraJavaServicesClassLoaderService( javaServiceDescriptors );
bsrBuilder.applyClassLoaderService( cls );
}
private static void configureServices(ServiceRegistry serviceRegistryAnn, StandardServiceRegistryBuilder ssrb) {
@ -171,36 +234,30 @@ public class ServiceRegistryExtension
}
private static class ServiceRegistryScopeImpl implements ServiceRegistryScope, ExtensionContext.Store.CloseableResource {
private ServiceRegistryProducer producer;
private BootstrapServiceRegistryProducer bsrProducer;
private ServiceRegistryProducer ssrProducer;
private StandardServiceRegistry registry;
private boolean active = true;
private List<Class<? extends Integrator>> integrators = new ArrayList<>();
public ServiceRegistryScopeImpl() {
}
public StandardServiceRegistry createRegistry(ServiceRegistryProducer producer) {
this.producer = producer;
verifyActive();
BootstrapServiceRegistryBuilder bootstrapServiceRegistryBuilder = new BootstrapServiceRegistryBuilder().enableAutoClose();
integrators.forEach(
integrator -> {
try {
bootstrapServiceRegistryBuilder.applyIntegrator( integrator.newInstance() );
}
catch (Exception e) {
throw new RuntimeException( "Could not configure BootstrapServiceRegistryBuilder", e );
}
}
);
public StandardServiceRegistry createRegistry(BootstrapServiceRegistryProducer bsrProducer, ServiceRegistryProducer ssrProducer) {
this.bsrProducer = bsrProducer;
this.ssrProducer = ssrProducer;
final StandardServiceRegistryBuilder ssrb = new StandardServiceRegistryBuilder(bootstrapServiceRegistryBuilder.build());
verifyActive();
BootstrapServiceRegistryBuilder bsrb = new BootstrapServiceRegistryBuilder().enableAutoClose();
final org.hibernate.boot.registry.BootstrapServiceRegistry bsr = bsrProducer.produceServiceRegistry( bsrb );
final StandardServiceRegistryBuilder ssrb = new StandardServiceRegistryBuilder( bsr );
// we will close it ourselves explicitly.
ssrb.disableAutoClose();
return producer.produceServiceRegistry( ssrb );
return ssrProducer.produceServiceRegistry( ssrb );
}
private void verifyActive() {
@ -209,16 +266,12 @@ public class ServiceRegistryExtension
}
}
public void applyIntegrator(Class<? extends Integrator> integrator) {
integrators.add( integrator );
}
@Override
public StandardServiceRegistry getRegistry() {
verifyActive();
if ( registry == null ) {
registry = createRegistry( producer );
registry = createRegistry( bsrProducer, ssrProducer );
}
return registry;

View File

@ -9,6 +9,7 @@ package org.hibernate.testing.orm.junit;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
@ -20,17 +21,14 @@ import org.hibernate.boot.SessionFactoryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.resource.jdbc.spi.StatementInspector;
import org.hibernate.resource.transaction.spi.TransactionStatus;
import org.hibernate.tool.schema.Action;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator.ActionGrouping;
import org.hibernate.testing.junit4.Helper;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.TestExecutionExceptionHandler;
@ -146,10 +144,10 @@ public class SessionFactoryExtension
boolean createSecondarySchemas) {
final Map<String, Object> baseProperties = sessionFactory.getProperties();
final ActionGrouping actions = ActionGrouping.interpret( baseProperties );
final Set<ActionGrouping> groupings = ActionGrouping.interpret( model, baseProperties );
// if there are explicit setting for auto schema tooling then skip the annotation
if ( actions.getDatabaseAction() != Action.NONE || actions.getScriptAction() != Action.NONE ) {
if ( ! groupings.isEmpty() ) {
// the properties contained explicit settings for auto schema tooling - skip the annotation
return;
}