HHH-14469 - Support schema-tooling on sub-sets of the relational model known to Hibernate

- @BootstrapServiceRegistry
- Support for filtering at schema-tooling level
Steve Ebersole 2021-02-25 15:14:59 -06:00
parent 7b7597f40e
commit ddf434df7e
92 changed files with 2036 additions and 320 deletions
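The changes below thread a "contributor" name (ORM itself registers as "orm") through every Table and Sequence the boot model knows about, and let the schema tools accept a ContributableMatcher instead of always acting on the whole relational model. As a rough illustration of how that filtering could be consumed, here is a minimal sketch of a custom matcher; the single matches(Contributable) method is an assumption, since this diff only shows the ALL constant being passed to doDrop/doCreation:

import org.hibernate.mapping.Contributable;
import org.hibernate.tool.schema.spi.ContributableMatcher;

// Sketch only; the matches(Contributable) signature is assumed, not shown in this commit.
// Restricts schema tooling to objects registered by the "envers" contributor,
// leaving "orm"-contributed tables and sequences untouched.
public class EnversOnlyMatcher implements ContributableMatcher {
    @Override
    public boolean matches(Contributable contributable) {
        return "envers".equals( contributable.getContributor() );
    }
}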

View File

@@ -7,6 +7,7 @@
package org.hibernate.boot;
import java.util.Map;
+import java.util.Set;
import java.util.UUID;
import java.util.function.Consumer;
@@ -192,4 +193,9 @@ public interface Metadata extends Mapping {
java.util.Collection<Table> collectTableMappings();
Map<String, SqmFunctionDescriptor> getSqlFunctionMap();
+/**
+ * All of the known model contributors
+ */
+Set<String> getContributors();
}
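The new getContributors() accessor makes it possible to see, after bootstrap, which subsystems actually registered mappings. A small illustrative sketch, using nothing beyond the method added above:

import java.util.Set;
import org.hibernate.boot.Metadata;

public class ContributorReport {
    // Prints the distinct contributor names known to the compiled mapping model,
    // e.g. just "orm", or "orm" plus "envers" when Envers registers its audit tables.
    public static void print(Metadata metadata) {
        final Set<String> contributors = metadata.getContributors();
        contributors.forEach( name -> System.out.println( "mapping contributor: " + name ) );
    }
}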

View File

@@ -54,14 +54,14 @@ import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.QualifiedTableName;
import org.hibernate.boot.model.source.internal.ImplicitColumnNamingSecondPass;
import org.hibernate.boot.model.source.spi.LocalMetadataBuildingContext;
-import org.hibernate.boot.spi.BootstrapContext;
-import org.hibernate.boot.spi.InFlightMetadataCollector;
-import org.hibernate.boot.spi.MetadataBuildingContext;
-import org.hibernate.boot.spi.MetadataBuildingOptions;
import org.hibernate.boot.query.NamedHqlQueryDefinition;
import org.hibernate.boot.query.NamedNativeQueryDefinition;
import org.hibernate.boot.query.NamedProcedureCallDefinition;
import org.hibernate.boot.query.NamedResultSetMappingDescriptor;
+import org.hibernate.boot.spi.BootstrapContext;
+import org.hibernate.boot.spi.InFlightMetadataCollector;
+import org.hibernate.boot.spi.MetadataBuildingContext;
+import org.hibernate.boot.spi.MetadataBuildingOptions;
import org.hibernate.boot.spi.NaturalIdUniqueKeyBinder;
import org.hibernate.cfg.AnnotatedClassType;
import org.hibernate.cfg.AvailableSettings;
@@ -231,6 +231,11 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
return sqlFunctionMap;
}
+@Override
+public Set<String> getContributors() {
+throw new UnsupportedOperationException();
+}
@Override
public void validate() throws MappingException {
// nothing to do
@@ -744,7 +749,8 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
String catalogName,
String name,
String subselectFragment,
-boolean isAbstract) {
+boolean isAbstract,
+MetadataBuildingContext buildingContext) {
final Namespace namespace = getDatabase().locateNamespace(
getDatabase().toIdentifier( catalogName ),
getDatabase().toIdentifier( schemaName )
@@ -761,17 +767,21 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
}
if ( subselectFragment != null ) {
-return new Table( namespace, logicalName, subselectFragment, isAbstract );
+return new Table( buildingContext.getCurrentContributorName(), namespace, logicalName, subselectFragment, isAbstract );
}
else {
-Table table = namespace.locateTable( logicalName );
-if ( table != null ) {
+final Table existing = namespace.locateTable( logicalName );
+if ( existing != null ) {
if ( !isAbstract ) {
-table.setAbstract( false );
+existing.setAbstract( false );
}
-return table;
+return existing;
}
-return namespace.createTable( logicalName, isAbstract );
+return namespace.createTable(
+logicalName,
+(physicalName) -> new Table( buildingContext.getCurrentContributorName(), namespace, physicalName, isAbstract )
+);
}
}
@@ -782,7 +792,8 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
String name,
boolean isAbstract,
String subselectFragment,
-Table includedTable) throws DuplicateMappingException {
+Table includedTable,
+MetadataBuildingContext buildingContext) throws DuplicateMappingException {
final Namespace namespace = getDatabase().locateNamespace(
getDatabase().toIdentifier( catalogName ),
getDatabase().toIdentifier( schemaName )
@@ -799,7 +810,17 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
}
if ( subselectFragment != null ) {
-return new DenormalizedTable( namespace, logicalName, subselectFragment, isAbstract, includedTable );
+return namespace.createDenormalizedTable(
+logicalName,
+(physicalName) -> new DenormalizedTable(
+buildingContext.getCurrentContributorName(),
+namespace,
+logicalName,
+subselectFragment,
+isAbstract,
+includedTable
+)
+);
}
else {
Table table = namespace.locateTable( logicalName );
@@ -807,7 +828,16 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
throw new DuplicateMappingException( DuplicateMappingException.Type.TABLE, logicalName.toString() );
}
else {
-table = namespace.createDenormalizedTable( logicalName, isAbstract, includedTable );
+table = namespace.createDenormalizedTable(
+logicalName,
+(physicalTableName) -> new DenormalizedTable(
+buildingContext.getCurrentContributorName(),
+namespace,
+physicalTableName,
+isAbstract,
+includedTable
+)
+);
}
return table;
}

View File

@@ -16,9 +16,10 @@ import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.MetadataBuildingOptions;
/**
-* @author Steve Ebersole
+* Root MetadataBuildingContext
*/
public class MetadataBuildingContextRootImpl implements MetadataBuildingContext {
+private final String contributor;
private final BootstrapContext bootstrapContext;
private final MetadataBuildingOptions options;
private final MappingDefaults mappingDefaults;
@@ -27,9 +28,11 @@ public class MetadataBuildingContextRootImpl implements MetadataBuildingContext
private final TypeDefinitionRegistryStandardImpl typeDefinitionRegistry;
public MetadataBuildingContextRootImpl(
+String contributor,
BootstrapContext bootstrapContext,
MetadataBuildingOptions options,
InFlightMetadataCollector metadataCollector) {
+this.contributor = contributor;
this.bootstrapContext = bootstrapContext;
this.options = options;
this.mappingDefaults = options.getMappingDefaults();
@@ -77,4 +80,9 @@ public class MetadataBuildingContextRootImpl implements MetadataBuildingContext
public TypeDefinitionRegistryStandardImpl getTypeDefinitionRegistry() {
return typeDefinitionRegistry;
}
+@Override
+public String getCurrentContributorName() {
+return contributor;
+}
}

View File

@@ -26,6 +26,7 @@ import org.hibernate.boot.model.IdentifierGeneratorDefinition;
import org.hibernate.boot.model.TypeDefinition;
import org.hibernate.boot.model.relational.Database;
import org.hibernate.boot.model.relational.Namespace;
+import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.BootstrapContext;
import org.hibernate.boot.spi.MetadataBuildingOptions;
@@ -310,6 +311,27 @@ public class MetadataImpl implements MetadataImplementor, Serializable {
return sqlFunctionMap;
}
+@Override
+public Set<String> getContributors() {
+final HashSet<String> contributors = new HashSet<>();
+entityBindingMap.forEach(
+(s, persistentClass) -> contributors.add( persistentClass.getContributor() )
+);
+for ( Namespace namespace : database.getNamespaces() ) {
+for ( Table table : namespace.getTables() ) {
+contributors.add( table.getContributor() );
+}
+for ( Sequence sequence : namespace.getSequences() ) {
+contributors.add( sequence.getContributor() );
+}
+}
+return contributors;
+}
@Override
public java.util.Collection<Table> collectTableMappings() {
ArrayList<Table> tables = new ArrayList<>();

View File

@@ -29,6 +29,7 @@ import org.hibernate.boot.model.source.spi.MetadataSourceProcessor;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.AdditionalJaxbMappingProducer;
import org.hibernate.boot.spi.BootstrapContext;
+import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.MetadataBuildingOptions;
import org.hibernate.boot.spi.MetadataContributor;
import org.hibernate.boot.spi.MetadataImplementor;
@@ -133,6 +134,7 @@ public class MetadataBuildingProcess {
final ClassLoaderService classLoaderService = options.getServiceRegistry().getService( ClassLoaderService.class );
final MetadataBuildingContextRootImpl rootMetadataBuildingContext = new MetadataBuildingContextRootImpl(
+"orm",
bootstrapContext,
options,
metadataCollector
@@ -290,7 +292,7 @@ public class MetadataBuildingProcess {
metadataCollector.processSecondPasses( rootMetadataBuildingContext );
if ( options.isXmlMappingEnabled() ) {
-Iterable<AdditionalJaxbMappingProducer> producers = classLoaderService.loadJavaServices( AdditionalJaxbMappingProducer.class );
+final Iterable<AdditionalJaxbMappingProducer> producers = classLoaderService.loadJavaServices( AdditionalJaxbMappingProducer.class );
if ( producers != null ) {
final EntityHierarchyBuilder hierarchyBuilder = new EntityHierarchyBuilder();
// final MappingBinder mappingBinder = new MappingBinder( true );

View File

@ -0,0 +1,15 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.boot.model.relational;
import org.hibernate.mapping.Contributable;
/**
* Contributable specialization for Tables and Sequences
*/
public interface ContributableDatabaseObject extends Contributable, Exportable {
}

View File

@@ -10,6 +10,7 @@ import java.util.Collection;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
+import java.util.function.Function;
import org.hibernate.HibernateException;
import org.hibernate.boot.model.naming.Identifier;
@@ -33,8 +34,8 @@ public class Namespace {
private final Name name;
private final Name physicalName;
-private Map<Identifier, Table> tables = new TreeMap<>();
-private Map<Identifier, Sequence> sequences = new TreeMap<>();
+private final Map<Identifier, Table> tables = new TreeMap<>();
+private final Map<Identifier, Sequence> sequences = new TreeMap<>();
public Namespace(PhysicalNamingStrategy physicalNamingStrategy, JdbcEnvironment jdbcEnvironment, Name name) {
this.physicalNamingStrategy = physicalNamingStrategy;
@@ -89,28 +90,29 @@ public class Namespace {
*
* @return the created table.
*/
-public Table createTable(Identifier logicalTableName, boolean isAbstract) {
+public Table createTable(Identifier logicalTableName, Function<Identifier,Table> creator) {
final Table existing = tables.get( logicalTableName );
if ( existing != null ) {
return existing;
}
final Identifier physicalTableName = physicalNamingStrategy.toPhysicalTableName( logicalTableName, jdbcEnvironment );
-Table table = new Table( this, physicalTableName, isAbstract );
+final Table table = creator.apply( physicalTableName );
tables.put( logicalTableName, table );
return table;
}
-public DenormalizedTable createDenormalizedTable(Identifier logicalTableName, boolean isAbstract, Table includedTable) {
+public DenormalizedTable createDenormalizedTable(Identifier logicalTableName, Function<Identifier,DenormalizedTable> creator) {
final Table existing = tables.get( logicalTableName );
if ( existing != null ) {
+// for now assume it is
return (DenormalizedTable) existing;
}
final Identifier physicalTableName = physicalNamingStrategy.toPhysicalTableName( logicalTableName, jdbcEnvironment );
-DenormalizedTable table = new DenormalizedTable( this, physicalTableName, isAbstract, includedTable );
+final DenormalizedTable table = creator.apply( physicalTableName );
tables.put( logicalTableName, table );
return table;
}
@@ -118,21 +120,15 @@ public class Namespace {
return sequences.get( name );
}
-public Sequence createSequence(Identifier logicalName, int initialValue, int increment) {
+public Sequence createSequence(Identifier logicalName, Function<Identifier,Sequence> creator) {
if ( sequences.containsKey( logicalName ) ) {
throw new HibernateException( "Sequence was already registered with that name [" + logicalName.toString() + "]" );
}
final Identifier physicalName = physicalNamingStrategy.toPhysicalSequenceName( logicalName, jdbcEnvironment );
+final Sequence sequence = creator.apply( physicalName );
-Sequence sequence = new Sequence(
-this.physicalName.getCatalog(),
-this.physicalName.getSchema(),
-physicalName,
-initialValue,
-increment
-);
sequences.put( logicalName, sequence );
return sequence;
}

View File

@@ -14,7 +14,7 @@ import org.hibernate.boot.model.naming.Identifier;
*
* @author Steve Ebersole
*/
-public class Sequence implements Exportable {
+public class Sequence implements ContributableDatabaseObject {
public static class Name extends QualifiedNameParser.NameParts {
public Name(
Identifier catalogIdentifier,
@@ -26,21 +26,33 @@ public class Sequence implements Exportable {
private final QualifiedSequenceName name;
private final String exportIdentifier;
+private final String contributor;
private int initialValue = 1;
private int incrementSize = 1;
-public Sequence(Identifier catalogName, Identifier schemaName, Identifier sequenceName) {
-this.name = new QualifiedSequenceName( catalogName, schemaName, sequenceName );
+public Sequence(
+String contributor,
+Identifier catalogName,
+Identifier schemaName,
+Identifier sequenceName) {
+this.contributor = contributor;
+this.name = new QualifiedSequenceName(
+catalogName,
+schemaName,
+sequenceName
+);
this.exportIdentifier = name.render();
}
public Sequence(
+String contributor,
Identifier catalogName,
Identifier schemaName,
Identifier sequenceName,
int initialValue,
int incrementSize) {
-this( catalogName, schemaName, sequenceName );
+this( contributor, catalogName, schemaName, sequenceName );
this.initialValue = initialValue;
this.incrementSize = incrementSize;
}
@@ -54,6 +66,11 @@ public class Sequence implements Exportable {
return exportIdentifier;
}
+@Override
+public String getContributor() {
+return contributor;
+}
public int getInitialValue() {
return initialValue;
}

View File

@@ -96,7 +96,10 @@ public class EntityHierarchyBuilder {
final RootEntitySourceImpl rootEntitySource = new RootEntitySourceImpl( mappingDocument, jaxbRootEntity );
entitySourceByNameMap.put( rootEntitySource.getEntityNamingSource().getEntityName(), rootEntitySource );
-final EntityHierarchySourceImpl hierarchy = new EntityHierarchySourceImpl( rootEntitySource );
+final EntityHierarchySourceImpl hierarchy = new EntityHierarchySourceImpl(
+rootEntitySource,
+mappingDocument
+);
entityHierarchyList.add( hierarchy );
linkAnyWaiting( mappingDocument, rootEntitySource );

View File

@@ -44,6 +44,7 @@ import org.hibernate.internal.util.StringHelper;
*/
public class EntityHierarchySourceImpl implements EntityHierarchySource {
private final RootEntitySourceImpl rootEntitySource;
+private final MappingDocument rootEntityMappingDocument;
private final IdentifierSource identifierSource;
private final VersionAttributeSource versionAttributeSource;
@@ -57,8 +58,11 @@ public class EntityHierarchySourceImpl implements EntityHierarchySource {
private Set<String> collectedEntityNames = new HashSet<>();
-public EntityHierarchySourceImpl(RootEntitySourceImpl rootEntitySource) {
+public EntityHierarchySourceImpl(
+RootEntitySourceImpl rootEntitySource,
+MappingDocument rootEntityMappingDocument) {
this.rootEntitySource = rootEntitySource;
+this.rootEntityMappingDocument = rootEntityMappingDocument;
this.rootEntitySource.injectHierarchy( this );
this.identifierSource = interpretIdentifierSource( rootEntitySource );
@@ -74,6 +78,10 @@ public class EntityHierarchySourceImpl implements EntityHierarchySource {
collectedEntityNames.add( rootEntitySource.getEntityNamingSource().getEntityName() );
}
+public MappingDocument getRootEntityMappingDocument() {
+return rootEntityMappingDocument;
+}
private static IdentifierSource interpretIdentifierSource(RootEntitySourceImpl rootEntitySource) {
final JaxbHbmSimpleIdType simpleId = rootEntitySource.jaxbEntityMapping().getId();
final JaxbHbmCompositeIdType compositeId = rootEntitySource.jaxbEntityMapping().getCompositeId();

View File

@@ -55,6 +55,7 @@ public class HbmMetadataSourceProcessorImpl implements MetadataSourceProcessor {
}
final MappingDocument mappingDocument = new MappingDocument(
+"orm",
(JaxbHbmHibernateMapping) xmlBinding.getRoot(),
xmlBinding.getOrigin(),
rootBuildingContext

View File

@@ -54,11 +54,14 @@ public class MappingDocument implements HbmLocalMetadataBuildingContext, Metadat
private final TypeDefinitionRegistryStandardImpl typeDefinitionRegistry;
+private final String contributor;
public MappingDocument(
+String contributor,
JaxbHbmHibernateMapping documentRoot,
Origin origin,
MetadataBuildingContext rootBuildingContext) {
+this.contributor = contributor;
this.documentRoot = documentRoot;
this.origin = origin;
this.rootBuildingContext = rootBuildingContext;
@@ -163,6 +166,11 @@ public class MappingDocument implements HbmLocalMetadataBuildingContext, Metadat
return typeDefinitionRegistry;
}
+@Override
+public String getCurrentContributorName() {
+return contributor;
+}
@Override
public void prepare() {
// nothing to do here

View File

@@ -198,7 +198,7 @@ public class ModelBinder {
}
public void bindEntityHierarchy(EntityHierarchySourceImpl hierarchySource) {
-final RootClass rootEntityDescriptor = new RootClass( metadataBuildingContext );
+final RootClass rootEntityDescriptor = new RootClass( hierarchySource.getRootEntityMappingDocument() );
bindRootEntity( hierarchySource, rootEntityDescriptor );
hierarchySource.getRoot()
.getLocalMetadataBuildingContext()
@@ -1764,12 +1764,15 @@
Table secondaryTable;
final Identifier logicalTableName;
-if ( TableSource.class.isInstance( secondaryTableSource.getTableSource() ) ) {
+if ( secondaryTableSource.getTableSource() instanceof TableSource ) {
final TableSource tableSource = (TableSource) secondaryTableSource.getTableSource();
logicalTableName = database.toIdentifier( tableSource.getExplicitTableName() );
secondaryTable = namespace.locateTable( logicalTableName );
if ( secondaryTable == null ) {
-secondaryTable = namespace.createTable( logicalTableName, false );
+secondaryTable = namespace.createTable(
+logicalTableName,
+(identifier) -> new Table( mappingDocument.getCurrentContributorName(), namespace, identifier, false )
+);
}
else {
secondaryTable.setAbstract( false );
@@ -1780,6 +1783,7 @@
else {
final InLineViewSource inLineViewSource = (InLineViewSource) secondaryTableSource.getTableSource();
secondaryTable = new Table(
+metadataBuildingContext.getCurrentContributorName(),
namespace,
inLineViewSource.getSelectStatement(),
false
@@ -2951,13 +2955,26 @@
}
if ( denormalizedSuperTable == null ) {
-table = namespace.createTable( logicalTableName, isAbstract );
+table = namespace.createTable(
+logicalTableName,
+(identifier) -> new Table(
+mappingDocument.getCurrentContributorName(),
+namespace,
+identifier,
+isAbstract
+)
+);
}
else {
table = namespace.createDenormalizedTable(
logicalTableName,
-isAbstract,
-denormalizedSuperTable
+(physicalTableName) -> new DenormalizedTable(
+mappingDocument.getCurrentContributorName(),
+namespace,
+physicalTableName,
+isAbstract,
+denormalizedSuperTable
+)
);
}
}
@@ -2966,10 +2983,16 @@
subselect = inLineViewSource.getSelectStatement();
logicalTableName = database.toIdentifier( inLineViewSource.getLogicalName() );
if ( denormalizedSuperTable == null ) {
-table = new Table( namespace, subselect, isAbstract );
+table = new Table( mappingDocument.getCurrentContributorName(), namespace, subselect, isAbstract );
}
else {
-table = new DenormalizedTable( namespace, subselect, isAbstract, denormalizedSuperTable );
+table = new DenormalizedTable(
+mappingDocument.getCurrentContributorName(),
+namespace,
+subselect,
+isAbstract,
+denormalizedSuperTable
+);
}
table.setName( logicalTableName.render() );
}
@@ -3312,10 +3335,19 @@
.determineCollectionTableName( implicitNamingSource );
}
-collectionTable = namespace.createTable( logicalName, false );
+collectionTable = namespace.createTable(
+logicalName,
+(identifier) -> new Table(
+metadataBuildingContext.getCurrentContributorName(),
+namespace,
+identifier,
+false
+)
+);
}
else {
collectionTable = new Table(
+metadataBuildingContext.getCurrentContributorName(),
namespace,
( (InLineViewSource) tableSpecSource ).getSelectStatement(),
false

View File

@@ -239,4 +239,9 @@ public abstract class AbstractDelegatingMetadata implements MetadataImplementor
public NamedObjectRepository buildNamedQueryRepository(SessionFactoryImplementor sessionFactory) {
return delegate().buildNamedQueryRepository( sessionFactory );
}
+@Override
+public Set<String> getContributors() {
+return delegate.getContributors();
+}
}
} }

View File

@@ -115,7 +115,13 @@ public interface InFlightMetadataCollector extends Mapping, MetadataImplementor
*
* @return The created table metadata, or the existing reference.
*/
-Table addTable(String schema, String catalog, String name, String subselect, boolean isAbstract);
+Table addTable(
+String schema,
+String catalog,
+String name,
+String subselect,
+boolean isAbstract,
+MetadataBuildingContext buildingContext);
/**
* Adds a 'denormalized table' to this repository.
@@ -126,7 +132,7 @@ public interface InFlightMetadataCollector extends Mapping, MetadataImplementor
* @param isAbstract Is the table abstract (i.e. not really existing in the DB)?
* @param subselect A select statement which defines a logical table, much
* like a DB view.
-* @param includedTable ???
+* @param includedTable The "common" table
*
* @return The created table metadata.
*
@@ -138,7 +144,8 @@ public interface InFlightMetadataCollector extends Mapping, MetadataImplementor
String name,
boolean isAbstract,
String subselect,
-Table includedTable) throws DuplicateMappingException;
+Table includedTable,
+MetadataBuildingContext buildingContext) throws DuplicateMappingException;
/**
* Adds metadata for a named query to this repository.

View File

@@ -66,4 +66,9 @@ public interface MetadataBuildingContext {
}
TypeDefinitionRegistry getTypeDefinitionRegistry();
+/**
+ * The name of the contributor whose mappings we are currently processing
+ */
+String getCurrentContributorName();
}

View File

@@ -491,7 +491,8 @@
logicalName.render(),
isAbstract,
subselect,
-denormalizedSuperTableXref.getPrimaryTable()
+denormalizedSuperTableXref.getPrimaryTable(),
+buildingContext
);
}
else {
@@ -500,7 +501,8 @@
catalog,
logicalName.render(),
subselect,
-isAbstract
+isAbstract,
+buildingContext
);
}

View File

@@ -47,6 +47,11 @@ public interface IdentifierGenerator {
*/
String GENERATOR_NAME = "GENERATOR_NAME";
+/**
+ * The contributor that contributed this generator
+ */
+String CONTRIBUTOR_NAME = "CONTRIBUTOR";
/**
* Generate a new identifier.
*

View File

@@ -92,6 +92,8 @@ public class MultipleHiLoPerTableGenerator implements PersistentIdentifierGenera
private static final String DEFAULT_PK_COLUMN = "sequence_name";
private static final String DEFAULT_VALUE_COLUMN = "sequence_next_hi_value";
+private String contributor;
private QualifiedName qualifiedTableName;
private String tableName;
private String segmentColumnName;
@@ -273,6 +275,11 @@ public class MultipleHiLoPerTableGenerator implements PersistentIdentifierGenera
if ( maxLo >= 1 ) {
hiloOptimizer = new LegacyHiLoAlgorithmOptimizer( returnClass, maxLo );
}
+contributor = params.getProperty( CONTRIBUTOR_NAME );
+if ( contributor == null ) {
+contributor = "orm";
+}
}
protected QualifiedName determineGeneratorTableName(Properties params, JdbcEnvironment jdbcEnvironment) {
@@ -316,7 +323,10 @@ public class MultipleHiLoPerTableGenerator implements PersistentIdentifierGenera
Table table = namespace.locateTable( qualifiedTableName.getObjectName() );
if ( table == null ) {
-table = namespace.createTable( qualifiedTableName.getObjectName(), false );
+table = namespace.createTable(
+qualifiedTableName.getObjectName(),
+(identifier) -> new Table( contributor, namespace, identifier, false )
+);
// todo : not sure the best solution here. do we add the columns if missing? other?
table.setPrimaryKey( new PrimaryKey( table ) );

View File

@@ -65,6 +65,8 @@ public class SequenceGenerator
@Deprecated
public static final String PARAMETERS = "parameters";
+private String contributor;
private QualifiedName logicalQualifiedSequenceName;
private String sequenceName;
private Type identifierType;
@@ -103,6 +105,13 @@ public class SequenceGenerator
"org.hibernate.id.enhanced.SequenceStyleGenerator generator instead."
);
}
+contributor = determineContributor( params );
+}
+private String determineContributor(Properties params) {
+final String contributor = params.getProperty( CONTRIBUTOR_NAME );
+return contributor == null ? "orm" : contributor;
}
@Override
@@ -181,8 +190,14 @@ public class SequenceGenerator
else {
sequence = namespace.createSequence(
logicalQualifiedSequenceName.getObjectName(),
-1,
-1
+(physicalName) -> new Sequence(
+contributor,
+namespace.getPhysicalName().getCatalog(),
+namespace.getPhysicalName().getSchema(),
+physicalName,
+1,
+1
+)
);
}

View File

@@ -36,6 +36,7 @@ public class SequenceStructure implements DatabaseStructure {
SequenceStructure.class.getName()
);
+private final String contributor;
private final QualifiedName logicalQualifiedSequenceName;
private final int initialValue;
private final int incrementSize;
@@ -48,10 +49,12 @@ public class SequenceStructure implements DatabaseStructure {
public SequenceStructure(
JdbcEnvironment jdbcEnvironment,
+String contributor,
QualifiedName qualifiedSequenceName,
int initialValue,
int incrementSize,
Class numberType) {
+this.contributor = contributor;
this.logicalQualifiedSequenceName = qualifiedSequenceName;
this.initialValue = initialValue;
@@ -179,7 +182,17 @@ public class SequenceStructure implements DatabaseStructure {
sequence.validate( initialValue, sourceIncrementSize );
}
else {
-sequence = namespace.createSequence( logicalQualifiedSequenceName.getObjectName(), initialValue, sourceIncrementSize );
+sequence = namespace.createSequence(
+logicalQualifiedSequenceName.getObjectName(),
+(physicalName) -> new Sequence(
+contributor,
+namespace.getPhysicalName().getCatalog(),
+namespace.getPhysicalName().getSchema(),
+physicalName,
+initialValue,
+sourceIncrementSize
+)
+);
}
this.sequenceName = database.getJdbcEnvironment().getQualifiedObjectNameFormatter().format(

View File

@@ -501,7 +501,14 @@ public class SequenceStyleGenerator
QualifiedName sequenceName,
int initialValue,
int incrementSize) {
-return new SequenceStructure( jdbcEnvironment, sequenceName, initialValue, incrementSize, type.getReturnedClass() );
+return new SequenceStructure(
+jdbcEnvironment,
+determineContributor( params ),
+sequenceName,
+initialValue,
+incrementSize,
+type.getReturnedClass()
+);
}
@SuppressWarnings("WeakerAccess")
@@ -513,7 +520,23 @@ public class SequenceStyleGenerator
int initialValue,
int incrementSize) {
final Identifier valueColumnName = determineValueColumnName( params, jdbcEnvironment );
-return new TableStructure( jdbcEnvironment, sequenceName, valueColumnName, initialValue, incrementSize, type.getReturnedClass() );
+final String contributor = determineContributor( params );
+return new TableStructure(
+jdbcEnvironment,
+contributor,
+sequenceName,
+valueColumnName,
+initialValue,
+incrementSize,
+type.getReturnedClass()
+);
+}
+private String determineContributor(Properties params) {
+final String contributor = params.getProperty( IdentifierGenerator.CONTRIBUTOR_NAME );
+return contributor == null ? "orm" : contributor;
}

View File

@@ -250,6 +250,8 @@ public class TableGenerator implements PersistentIdentifierGenerator, Configurab
private Optimizer optimizer;
private long accessCount;
+private String contributor;
@Override
public Object generatorKey() {
return qualifiedTableName.render();
@@ -386,6 +388,11 @@ public class TableGenerator implements PersistentIdentifierGenerator, Configurab
incrementSize,
optimizerInitialValue
);
+contributor = params.getProperty( CONTRIBUTOR_NAME );
+if ( contributor == null ) {
+contributor = "orm";
+}
}
/**
@@ -735,7 +742,10 @@ public class TableGenerator implements PersistentIdentifierGenerator, Configurab
Table table = namespace.locateTable( qualifiedTableName.getObjectName() );
if ( table == null ) {
-table = namespace.createTable( qualifiedTableName.getObjectName(), false );
+table = namespace.createTable(
+qualifiedTableName.getObjectName(),
+(identifier) -> new Table( contributor, namespace, identifier, false )
+);
// todo : not sure the best solution here. do we add the columns if missing? other?
final Column segmentColumn = new ExportableColumn(

View File

@@ -55,6 +55,8 @@ public class TableStructure implements DatabaseStructure {
private final int incrementSize;
private final Class numberType;
+private String contributor;
private String tableNameText;
private String valueColumnNameText;
@@ -64,13 +66,16 @@ public class TableStructure implements DatabaseStructure {
private boolean applyIncrementSizeToSourceValues;
private int accessCounter;
public TableStructure(
JdbcEnvironment jdbcEnvironment,
+String contributor,
QualifiedName qualifiedTableName,
Identifier valueColumnNameIdentifier,
int initialValue,
int incrementSize,
Class numberType) {
+this.contributor = contributor;
this.logicalQualifiedTableName = qualifiedTableName;
this.logicalValueColumnNameIdentifier = valueColumnNameIdentifier;
@@ -251,7 +256,10 @@ public class TableStructure implements DatabaseStructure {
Table table = namespace.locateTable( logicalQualifiedTableName.getObjectName() );
boolean tableCreated = false;
if ( table == null ) {
-table = namespace.createTable( logicalQualifiedTableName.getObjectName(), false );
+table = namespace.createTable(
+logicalQualifiedTableName.getObjectName(),
+(identifier) -> new Table( contributor, namespace, identifier, false )
+);
tableCreated = true;
}

View File

@@ -7,6 +7,7 @@
package org.hibernate.id.factory;
import java.util.Properties;
+import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.id.IdentifierGenerator;
import org.hibernate.type.Type;
@@ -22,7 +23,7 @@ public interface IdentifierGeneratorFactory {
*
* @return the dialect
*/
-public Dialect getDialect();
+Dialect getDialect();
/**
* Allow injection of the dialect to use.
@@ -33,7 +34,7 @@ public interface IdentifierGeneratorFactory {
* ctor injected.
*/
@Deprecated
-public void setDialect(Dialect dialect);
+void setDialect(Dialect dialect);
/**
* Given a strategy, retrieve the appropriate identifier generator instance.
@@ -44,7 +45,7 @@ public interface IdentifierGeneratorFactory {
*
* @return The appropriate generator instance.
*/
-public IdentifierGenerator createIdentifierGenerator(String strategy, Type type, Properties config);
+IdentifierGenerator createIdentifierGenerator(String strategy, Type type, Properties config);
/**
* Retrieve the class that will be used as the {@link IdentifierGenerator} for the given strategy.
@@ -52,5 +53,5 @@ public interface IdentifierGeneratorFactory {
* @param strategy The strategy
* @return The generator class.
*/
-public Class getIdentifierGeneratorClass(String strategy);
+Class getIdentifierGeneratorClass(String strategy);
}

View File

@ -0,0 +1,15 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.mapping;
/**
* Part of the mapping model that is associated with a contributor: ORM, Envers, Search, etc.
* @author Steve Ebersole
*/
public interface Contributable {
String getContributor();
}
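Contributable is the hook everything else in this commit keys off: each Table, Sequence, and PersistentClass now records which subsystem created it. A sketch of contributor-aware table registration using the createTable and Table signatures introduced in this commit; the "envers" name and the factory class are hypothetical:

import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.mapping.Table;

// Hypothetical helper: "envers" stands in for any non-"orm" contributor.
public class AuditTableFactory {
    // Registers (or re-uses) a table under the given logical name, tagging it with a
    // contributor so schema tooling can later include or skip it by contributor.
    public Table createAuditTable(Namespace namespace, Identifier logicalName) {
        return namespace.createTable(
                logicalName,
                (physicalName) -> new Table( "envers", namespace, physicalName, false )
        );
    }
}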

View File

@@ -22,30 +22,36 @@ public class DenormalizedTable extends Table {
private final Table includedTable;
-public DenormalizedTable(Table includedTable) {
-this.includedTable = includedTable;
-includedTable.setHasDenormalizedTables();
-}
-public DenormalizedTable(Namespace namespace, Identifier physicalTableName, boolean isAbstract, Table includedTable) {
-super( namespace, physicalTableName, isAbstract );
+public DenormalizedTable(
+String contributor,
+Namespace namespace,
+Identifier physicalTableName,
+boolean isAbstract,
+Table includedTable) {
+super( contributor, namespace, physicalTableName, isAbstract );
this.includedTable = includedTable;
includedTable.setHasDenormalizedTables();
}
public DenormalizedTable(
+String contributor,
Namespace namespace,
Identifier physicalTableName,
String subselectFragment,
boolean isAbstract,
Table includedTable) {
-super( namespace, physicalTableName, subselectFragment, isAbstract );
+super( contributor, namespace, physicalTableName, subselectFragment, isAbstract );
this.includedTable = includedTable;
includedTable.setHasDenormalizedTables();
}
-public DenormalizedTable(Namespace namespace, String subselect, boolean isAbstract, Table includedTable) {
-super( namespace, subselect, isAbstract );
+public DenormalizedTable(
+String contributor,
+Namespace namespace,
+String subselect,
+boolean isAbstract,
+Table includedTable) {
+super( contributor, namespace, subselect, isAbstract );
this.includedTable = includedTable;
includedTable.setHasDenormalizedTables();
}

View File

@@ -35,13 +35,14 @@ import org.hibernate.sql.Alias;
*
* @author Gavin King
*/
-public abstract class PersistentClass implements AttributeContainer, Serializable, Filterable, MetaAttributable {
+public abstract class PersistentClass implements AttributeContainer, Serializable, Filterable, MetaAttributable, Contributable {
private static final Alias PK_ALIAS = new Alias( 15, "PK" );
public static final String NULL_DISCRIMINATOR_MAPPING = "null";
public static final String NOT_NULL_DISCRIMINATOR_MAPPING = "not null";
private final MetadataBuildingContext metadataBuildingContext;
+private final String contributor;
private String entityName;
@@ -95,6 +96,11 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
public PersistentClass(MetadataBuildingContext metadataBuildingContext) {
this.metadataBuildingContext = metadataBuildingContext;
+this.contributor = metadataBuildingContext.getCurrentContributorName();
+}
+public String getContributor() {
+return contributor;
}
public ServiceRegistry getServiceRegistry() {

View File

@@ -334,6 +334,11 @@ public abstract class SimpleValue implements KeyValue {
AvailableSettings.PREFER_POOLED_VALUES_LO,
cs.getSetting( AvailableSettings.PREFER_POOLED_VALUES_LO, StandardConverters.BOOLEAN, false )
);
+params.put(
+IdentifierGenerator.CONTRIBUTOR_NAME,
+buildingContext.getCurrentContributorName()
+);
if ( cs.getSettings().get( AvailableSettings.PREFERRED_POOLED_OPTIMIZER ) != null ) {
params.put(
AvailableSettings.PREFERRED_POOLED_OPTIMIZER,
@@ -342,7 +347,11 @@ public abstract class SimpleValue implements KeyValue {
}
identifierGeneratorFactory.setDialect( dialect );
-identifierGenerator = identifierGeneratorFactory.createIdentifierGenerator( identifierGeneratorStrategy, getType(), params );
+identifierGenerator = identifierGeneratorFactory.createIdentifierGenerator(
+identifierGeneratorStrategy,
+getType(),
+params
+);
return identifierGenerator;
}

View File

@@ -21,6 +21,7 @@ import org.hibernate.HibernateException;
import org.hibernate.MappingException;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
+import org.hibernate.boot.model.relational.ContributableDatabaseObject;
import org.hibernate.boot.model.relational.Exportable;
import org.hibernate.boot.model.relational.InitCommand;
import org.hibernate.boot.model.relational.Namespace;
@@ -42,10 +43,12 @@ import org.jboss.logging.Logger;
* @author Gavin King
*/
@SuppressWarnings("deprecation")
-public class Table implements RelationalModel, Serializable, Exportable {
+public class Table implements RelationalModel, Serializable, ContributableDatabaseObject {
private static final Logger log = Logger.getLogger( Table.class );
private static final Column[] EMPTY_COLUMN_ARRAY = new Column[0];
+private final String contributor;
private Identifier catalog;
private Identifier schema;
private Identifier name;
@@ -70,16 +73,24 @@ public class Table implements RelationalModel, Serializable, Exportable {
private List<InitCommand> initCommands;
public Table() {
+this( "orm" );
}
-public Table(String name) {
+public Table(String contributor) {
+this( contributor, null );
+}
+public Table(String contributor, String name) {
+this.contributor = contributor;
setName( name );
}
public Table(
+String contributor,
Namespace namespace,
Identifier physicalTableName,
boolean isAbstract) {
+this.contributor = contributor;
this.catalog = namespace.getPhysicalName().getCatalog();
this.schema = namespace.getPhysicalName().getSchema();
this.name = physicalTableName;
@@ -87,17 +98,12 @@ public class Table implements RelationalModel, Serializable, Exportable {
}
public Table(
-Identifier catalog,
-Identifier schema,
+String contributor,
+Namespace namespace,
Identifier physicalTableName,
+String subselect,
boolean isAbstract) {
-this.catalog = catalog;
-this.schema = schema;
-this.name = physicalTableName;
-this.isAbstract = isAbstract;
-}
-public Table(Namespace namespace, Identifier physicalTableName, String subselect, boolean isAbstract) {
+this.contributor = contributor;
this.catalog = namespace.getPhysicalName().getCatalog();
this.schema = namespace.getPhysicalName().getSchema();
this.name = physicalTableName;
@@ -105,13 +111,19 @@ public class Table implements RelationalModel, Serializable, Exportable {
this.isAbstract = isAbstract;
}
-public Table(Namespace namespace, String subselect, boolean isAbstract) {
+public Table(String contributor, Namespace namespace, String subselect, boolean isAbstract) {
+this.contributor = contributor;
this.catalog = namespace.getPhysicalName().getCatalog();
this.schema = namespace.getPhysicalName().getSchema();
this.subselect = subselect;
this.isAbstract = isAbstract;
}
+@Override
+public String getContributor() {
+return contributor;
+}
/**
* @deprecated Should use {@link QualifiedObjectNameFormatter#format} on QualifiedObjectNameFormatter
* obtained from {@link org.hibernate.engine.jdbc.env.spi.JdbcEnvironment}

View File

@@ -52,6 +52,11 @@ public interface EntityMappingType extends ManagedMappingType, EntityValuedModel
*/
EntityPersister getEntityPersister();
+default String getContributor() {
+// todo (6.0) : needed for the HHH-14470 half related to HHH-14469
+return "orm";
+}
default EntityRepresentationStrategy getRepresentationStrategy() {
return getEntityPersister().getRepresentationStrategy();
}

View File

@@ -12,13 +12,14 @@ import java.util.function.Function;
import org.hibernate.boot.model.relational.Exportable;
import org.hibernate.dialect.Dialect;
+import org.hibernate.mapping.Contributable;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.persister.entity.Joinable;
/**
* @author Steve Ebersole
*/
-public class IdTable implements Exportable {
+public class IdTable implements Exportable, Contributable {
private final EntityMappingType entityDescriptor;
private final String qualifiedTableName;
@@ -77,9 +78,10 @@ public class IdTable implements Exportable {
}
}
-// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-// Exportable
+@Override
+public String getContributor() {
+return entityDescriptor.getContributor();
+}
@Override
public String getExportIdentifier() {
View File

@ -39,6 +39,7 @@ import org.hibernate.tool.schema.internal.ExceptionHandlerCollectingImpl;
import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl; import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl;
import org.hibernate.tool.schema.internal.Helper; import org.hibernate.tool.schema.internal.Helper;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl; import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaManagementException; import org.hibernate.tool.schema.spi.SchemaManagementException;
@ -60,7 +61,7 @@ import org.hibernate.tool.schema.spi.TargetDescriptor;
public class SchemaExport { public class SchemaExport {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( SchemaExport.class ); private static final CoreMessageLogger LOG = CoreLogging.messageLogger( SchemaExport.class );
public static enum Type { public enum Type {
CREATE( Action.CREATE ), CREATE( Action.CREATE ),
DROP( Action.DROP ), DROP( Action.DROP ),
NONE( Action.NONE ), NONE( Action.NONE ),
@ -81,7 +82,7 @@ public class SchemaExport {
} }
} }
public static enum Action { public enum Action {
/** /**
* None - duh :P * None - duh :P
*/ */
@ -228,7 +229,6 @@ public class SchemaExport {
execute( targetTypes, action, metadata, ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry() ); execute( targetTypes, action, metadata, ( (MetadataImplementor) metadata ).getMetadataBuildingOptions().getServiceRegistry() );
} }
@SuppressWarnings("unchecked")
public void execute(EnumSet<TargetType> targetTypes, Action action, Metadata metadata, ServiceRegistry serviceRegistry) { public void execute(EnumSet<TargetType> targetTypes, Action action, Metadata metadata, ServiceRegistry serviceRegistry) {
if ( action == Action.NONE ) { if ( action == Action.NONE ) {
LOG.debug( "Skipping SchemaExport as Action.NONE was passed" ); LOG.debug( "Skipping SchemaExport as Action.NONE was passed" );
@ -255,7 +255,7 @@ public class SchemaExport {
Metadata metadata, Metadata metadata,
ServiceRegistry serviceRegistry, ServiceRegistry serviceRegistry,
TargetDescriptor targetDescriptor) { TargetDescriptor targetDescriptor) {
Map config = new HashMap( serviceRegistry.getService( ConfigurationService.class ).getSettings() ); Map<String,Object> config = new HashMap<>( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
config.put( AvailableSettings.HBM2DDL_DELIMITER, delimiter ); config.put( AvailableSettings.HBM2DDL_DELIMITER, delimiter );
config.put( AvailableSettings.FORMAT_SQL, format ); config.put( AvailableSettings.FORMAT_SQL, format );
config.put( AvailableSettings.HBM2DDL_IMPORT_FILES, importFiles ); config.put( AvailableSettings.HBM2DDL_IMPORT_FILES, importFiles );
@ -287,6 +287,7 @@ public class SchemaExport {
tool.getSchemaDropper( config ).doDrop( tool.getSchemaDropper( config ).doDrop(
metadata, metadata,
executionOptions, executionOptions,
ContributableMatcher.ALL,
sourceDescriptor, sourceDescriptor,
targetDescriptor targetDescriptor
); );
@ -296,6 +297,7 @@ public class SchemaExport {
tool.getSchemaCreator( config ).doCreation( tool.getSchemaCreator( config ).doCreation(
metadata, metadata,
executionOptions, executionOptions,
ContributableMatcher.ALL,
sourceDescriptor, sourceDescriptor,
targetDescriptor targetDescriptor
); );
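Illustration only, not part of the patch: a minimal sketch of driving SchemaExport programmatically. The new ContributableMatcher.ALL argument shown above is supplied internally by SchemaExport itself, so a caller like this is unaffected; the `metadata` parameter is assumed to have been built elsewhere (for example via MetadataSources).

import java.util.EnumSet;

import org.hibernate.boot.Metadata;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.schema.TargetType;

public class SchemaExportSketch {
	// Writes CREATE statements for every mapped object to stdout;
	// internally the export now passes ContributableMatcher.ALL.
	public static void exportToStdOut(Metadata metadata) {
		new SchemaExport().execute( EnumSet.of( TargetType.STDOUT ), SchemaExport.Action.CREATE, metadata );
	}
}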
@ -396,7 +398,7 @@ public class SchemaExport {
private static MetadataImplementor buildMetadata( private static MetadataImplementor buildMetadata(
CommandLineArgs parsedArgs, CommandLineArgs parsedArgs,
StandardServiceRegistry serviceRegistry) throws Exception { StandardServiceRegistry serviceRegistry) {
final MetadataSources metadataSources = new MetadataSources( serviceRegistry ); final MetadataSources metadataSources = new MetadataSources( serviceRegistry );
for ( String filename : parsedArgs.hbmXmlFiles ) { for ( String filename : parsedArgs.hbmXmlFiles ) {

View File

@ -35,6 +35,7 @@ import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.TargetType; import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerCollectingImpl; import org.hibernate.tool.schema.internal.ExceptionHandlerCollectingImpl;
import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl; import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementTool;
@ -90,7 +91,7 @@ public class SchemaUpdate {
final TargetDescriptor targetDescriptor = SchemaExport.buildTargetDescriptor( targetTypes, outputFile, serviceRegistry ); final TargetDescriptor targetDescriptor = SchemaExport.buildTargetDescriptor( targetTypes, outputFile, serviceRegistry );
try { try {
tool.getSchemaMigrator( config ).doMigration( metadata, executionOptions, targetDescriptor ); tool.getSchemaMigrator( config ).doMigration( metadata, executionOptions, ContributableMatcher.ALL, targetDescriptor );
} }
finally { finally {
if ( exceptionHandler instanceof ExceptionHandlerCollectingImpl ) { if ( exceptionHandler instanceof ExceptionHandlerCollectingImpl ) {

View File

@ -31,6 +31,7 @@ import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.log.DeprecationLogger; import org.hibernate.internal.log.DeprecationLogger;
import org.hibernate.service.ServiceRegistry; import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl; import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator; import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
@ -61,7 +62,7 @@ public class SchemaValidator {
ExceptionHandlerHaltImpl.INSTANCE ExceptionHandlerHaltImpl.INSTANCE
); );
tool.getSchemaValidator( config ).doValidation( metadata, executionOptions ); tool.getSchemaValidator( config ).doValidation( metadata, executionOptions, ContributableMatcher.ALL );
} }
public static void main(String[] args) { public static void main(String[] args) {

View File

@ -103,7 +103,7 @@ public enum Action {
return NONE; return NONE;
} }
if ( Action.class.isInstance( value ) ) { if ( value instanceof Action ) {
return (Action) value; return (Action) value;
} }
@ -152,7 +152,7 @@ public enum Action {
return NONE; return NONE;
} }
if ( Action.class.isInstance( value ) ) { if ( value instanceof Action ) {
return hbm2ddlSetting( (Action) value ); return hbm2ddlSetting( (Action) value );
} }

View File

@ -47,6 +47,7 @@ import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.GenerationTarget; import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.JdbcContext; import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.spi.CommandAcceptanceException; import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.Exporter; import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.tool.schema.spi.SchemaFilter; import org.hibernate.tool.schema.spi.SchemaFilter;
@ -89,7 +90,11 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
} }
@Override @Override
public void doMigration(Metadata metadata, ExecutionOptions options, TargetDescriptor targetDescriptor) { public void doMigration(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
TargetDescriptor targetDescriptor) {
if ( !targetDescriptor.getTargetTypes().isEmpty() ) { if ( !targetDescriptor.getTargetTypes().isEmpty() ) {
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() ); final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final DdlTransactionIsolator ddlTransactionIsolator = tool.getDdlTransactionIsolator( jdbcContext ); final DdlTransactionIsolator ddlTransactionIsolator = tool.getDdlTransactionIsolator( jdbcContext );
@ -112,7 +117,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
} }
try { try {
performMigration( metadata, databaseInformation, options, jdbcContext.getDialect(), targets ); performMigration( metadata, databaseInformation, options, contributableInclusionFilter, jdbcContext.getDialect(), targets );
} }
finally { finally {
for ( GenerationTarget target : targets ) { for ( GenerationTarget target : targets ) {
@ -144,6 +149,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
Metadata metadata, Metadata metadata,
DatabaseInformation existingDatabase, DatabaseInformation existingDatabase,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect, Dialect dialect,
Formatter formatter, Formatter formatter,
Set<String> exportIdentifiers, Set<String> exportIdentifiers,
@ -156,6 +162,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
Metadata metadata, Metadata metadata,
DatabaseInformation existingDatabase, DatabaseInformation existingDatabase,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect, Dialect dialect,
GenerationTarget... targets) { GenerationTarget... targets) {
final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() ); final boolean format = Helper.interpretFormattingEnabled( options.getConfigurationValues() );
@ -209,6 +216,7 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
metadata, metadata,
existingDatabase, existingDatabase,
options, options,
contributableInclusionFilter,
dialect, dialect,
formatter, formatter,
exportIdentifiers, exportIdentifiers,
@ -219,8 +227,11 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
targets targets
); );
tablesInformation.put( namespace, nameSpaceTablesInformation ); tablesInformation.put( namespace, nameSpaceTablesInformation );
if ( schemaFilter.includeNamespace( namespace ) ) { if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
for ( Sequence sequence : namespace.getSequences() ) { for ( Sequence sequence : namespace.getSequences() ) {
if ( ! contributableInclusionFilter.matches( sequence ) ) {
continue;
}
checkExportIdentifier( sequence, exportIdentifiers ); checkExportIdentifier( sequence, exportIdentifiers );
final SequenceInformation sequenceInformation = existingDatabase.getSequenceInformation( sequence.getName() ); final SequenceInformation sequenceInformation = existingDatabase.getSequenceInformation( sequence.getName() );
if ( sequenceInformation == null ) { if ( sequenceInformation == null ) {
@ -241,14 +252,19 @@ public abstract class AbstractSchemaMigrator implements SchemaMigrator {
//NOTE : Foreign keys must be created *after* all tables of all namespaces for cross namespace fks. see HHH-10420 //NOTE : Foreign keys must be created *after* all tables of all namespaces for cross namespace fks. see HHH-10420
for ( Namespace namespace : database.getNamespaces() ) { for ( Namespace namespace : database.getNamespaces() ) {
if ( schemaFilter.includeNamespace( namespace ) ) { if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
final NameSpaceTablesInformation nameSpaceTablesInformation = tablesInformation.get( namespace ); final NameSpaceTablesInformation nameSpaceTablesInformation = tablesInformation.get( namespace );
for ( Table table : namespace.getTables() ) { for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) ) { if ( ! options.getSchemaFilter().includeTable( table ) ) {
final TableInformation tableInformation = nameSpaceTablesInformation.getTableInformation( table ); continue;
if ( tableInformation == null || tableInformation.isPhysicalTable() ) { }
applyForeignKeys( table, tableInformation, dialect, metadata, formatter, options, targets ); if ( ! contributableInclusionFilter.matches( table ) ) {
} continue;
}
final TableInformation tableInformation = nameSpaceTablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyForeignKeys( table, tableInformation, dialect, metadata, formatter, options, targets );
} }
} }
} }

View File

@ -23,6 +23,7 @@ import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformation; import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation; import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.JdbcContext; import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter; import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException; import org.hibernate.tool.schema.spi.SchemaManagementException;
@ -53,7 +54,10 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
} }
@Override @Override
public void doValidation(Metadata metadata, ExecutionOptions options) { public void doValidation(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter) {
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() ); final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final DdlTransactionIsolator isolator = tool.getDdlTransactionIsolator( jdbcContext ); final DdlTransactionIsolator isolator = tool.getDdlTransactionIsolator( jdbcContext );
@ -65,7 +69,7 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
); );
try { try {
performValidation( metadata, databaseInformation, options, jdbcContext.getDialect() ); performValidation( metadata, databaseInformation, options, contributableInclusionFilter, jdbcContext.getDialect() );
} }
finally { finally {
try { try {
@ -83,22 +87,27 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
Metadata metadata, Metadata metadata,
DatabaseInformation databaseInformation, DatabaseInformation databaseInformation,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect) { Dialect dialect) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) { for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( schemaFilter.includeNamespace( namespace ) ) { if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
validateTables( metadata, databaseInformation, options, dialect, namespace ); validateTables( metadata, databaseInformation, options, contributableInclusionFilter, dialect, namespace );
} }
} }
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) { for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( schemaFilter.includeNamespace( namespace ) ) { if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
for ( Sequence sequence : namespace.getSequences() ) { for ( Sequence sequence : namespace.getSequences() ) {
if ( schemaFilter.includeSequence( sequence ) ) { if ( ! options.getSchemaFilter().includeSequence( sequence ) ) {
final SequenceInformation sequenceInformation = databaseInformation.getSequenceInformation( continue;
sequence.getName()
);
validateSequence( sequence, sequenceInformation );
} }
if ( ! contributableInclusionFilter.matches( sequence ) ) {
continue;
}
final SequenceInformation sequenceInformation = databaseInformation.getSequenceInformation( sequence.getName() );
validateSequence( sequence, sequenceInformation );
} }
} }
} }
@ -108,6 +117,7 @@ public abstract class AbstractSchemaValidator implements SchemaValidator {
Metadata metadata, Metadata metadata,
DatabaseInformation databaseInformation, DatabaseInformation databaseInformation,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect, Namespace namespace); Dialect dialect, Namespace namespace);
protected void validateTable( protected void validateTable(

View File

@ -18,6 +18,7 @@ import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation; import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation; import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.GenerationTarget; import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter; import org.hibernate.tool.schema.spi.SchemaFilter;
@ -40,6 +41,7 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
Metadata metadata, Metadata metadata,
DatabaseInformation existingDatabase, DatabaseInformation existingDatabase,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect, Dialect dialect,
Formatter formatter, Formatter formatter,
Set<String> exportIdentifiers, Set<String> exportIdentifiers,
@ -50,7 +52,7 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
final NameSpaceTablesInformation tablesInformation = final NameSpaceTablesInformation tablesInformation =
new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() ); new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() );
if ( schemaFilter.includeNamespace( namespace ) ) { if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
createSchemaAndCatalog( createSchemaAndCatalog(
existingDatabase, existingDatabase,
options, options,
@ -62,9 +64,12 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
namespace, namespace,
targets targets
); );
final NameSpaceTablesInformation tables = existingDatabase.getTablesInformation( namespace ); final NameSpaceTablesInformation tables = existingDatabase.getTablesInformation( namespace );
for ( Table table : namespace.getTables() ) { for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) { if ( options.getSchemaFilter().includeTable( table )
&& table.isPhysicalTable()
&& contributableInclusionFilter.matches( table ) ) {
checkExportIdentifier( table, exportIdentifiers ); checkExportIdentifier( table, exportIdentifiers );
final TableInformation tableInformation = tables.getTableInformation( table ); final TableInformation tableInformation = tables.getTableInformation( table );
if ( tableInformation == null ) { if ( tableInformation == null ) {
@ -78,7 +83,9 @@ public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
} }
for ( Table table : namespace.getTables() ) { for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) { if ( options.getSchemaFilter().includeTable( table )
&& table.isPhysicalTable()
&& contributableInclusionFilter.matches( table ) ) {
final TableInformation tableInformation = tablesInformation.getTableInformation( table ); final TableInformation tableInformation = tablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) { if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets ); applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets );

View File

@ -12,6 +12,7 @@ import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Table; import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation; import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation; import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter; import org.hibernate.tool.schema.spi.SchemaFilter;
@ -34,11 +35,14 @@ public class GroupedSchemaValidatorImpl extends AbstractSchemaValidator {
Metadata metadata, Metadata metadata,
DatabaseInformation databaseInformation, DatabaseInformation databaseInformation,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect, Namespace namespace) { Dialect dialect, Namespace namespace) {
final NameSpaceTablesInformation tables = databaseInformation.getTablesInformation( namespace ); final NameSpaceTablesInformation tables = databaseInformation.getTablesInformation( namespace );
for ( Table table : namespace.getTables() ) { for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) { if ( options.getSchemaFilter().includeTable( table )
&& table.isPhysicalTable()
&& contributableInclusionFilter.matches( table ) ) {
validateTable( validateTable(
table, table,
tables.getTableInformation( table ), tables.getTableInformation( table ),

View File

@ -18,6 +18,7 @@ import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation; import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation; import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.exec.GenerationTarget; import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter; import org.hibernate.tool.schema.spi.SchemaFilter;
@ -40,6 +41,7 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
Metadata metadata, Metadata metadata,
DatabaseInformation existingDatabase, DatabaseInformation existingDatabase,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect, Dialect dialect,
Formatter formatter, Formatter formatter,
Set<String> exportIdentifiers, Set<String> exportIdentifiers,
@ -51,7 +53,7 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
final NameSpaceTablesInformation tablesInformation = final NameSpaceTablesInformation tablesInformation =
new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() ); new NameSpaceTablesInformation( metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper() );
if ( schemaFilter.includeNamespace( namespace ) ) { if ( options.getSchemaFilter().includeNamespace( namespace ) ) {
createSchemaAndCatalog( createSchemaAndCatalog(
existingDatabase, existingDatabase,
options, options,
@ -64,7 +66,9 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
targets targets
); );
for ( Table table : namespace.getTables() ) { for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) { if ( options.getSchemaFilter().includeTable( table )
&& table.isPhysicalTable()
&& contributableInclusionFilter.matches( table ) ) {
checkExportIdentifier( table, exportIdentifiers ); checkExportIdentifier( table, exportIdentifiers );
final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() ); final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
if ( tableInformation == null ) { if ( tableInformation == null ) {
@ -78,7 +82,9 @@ public class IndividuallySchemaMigratorImpl extends AbstractSchemaMigrator {
} }
for ( Table table : namespace.getTables() ) { for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) { if ( options.getSchemaFilter().includeTable( table )
&& table.isPhysicalTable()
&& contributableInclusionFilter.matches( table ) ) {
final TableInformation tableInformation = tablesInformation.getTableInformation( table ); final TableInformation tableInformation = tablesInformation.getTableInformation( table );
if ( tableInformation == null || tableInformation.isPhysicalTable() ) { if ( tableInformation == null || tableInformation.isPhysicalTable() ) {
applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets ); applyIndexes( table, tableInformation, dialect, metadata, formatter, options, targets );

View File

@ -12,6 +12,7 @@ import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Table; import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation; import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation; import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter; import org.hibernate.tool.schema.spi.SchemaFilter;
@ -34,10 +35,13 @@ public class IndividuallySchemaValidatorImpl extends AbstractSchemaValidator {
Metadata metadata, Metadata metadata,
DatabaseInformation databaseInformation, DatabaseInformation databaseInformation,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect, Dialect dialect,
Namespace namespace) { Namespace namespace) {
for ( Table table : namespace.getTables() ) { for ( Table table : namespace.getTables() ) {
if ( schemaFilter.includeTable( table ) && table.isPhysicalTable() ) { if ( options.getSchemaFilter().includeTable( table )
&& table.isPhysicalTable()
&& contributableInclusionFilter.matches( table ) ) {
final TableInformation tableInformation = databaseInformation.getTableInformation( final TableInformation tableInformation = databaseInformation.getTableInformation(
table.getQualifiedTableName() table.getQualifiedTableName()
); );

View File

@ -15,6 +15,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.hibernate.Internal;
import org.hibernate.boot.Metadata; import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier; import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject; import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
@ -48,6 +49,7 @@ import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromUrl; import org.hibernate.tool.schema.internal.exec.ScriptSourceInputFromUrl;
import org.hibernate.tool.schema.internal.exec.ScriptSourceInputNonExistentImpl; import org.hibernate.tool.schema.internal.exec.ScriptSourceInputNonExistentImpl;
import org.hibernate.tool.schema.spi.CommandAcceptanceException; import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaCreator; import org.hibernate.tool.schema.spi.SchemaCreator;
@ -105,6 +107,7 @@ public class SchemaCreatorImpl implements SchemaCreator {
public void doCreation( public void doCreation(
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor, SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor) { TargetDescriptor targetDescriptor) {
if ( targetDescriptor.getTargetTypes().isEmpty() ) { if ( targetDescriptor.getTargetTypes().isEmpty() ) {
@ -119,13 +122,15 @@ public class SchemaCreatorImpl implements SchemaCreator {
true true
); );
doCreation( metadata, jdbcContext.getDialect(), options, sourceDescriptor, targets ); doCreation( metadata, jdbcContext.getDialect(), options, contributableInclusionFilter, sourceDescriptor, targets );
} }
@Internal
public void doCreation( public void doCreation(
Metadata metadata, Metadata metadata,
Dialect dialect, Dialect dialect,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor, SourceDescriptor sourceDescriptor,
GenerationTarget... targets) { GenerationTarget... targets) {
for ( GenerationTarget target : targets ) { for ( GenerationTarget target : targets ) {
@ -133,7 +138,7 @@ public class SchemaCreatorImpl implements SchemaCreator {
} }
try { try {
performCreation( metadata, dialect, options, sourceDescriptor, targets ); performCreation( metadata, dialect, options, contributableInclusionFilter, sourceDescriptor, targets );
} }
finally { finally {
for ( GenerationTarget target : targets ) { for ( GenerationTarget target : targets ) {
@ -151,6 +156,7 @@ public class SchemaCreatorImpl implements SchemaCreator {
Metadata metadata, Metadata metadata,
Dialect dialect, Dialect dialect,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor, SourceDescriptor sourceDescriptor,
GenerationTarget... targets) { GenerationTarget... targets) {
final SqlScriptCommandExtractor commandExtractor = tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class ); final SqlScriptCommandExtractor commandExtractor = tool.getServiceRegistry().getService( SqlScriptCommandExtractor.class );
@ -164,17 +170,17 @@ public class SchemaCreatorImpl implements SchemaCreator {
break; break;
} }
case METADATA: { case METADATA: {
createFromMetadata( metadata, options, dialect, formatter, targets ); createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
break; break;
} }
case METADATA_THEN_SCRIPT: { case METADATA_THEN_SCRIPT: {
createFromMetadata( metadata, options, dialect, formatter, targets ); createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
break; break;
} }
case SCRIPT_THEN_METADATA: { case SCRIPT_THEN_METADATA: {
createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); createFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
createFromMetadata( metadata, options, dialect, formatter, targets ); createFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
} }
} }
@ -197,12 +203,31 @@ public class SchemaCreatorImpl implements SchemaCreator {
} }
} }
@Internal
public void createFromMetadata( public void createFromMetadata(
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
Dialect dialect, Dialect dialect,
Formatter formatter, Formatter formatter,
GenerationTarget... targets) { GenerationTarget... targets) {
createFromMetadata(
metadata,
options,
(contributed) -> true,
dialect,
formatter,
targets
);
}
@Internal
public void createFromMetadata(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionMatcher,
Dialect dialect,
Formatter formatter,
GenerationTarget... targets) {
boolean tryToCreateCatalogs = false; boolean tryToCreateCatalogs = false;
boolean tryToCreateSchemas = false; boolean tryToCreateSchemas = false;
if ( options.shouldManageNamespaces() ) { if ( options.shouldManageNamespaces() ) {
@ -223,7 +248,7 @@ public class SchemaCreatorImpl implements SchemaCreator {
Set<Identifier> exportedCatalogs = new HashSet<>(); Set<Identifier> exportedCatalogs = new HashSet<>();
for ( Namespace namespace : database.getNamespaces() ) { for ( Namespace namespace : database.getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace ) ) { if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue; continue;
} }
@ -276,16 +301,22 @@ public class SchemaCreatorImpl implements SchemaCreator {
// then, create all schema objects (tables, sequences, constraints, etc) in each schema // then, create all schema objects (tables, sequences, constraints, etc) in each schema
for ( Namespace namespace : database.getNamespaces() ) { for ( Namespace namespace : database.getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace ) ) { if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue; continue;
} }
// sequences // sequences
for ( Sequence sequence : namespace.getSequences() ) { for ( Sequence sequence : namespace.getSequences() ) {
if ( !schemaFilter.includeSequence( sequence ) ) { if ( ! options.getSchemaFilter().includeSequence( sequence ) ) {
continue; continue;
} }
if ( ! contributableInclusionMatcher.matches( sequence ) ) {
continue;
}
checkExportIdentifier( sequence, exportIdentifiers ); checkExportIdentifier( sequence, exportIdentifiers );
applySqlStrings( applySqlStrings(
dialect.getSequenceExporter().getSqlCreateStrings( dialect.getSequenceExporter().getSqlCreateStrings(
sequence, sequence,
@ -307,10 +338,17 @@ public class SchemaCreatorImpl implements SchemaCreator {
if ( !table.isPhysicalTable() ){ if ( !table.isPhysicalTable() ){
continue; continue;
} }
if ( !schemaFilter.includeTable( table ) ) {
if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue; continue;
} }
if ( ! contributableInclusionMatcher.matches( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers ); checkExportIdentifier( table, exportIdentifiers );
applySqlStrings( applySqlStrings(
dialect.getTableExporter().getSqlCreateStrings( table, metadata ), dialect.getTableExporter().getSqlCreateStrings( table, metadata ),
formatter, formatter,
@ -324,9 +362,14 @@ public class SchemaCreatorImpl implements SchemaCreator {
if ( !table.isPhysicalTable() ){ if ( !table.isPhysicalTable() ){
continue; continue;
} }
if ( !schemaFilter.includeTable( table ) ) { if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue; continue;
} }
if ( ! contributableInclusionMatcher.matches( table ) ) {
continue;
}
// indexes // indexes
final Iterator indexItr = table.getIndexIterator(); final Iterator indexItr = table.getIndexIterator();
while ( indexItr.hasNext() ) { while ( indexItr.hasNext() ) {
@ -359,14 +402,19 @@ public class SchemaCreatorImpl implements SchemaCreator {
for ( Namespace namespace : database.getNamespaces() ) { for ( Namespace namespace : database.getNamespaces() ) {
// NOTE : Foreign keys must be created *after* unique keys for numerous DBs. See HHH-8390 // NOTE : Foreign keys must be created *after* unique keys for numerous DBs. See HHH-8390
if ( !schemaFilter.includeNamespace( namespace ) ) { if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue; continue;
} }
for ( Table table : namespace.getTables() ) { for ( Table table : namespace.getTables() ) {
if ( !schemaFilter.includeTable( table ) ) { if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue; continue;
} }
if ( ! contributableInclusionMatcher.matches( table ) ) {
continue;
}
// foreign keys // foreign keys
final Iterator fkItr = table.getForeignKeyIterator(); final Iterator fkItr = table.getForeignKeyIterator();
while ( fkItr.hasNext() ) { while ( fkItr.hasNext() ) {
@ -540,6 +588,11 @@ public class SchemaCreatorImpl implements SchemaCreator {
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerHaltImpl.INSTANCE; return ExceptionHandlerHaltImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return schemaFilter;
}
}; };
createFromMetadata( metadata, options, dialect, FormatStyle.NONE.getFormatter(), target ); createFromMetadata( metadata, options, dialect, FormatStyle.NONE.getFormatter(), target );
@ -547,7 +600,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
return target.commands; return target.commands;
} }
/**
* Intended for use from tests
*/
@Internal
public void doCreation( public void doCreation(
Metadata metadata, Metadata metadata,
final boolean manageNamespaces, final boolean manageNamespaces,
@ -562,6 +618,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
); );
} }
/**
* Intended for use from tests
*/
@Internal
public void doCreation( public void doCreation(
Metadata metadata, Metadata metadata,
final ServiceRegistry serviceRegistry, final ServiceRegistry serviceRegistry,
@ -586,7 +646,13 @@ public class SchemaCreatorImpl implements SchemaCreator {
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE; return ExceptionHandlerLoggedImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return schemaFilter;
}
}, },
(contributed) -> true,
new SourceDescriptor() { new SourceDescriptor() {
@Override @Override
public SourceType getSourceType() { public SourceType getSourceType() {

View File

@ -15,6 +15,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.hibernate.Internal;
import org.hibernate.boot.Metadata; import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier; import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject; import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
@ -46,6 +47,7 @@ import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase; import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase;
import org.hibernate.tool.schema.internal.exec.JdbcContext; import org.hibernate.tool.schema.internal.exec.JdbcContext;
import org.hibernate.tool.schema.spi.CommandAcceptanceException; import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.DelayedDropAction; import org.hibernate.tool.schema.spi.DelayedDropAction;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
@ -100,6 +102,7 @@ public class SchemaDropperImpl implements SchemaDropper {
public void doDrop( public void doDrop(
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor, SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor) { TargetDescriptor targetDescriptor) {
@ -110,21 +113,39 @@ public class SchemaDropperImpl implements SchemaDropper {
final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() ); final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
final GenerationTarget[] targets = tool.buildGenerationTargets( targetDescriptor, jdbcContext, options.getConfigurationValues(), true ); final GenerationTarget[] targets = tool.buildGenerationTargets( targetDescriptor, jdbcContext, options.getConfigurationValues(), true );
doDrop( metadata, options, jdbcContext.getDialect(), sourceDescriptor, targets ); doDrop( metadata, options, contributableInclusionFilter, jdbcContext.getDialect(), sourceDescriptor, targets );
} }
/**
* Intended for use from tests
*/
@Internal
public void doDrop( public void doDrop(
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
Dialect dialect, Dialect dialect,
SourceDescriptor sourceDescriptor, SourceDescriptor sourceDescriptor,
GenerationTarget... targets) { GenerationTarget... targets) {
doDrop( metadata, options, (contributed) -> true, dialect, sourceDescriptor, targets );
}
/**
* Intended for use from tests
*/
@Internal
public void doDrop(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect,
SourceDescriptor sourceDescriptor,
GenerationTarget... targets) {
for ( GenerationTarget target : targets ) { for ( GenerationTarget target : targets ) {
target.prepare(); target.prepare();
} }
try { try {
performDrop( metadata, options, dialect, sourceDescriptor, targets ); performDrop( metadata, options, contributableInclusionFilter, dialect, sourceDescriptor, targets );
} }
finally { finally {
for ( GenerationTarget target : targets ) { for ( GenerationTarget target : targets ) {
@ -141,6 +162,7 @@ public class SchemaDropperImpl implements SchemaDropper {
private void performDrop( private void performDrop(
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect, Dialect dialect,
SourceDescriptor sourceDescriptor, SourceDescriptor sourceDescriptor,
GenerationTarget... targets) { GenerationTarget... targets) {
@ -152,15 +174,15 @@ public class SchemaDropperImpl implements SchemaDropper {
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
} }
else if ( sourceDescriptor.getSourceType() == SourceType.METADATA ) { else if ( sourceDescriptor.getSourceType() == SourceType.METADATA ) {
dropFromMetadata( metadata, options, dialect, formatter, targets ); dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
} }
else if ( sourceDescriptor.getSourceType() == SourceType.METADATA_THEN_SCRIPT ) { else if ( sourceDescriptor.getSourceType() == SourceType.METADATA_THEN_SCRIPT ) {
dropFromMetadata( metadata, options, dialect, formatter, targets ); dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
} }
else { else {
dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets ); dropFromScript( sourceDescriptor.getScriptSourceInput(), commandExtractor, formatter, dialect, options, targets );
dropFromMetadata( metadata, options, dialect, formatter, targets ); dropFromMetadata( metadata, options, contributableInclusionFilter, dialect, formatter, targets );
} }
} }
@ -182,6 +204,7 @@ public class SchemaDropperImpl implements SchemaDropper {
private void dropFromMetadata( private void dropFromMetadata(
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
Dialect dialect, Dialect dialect,
Formatter formatter, Formatter formatter,
GenerationTarget... targets) { GenerationTarget... targets) {
@ -221,30 +244,38 @@ public class SchemaDropperImpl implements SchemaDropper {
for ( Namespace namespace : database.getNamespaces() ) { for ( Namespace namespace : database.getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace ) ) { if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue; continue;
} }
// we need to drop all constraints/indexes prior to dropping the tables // we need to drop all constraints/indexes prior to dropping the tables
applyConstraintDropping( namespace, metadata, formatter, options, targets ); applyConstraintDropping( namespace, metadata, formatter, options, contributableInclusionFilter, targets );
// now it's safe to drop the tables // now it's safe to drop the tables
for ( Table table : namespace.getTables() ) { for ( Table table : namespace.getTables() ) {
if ( !table.isPhysicalTable() ) { if ( ! table.isPhysicalTable() ) {
continue; continue;
} }
if ( !schemaFilter.includeTable( table ) ) { if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( table ) ) {
continue; continue;
} }
checkExportIdentifier( table, exportIdentifiers ); checkExportIdentifier( table, exportIdentifiers );
applySqlStrings( dialect.getTableExporter().getSqlDropStrings( table, metadata ), formatter, options,targets ); applySqlStrings( dialect.getTableExporter().getSqlDropStrings( table, metadata ), formatter, options,targets );
} }
for ( Sequence sequence : namespace.getSequences() ) { for ( Sequence sequence : namespace.getSequences() ) {
if ( !schemaFilter.includeSequence( sequence ) ) { if ( ! options.getSchemaFilter().includeSequence( sequence ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( sequence ) ) {
continue; continue;
} }
checkExportIdentifier( sequence, exportIdentifiers ); checkExportIdentifier( sequence, exportIdentifiers );
applySqlStrings( dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata ), formatter, options, targets ); applySqlStrings( dialect.getSequenceExporter().getSqlDropStrings( sequence, metadata ), formatter, options, targets );
} }
} }
@ -270,7 +301,7 @@ public class SchemaDropperImpl implements SchemaDropper {
for ( Namespace namespace : database.getNamespaces() ) { for ( Namespace namespace : database.getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace ) ) { if ( ! options.getSchemaFilter().includeNamespace( namespace ) ) {
continue; continue;
} }
@ -309,6 +340,7 @@ public class SchemaDropperImpl implements SchemaDropper {
Metadata metadata, Metadata metadata,
Formatter formatter, Formatter formatter,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
GenerationTarget... targets) { GenerationTarget... targets) {
final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect(); final Dialect dialect = metadata.getDatabase().getJdbcEnvironment().getDialect();
@ -320,7 +352,10 @@ public class SchemaDropperImpl implements SchemaDropper {
if ( !table.isPhysicalTable() ) { if ( !table.isPhysicalTable() ) {
continue; continue;
} }
if ( !schemaFilter.includeTable( table ) ) { if ( ! options.getSchemaFilter().includeTable( table ) ) {
continue;
}
if ( ! contributableInclusionFilter.matches( table ) ) {
continue; continue;
} }
@ -408,9 +443,14 @@ public class SchemaDropperImpl implements SchemaDropper {
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerHaltImpl.INSTANCE; return ExceptionHandlerHaltImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return schemaFilter;
}
}; };
dropFromMetadata( metadata, options, dialect, FormatStyle.NONE.getFormatter(), target ); dropFromMetadata( metadata, options, (contributed) -> true, dialect, FormatStyle.NONE.getFormatter(), target );
return target.commands; return target.commands;
} }
@ -419,9 +459,13 @@ public class SchemaDropperImpl implements SchemaDropper {
public DelayedDropAction buildDelayedAction( public DelayedDropAction buildDelayedAction(
Metadata metadata, Metadata metadata,
ExecutionOptions options, ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor) { SourceDescriptor sourceDescriptor) {
final JournalingGenerationTarget target = new JournalingGenerationTarget(); final JournalingGenerationTarget target = new JournalingGenerationTarget();
doDrop( metadata, options, tool.getServiceRegistry().getService( JdbcEnvironment.class ).getDialect(), sourceDescriptor, target );
final Dialect dialect = tool.getServiceRegistry().getService( JdbcEnvironment.class ).getDialect();
doDrop( metadata, options, contributableInclusionFilter, dialect, sourceDescriptor, target );
return new DelayedDropActionImpl( target.commands, tool.getCustomDatabaseGenerationTarget() ); return new DelayedDropActionImpl( target.commands, tool.getCustomDatabaseGenerationTarget() );
} }
@ -475,7 +519,13 @@ public class SchemaDropperImpl implements SchemaDropper {
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE; return ExceptionHandlerLoggedImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return schemaFilter;
}
}, },
(contributed) -> true,
serviceRegistry.getService( JdbcEnvironment.class ).getDialect(), serviceRegistry.getService( JdbcEnvironment.class ).getDialect(),
new SourceDescriptor() { new SourceDescriptor() {
@Override @Override

View File

@ -0,0 +1,30 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.tool.schema.spi;
import org.hibernate.boot.model.relational.ContributableDatabaseObject;
/**
* Matcher for whether tables and sequences should be included based on their
* {@link ContributableDatabaseObject#getContributor()}
*/
@FunctionalInterface
public interface ContributableMatcher {
/**
* Matches everything
*/
ContributableMatcher ALL = contributed -> true;
/**
* Matches nothing
*/
ContributableMatcher NONE = contributed -> false;
/**
* Does the given {@code contributed} match this matcher?
*/
boolean matches(ContributableDatabaseObject contributed);
}
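Because the contract is a functional interface, a matcher can be given inline, e.g. contributed -> "orm".equals( contributed.getContributor() ). A hypothetical named implementation (illustration only, not from the patch; the class name is made up) that restricts tooling to objects registered by a single contributor could look like this:

import org.hibernate.boot.model.relational.ContributableDatabaseObject;
import org.hibernate.tool.schema.spi.ContributableMatcher;

public class SingleContributorMatcher implements ContributableMatcher {
	private final String contributor;

	public SingleContributorMatcher(String contributor) {
		this.contributor = contributor;
	}

	@Override
	public boolean matches(ContributableDatabaseObject contributed) {
		// include only objects registered by the configured contributor, e.g. "orm"
		return contributor.equals( contributed.getContributor() );
	}
}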

View File

@ -9,6 +9,7 @@ package org.hibernate.tool.schema.spi;
import java.util.Map; import java.util.Map;
import org.hibernate.Incubating; import org.hibernate.Incubating;
import org.hibernate.boot.model.relational.Exportable;
/** /**
* Parameter object representing options for schema management tool execution * Parameter object representing options for schema management tool execution
@ -18,6 +19,10 @@ import org.hibernate.Incubating;
@Incubating @Incubating
public interface ExecutionOptions { public interface ExecutionOptions {
Map getConfigurationValues(); Map getConfigurationValues();
boolean shouldManageNamespaces(); boolean shouldManageNamespaces();
ExceptionHandler getExceptionHandler(); ExceptionHandler getExceptionHandler();
SchemaFilter getSchemaFilter();
} }
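Since ExecutionOptions now also exposes a SchemaFilter, an implementation of the extended contract could look roughly like the sketch below (illustrative only; SimpleExecutionOptions is a made-up name, and it simply reuses the new SchemaFilter.ALL constant and the existing logged exception handler):

import java.util.Collections;
import java.util.Map;

import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;

public class SimpleExecutionOptions implements ExecutionOptions {
	@Override
	public Map getConfigurationValues() {
		// no extra settings for this sketch
		return Collections.emptyMap();
	}

	@Override
	public boolean shouldManageNamespaces() {
		return false;
	}

	@Override
	public ExceptionHandler getExceptionHandler() {
		return ExceptionHandlerLoggedImpl.INSTANCE;
	}

	@Override
	public SchemaFilter getSchemaFilter() {
		// no schema-level filtering; contributor filtering is handled via ContributableMatcher
		return SchemaFilter.ALL;
	}
}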

View File

@ -14,9 +14,7 @@ import org.hibernate.boot.Metadata;
* <p/> * <p/>
* The actual contract here is kind of convoluted with the design * The actual contract here is kind of convoluted with the design
* idea of allowing this to work in ORM (JDBC) as well as in non-JDBC * idea of allowing this to work in ORM (JDBC) as well as in non-JDBC
* environments (OGM, e.g.) simultaneously. ExecutionContext allows * environments (OGM, e.g.) simultaneously.
*
* @author Steve Ebersole
*/ */
@Incubating @Incubating
public interface SchemaCreator { public interface SchemaCreator {
@ -25,8 +23,14 @@ public interface SchemaCreator {
* *
* @param metadata Represents the schema to be created. * @param metadata Represents the schema to be created.
* @param options Options for executing the creation * @param options Options for executing the creation
* @param contributableInclusionFilter Filter controlling which Contributable instances are included
* @param sourceDescriptor description of the source(s) of creation commands * @param sourceDescriptor description of the source(s) of creation commands
* @param targetDescriptor description of the target(s) for the creation commands * @param targetDescriptor description of the target(s) for the creation commands
*/ */
void doCreation(Metadata metadata, ExecutionOptions options, SourceDescriptor sourceDescriptor, TargetDescriptor targetDescriptor); void doCreation(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor);
} }
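A hypothetical caller of the extended contract (the class and method names are illustrative, and the options and descriptors are assumed to be built elsewhere), showing how a ContributableMatcher limits creation to objects from a single contributor:

import org.hibernate.boot.Metadata;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SourceDescriptor;
import org.hibernate.tool.schema.spi.TargetDescriptor;

public class OrmOnlyCreationExample {
	public static void createOrmObjects(
			SchemaCreator creator,
			Metadata metadata,
			ExecutionOptions options,
			SourceDescriptor sourceDescriptor,
			TargetDescriptor targetDescriptor) {
		// only create objects registered by the "orm" contributor
		final ContributableMatcher ormOnly =
				contributed -> "orm".equals( contributed.getContributor() );
		creator.doCreation( metadata, options, ormOnly, sourceDescriptor, targetDescriptor );
	}
}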

View File

@ -11,8 +11,6 @@ import org.hibernate.boot.Metadata;
/** /**
* Service delegate for handling schema dropping. * Service delegate for handling schema dropping.
*
* @author Steve Ebersole
*/ */
@Incubating @Incubating
public interface SchemaDropper { public interface SchemaDropper {
@ -21,10 +19,16 @@ public interface SchemaDropper {
* *
* @param metadata Represents the schema to be dropped. * @param metadata Represents the schema to be dropped.
* @param options Options for executing the drop * @param options Options for executing the drop
* @param contributableInclusionFilter Filter controlling which Contributable instances are included
* @param sourceDescriptor description of the source(s) of drop commands * @param sourceDescriptor description of the source(s) of drop commands
* @param targetDescriptor description of the target(s) for the drop commands * @param targetDescriptor description of the target(s) for the drop commands
*/ */
void doDrop(Metadata metadata, ExecutionOptions options, SourceDescriptor sourceDescriptor, TargetDescriptor targetDescriptor); void doDrop(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor,
TargetDescriptor targetDescriptor);
/** /**
* Build a delayed Runnable for performing schema dropping. This implicitly * Build a delayed Runnable for performing schema dropping. This implicitly
@ -32,9 +36,14 @@ public interface SchemaDropper {
* *
* @param metadata The metadata to drop * @param metadata The metadata to drop
* @param options The drop options * @param options The drop options
* @param contributableInclusionFilter Filter controlling which Contributable instances are included
* @param sourceDescriptor For access to the {@link SourceDescriptor#getScriptSourceInput()} * @param sourceDescriptor For access to the {@link SourceDescriptor#getScriptSourceInput()}
* *
* @return The Runnable * @return The Runnable
*/ */
DelayedDropAction buildDelayedAction(Metadata metadata, ExecutionOptions options, SourceDescriptor sourceDescriptor); DelayedDropAction buildDelayedAction(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
SourceDescriptor sourceDescriptor);
} }

View File

@ -51,4 +51,24 @@ public interface SchemaFilter {
*/ */
boolean includeSequence(Sequence sequence); boolean includeSequence(Sequence sequence);
/**
* Matches everything
*/
SchemaFilter ALL = new SchemaFilter() {
@Override
public boolean includeNamespace( Namespace namespace ) {
return true;
}
@Override
public boolean includeTable( Table table ) {
return true;
}
@Override
public boolean includeSequence( Sequence sequence ) {
return true;
}
};
} }
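Beyond the new ALL constant, a custom SchemaFilter can still exclude individual objects. A rough sketch under the same contract (ExcludeTableFilter is a made-up name; it skips one table by name and otherwise behaves like SchemaFilter.ALL):

import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.spi.SchemaFilter;

public class ExcludeTableFilter implements SchemaFilter {
	private final String excludedTableName;

	public ExcludeTableFilter(String excludedTableName) {
		this.excludedTableName = excludedTableName;
	}

	@Override
	public boolean includeNamespace(Namespace namespace) {
		return true;
	}

	@Override
	public boolean includeTable(Table table) {
		// drop a single table from schema tooling by name
		return !excludedTableName.equals( table.getName() );
	}

	@Override
	public boolean includeSequence(Sequence sequence) {
		return true;
	}
}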

View File

@ -7,8 +7,13 @@
package org.hibernate.tool.schema.spi; package org.hibernate.tool.schema.spi;
import java.util.EnumSet; import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map; import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.hibernate.Internal;
import org.hibernate.boot.Metadata; import org.hibernate.boot.Metadata;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService; import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.AvailableSettings;
@ -18,6 +23,7 @@ import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.Action; import org.hibernate.tool.schema.Action;
import org.hibernate.tool.schema.SourceType; import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType; import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.DefaultSchemaFilter;
import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl; import org.hibernate.tool.schema.internal.ExceptionHandlerHaltImpl;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl; import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.internal.Helper; import org.hibernate.tool.schema.internal.Helper;
@ -48,48 +54,124 @@ public class SchemaManagementToolCoordinator {
public static void process( public static void process(
final Metadata metadata, final Metadata metadata,
final ServiceRegistry serviceRegistry, final ServiceRegistry serviceRegistry,
final Map configurationValues, final Map<?,?> configurationValues,
DelayedDropRegistry delayedDropRegistry) { DelayedDropRegistry delayedDropRegistry) {
final ActionGrouping actions = ActionGrouping.interpret( configurationValues ); final Set<ActionGrouping> groupings = ActionGrouping.interpret( metadata, configurationValues );
if ( actions.getDatabaseAction() == Action.NONE && actions.getScriptAction() == Action.NONE ) { if ( groupings.isEmpty() ) {
// no actions specified // no actions specified
log.debug( "No actions specified; doing nothing" ); log.debug( "No actions found; doing nothing" );
return; return;
} }
Map<Action,Set<String>> databaseActionMap = null;
Map<Action,Set<String>> scriptActionMap = null;
for ( ActionGrouping grouping : groupings ) {
// for database action
if ( grouping.databaseAction != Action.NONE ) {
final Set<String> contributors;
if ( databaseActionMap == null ) {
databaseActionMap = new HashMap<>();
contributors = new HashSet<>();
databaseActionMap.put( grouping.databaseAction, contributors );
}
else {
contributors = databaseActionMap.computeIfAbsent(
grouping.databaseAction,
action -> new HashSet<>()
);
}
contributors.add( grouping.contributor );
}
// for script action
if ( grouping.scriptAction != Action.NONE ) {
final Set<String> contributors;
if ( scriptActionMap == null ) {
scriptActionMap = new HashMap<>();
contributors = new HashSet<>();
scriptActionMap.put( grouping.scriptAction, contributors );
}
else {
contributors = scriptActionMap.computeIfAbsent(
grouping.scriptAction,
action -> new HashSet<>()
);
}
contributors.add( grouping.contributor );
}
}
final SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class ); final SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class );
final ConfigurationService configService = serviceRegistry.getService( ConfigurationService.class ); final ConfigurationService configService = serviceRegistry.getService( ConfigurationService.class );
boolean haltOnError = configService.getSetting( AvailableSettings.HBM2DDL_HALT_ON_ERROR, StandardConverters.BOOLEAN, false); final boolean haltOnError = configService.getSetting(
AvailableSettings.HBM2DDL_HALT_ON_ERROR,
StandardConverters.BOOLEAN,
false
);
final ExceptionHandler exceptionHandler = haltOnError ? ExceptionHandlerHaltImpl.INSTANCE : ExceptionHandlerLoggedImpl.INSTANCE;
final ExecutionOptions executionOptions = buildExecutionOptions( final ExecutionOptions executionOptions = buildExecutionOptions(
configurationValues, configurationValues,
haltOnError ? ExceptionHandlerHaltImpl.INSTANCE : exceptionHandler
ExceptionHandlerLoggedImpl.INSTANCE
); );
performScriptAction( actions.getScriptAction(), metadata, tool, serviceRegistry, executionOptions ); if ( databaseActionMap != null ) {
performDatabaseAction( actions.getDatabaseAction(), metadata, tool, serviceRegistry, executionOptions ); databaseActionMap.forEach(
(action, contributors) -> {
if ( actions.getDatabaseAction() == Action.CREATE_DROP ) { performDatabaseAction(
//noinspection unchecked action,
delayedDropRegistry.registerOnCloseAction( metadata,
tool.getSchemaDropper( configurationValues ).buildDelayedAction( tool,
metadata, serviceRegistry,
executionOptions, executionOptions,
buildDatabaseTargetDescriptor( (exportable) -> contributors.contains( exportable.getContributor() )
configurationValues, );
DropSettingSelector.INSTANCE,
serviceRegistry if ( action == Action.CREATE_DROP ) {
) delayedDropRegistry.registerOnCloseAction(
) tool.getSchemaDropper( configurationValues ).buildDelayedAction(
metadata,
executionOptions,
(exportable) -> contributors.contains( exportable.getContributor() ),
buildDatabaseTargetDescriptor(
configurationValues,
DropSettingSelector.INSTANCE,
serviceRegistry
)
)
);
}
}
);
}
if ( scriptActionMap != null ) {
scriptActionMap.forEach(
(action, contributors) -> {
performScriptAction( action, metadata, tool, serviceRegistry, executionOptions );
}
); );
} }
} }
public static ExecutionOptions buildExecutionOptions( public static ExecutionOptions buildExecutionOptions(
final Map configurationValues, final Map<?,?> configurationValues,
final ExceptionHandler exceptionHandler) {
return buildExecutionOptions(
configurationValues,
DefaultSchemaFilter.INSTANCE,
exceptionHandler
);
}
public static ExecutionOptions buildExecutionOptions(
final Map<?,?> configurationValues,
final SchemaFilter schemaFilter,
final ExceptionHandler exceptionHandler) { final ExceptionHandler exceptionHandler) {
return new ExecutionOptions() { return new ExecutionOptions() {
@Override @Override
@ -98,7 +180,7 @@ public class SchemaManagementToolCoordinator {
} }
@Override @Override
public Map getConfigurationValues() { public Map<?,?> getConfigurationValues() {
return configurationValues; return configurationValues;
} }
@ -106,16 +188,21 @@ public class SchemaManagementToolCoordinator {
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return exceptionHandler; return exceptionHandler;
} }
@Override
public SchemaFilter getSchemaFilter() {
return schemaFilter;
}
}; };
} }
@SuppressWarnings("unchecked")
private static void performDatabaseAction( private static void performDatabaseAction(
final Action action, final Action action,
Metadata metadata, Metadata metadata,
SchemaManagementTool tool, SchemaManagementTool tool,
ServiceRegistry serviceRegistry, ServiceRegistry serviceRegistry,
final ExecutionOptions executionOptions) { final ExecutionOptions executionOptions,
ContributableMatcher contributableInclusionFilter) {
// IMPL NOTE : JPA binds source and target info.. // IMPL NOTE : JPA binds source and target info..
@ -130,6 +217,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation( tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
metadata, metadata,
executionOptions, executionOptions,
contributableInclusionFilter,
createDescriptor, createDescriptor,
createDescriptor createDescriptor
); );
@ -145,6 +233,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop( tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
metadata, metadata,
executionOptions, executionOptions,
contributableInclusionFilter,
dropDescriptor, dropDescriptor,
dropDescriptor dropDescriptor
); );
@ -156,6 +245,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation( tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
metadata, metadata,
executionOptions, executionOptions,
contributableInclusionFilter,
createDescriptor, createDescriptor,
createDescriptor createDescriptor
); );
@ -170,6 +260,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop( tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
metadata, metadata,
executionOptions, executionOptions,
contributableInclusionFilter,
dropDescriptor, dropDescriptor,
dropDescriptor dropDescriptor
); );
@ -184,6 +275,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaMigrator( executionOptions.getConfigurationValues() ).doMigration( tool.getSchemaMigrator( executionOptions.getConfigurationValues() ).doMigration(
metadata, metadata,
executionOptions, executionOptions,
contributableInclusionFilter,
migrateDescriptor migrateDescriptor
); );
break; break;
@ -191,7 +283,8 @@ public class SchemaManagementToolCoordinator {
case VALIDATE: { case VALIDATE: {
tool.getSchemaValidator( executionOptions.getConfigurationValues() ).doValidation( tool.getSchemaValidator( executionOptions.getConfigurationValues() ).doValidation(
metadata, metadata,
executionOptions executionOptions,
contributableInclusionFilter
); );
break; break;
} }
@ -199,7 +292,7 @@ public class SchemaManagementToolCoordinator {
} }
private static JpaTargetAndSourceDescriptor buildDatabaseTargetDescriptor( private static JpaTargetAndSourceDescriptor buildDatabaseTargetDescriptor(
Map configurationValues, Map<?,?> configurationValues,
SettingSelector settingSelector, SettingSelector settingSelector,
ServiceRegistry serviceRegistry) { ServiceRegistry serviceRegistry) {
final Object scriptSourceSetting = settingSelector.getScriptSourceSetting( configurationValues ); final Object scriptSourceSetting = settingSelector.getScriptSourceSetting( configurationValues );
@ -215,8 +308,8 @@ public class SchemaManagementToolCoordinator {
); );
} }
final ScriptSourceInput scriptSourceInput = includesScripts ? final ScriptSourceInput scriptSourceInput = includesScripts
Helper.interpretScriptSourceSetting( ? Helper.interpretScriptSourceSetting(
scriptSourceSetting, scriptSourceSetting,
serviceRegistry.getService( ClassLoaderService.class ), serviceRegistry.getService( ClassLoaderService.class ),
(String) configurationValues.get( AvailableSettings.HBM2DDL_CHARSET_NAME ) (String) configurationValues.get( AvailableSettings.HBM2DDL_CHARSET_NAME )
@ -246,7 +339,6 @@ public class SchemaManagementToolCoordinator {
}; };
} }
@SuppressWarnings("unchecked")
private static void performScriptAction( private static void performScriptAction(
Action scriptAction, Action scriptAction,
Metadata metadata, Metadata metadata,
@ -263,6 +355,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation( tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
metadata, metadata,
executionOptions, executionOptions,
(contributed) -> true,
createDescriptor, createDescriptor,
createDescriptor createDescriptor
); );
@ -278,6 +371,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop( tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
metadata, metadata,
executionOptions, executionOptions,
(contributed) -> true,
dropDescriptor, dropDescriptor,
dropDescriptor dropDescriptor
); );
@ -289,6 +383,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation( tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
metadata, metadata,
executionOptions, executionOptions,
(contributed) -> true,
createDescriptor, createDescriptor,
createDescriptor createDescriptor
); );
@ -303,6 +398,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop( tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
metadata, metadata,
executionOptions, executionOptions,
(contributed) -> true,
dropDescriptor, dropDescriptor,
dropDescriptor dropDescriptor
); );
@ -317,6 +413,7 @@ public class SchemaManagementToolCoordinator {
tool.getSchemaMigrator( executionOptions.getConfigurationValues() ).doMigration( tool.getSchemaMigrator( executionOptions.getConfigurationValues() ).doMigration(
metadata, metadata,
executionOptions, executionOptions,
(contributed) -> true,
migrateDescriptor migrateDescriptor
); );
break; break;
@ -328,7 +425,7 @@ public class SchemaManagementToolCoordinator {
} }
private static JpaTargetAndSourceDescriptor buildScriptTargetDescriptor( private static JpaTargetAndSourceDescriptor buildScriptTargetDescriptor(
Map configurationValues, Map<?,?> configurationValues,
SettingSelector settingSelector, SettingSelector settingSelector,
ServiceRegistry serviceRegistry) { ServiceRegistry serviceRegistry) {
final Object scriptSourceSetting = settingSelector.getScriptSourceSetting( configurationValues ); final Object scriptSourceSetting = settingSelector.getScriptSourceSetting( configurationValues );
@ -381,9 +478,9 @@ public class SchemaManagementToolCoordinator {
private interface SettingSelector { private interface SettingSelector {
Object getSourceTypeSetting(Map configurationValues); Object getSourceTypeSetting(Map<?,?> configurationValues);
Object getScriptSourceSetting(Map configurationValues); Object getScriptSourceSetting(Map<?,?> configurationValues);
Object getScriptTargetSetting(Map configurationValues); Object getScriptTargetSetting(Map<?,?> configurationValues);
} }
private static class CreateSettingSelector implements SettingSelector { private static class CreateSettingSelector implements SettingSelector {
@ -393,17 +490,17 @@ public class SchemaManagementToolCoordinator {
public static final CreateSettingSelector INSTANCE = new CreateSettingSelector(); public static final CreateSettingSelector INSTANCE = new CreateSettingSelector();
@Override @Override
public Object getSourceTypeSetting(Map configurationValues) { public Object getSourceTypeSetting(Map<?,?> configurationValues) {
return configurationValues.get( HBM2DDL_CREATE_SOURCE ); return configurationValues.get( HBM2DDL_CREATE_SOURCE );
} }
@Override @Override
public Object getScriptSourceSetting(Map configurationValues) { public Object getScriptSourceSetting(Map<?,?> configurationValues) {
return configurationValues.get( HBM2DDL_CREATE_SCRIPT_SOURCE ); return configurationValues.get( HBM2DDL_CREATE_SCRIPT_SOURCE );
} }
@Override @Override
public Object getScriptTargetSetting(Map configurationValues) { public Object getScriptTargetSetting(Map<?,?> configurationValues) {
return configurationValues.get( HBM2DDL_SCRIPTS_CREATE_TARGET ); return configurationValues.get( HBM2DDL_SCRIPTS_CREATE_TARGET );
} }
} }
@ -415,17 +512,17 @@ public class SchemaManagementToolCoordinator {
public static final DropSettingSelector INSTANCE = new DropSettingSelector(); public static final DropSettingSelector INSTANCE = new DropSettingSelector();
@Override @Override
public Object getSourceTypeSetting(Map configurationValues) { public Object getSourceTypeSetting(Map<?,?> configurationValues) {
return configurationValues.get( HBM2DDL_DROP_SOURCE ); return configurationValues.get( HBM2DDL_DROP_SOURCE );
} }
@Override @Override
public Object getScriptSourceSetting(Map configurationValues) { public Object getScriptSourceSetting(Map<?,?> configurationValues) {
return configurationValues.get( HBM2DDL_DROP_SCRIPT_SOURCE ); return configurationValues.get( HBM2DDL_DROP_SCRIPT_SOURCE );
} }
@Override @Override
public Object getScriptTargetSetting(Map configurationValues) { public Object getScriptTargetSetting(Map<?,?> configurationValues) {
return configurationValues.get( HBM2DDL_SCRIPTS_DROP_TARGET ); return configurationValues.get( HBM2DDL_SCRIPTS_DROP_TARGET );
} }
} }
@ -440,19 +537,19 @@ public class SchemaManagementToolCoordinator {
// for now we reuse the CREATE settings where applicable // for now we reuse the CREATE settings where applicable
@Override @Override
public Object getSourceTypeSetting(Map configurationValues) { public Object getSourceTypeSetting(Map<?,?> configurationValues) {
// for now, don't allow script source // for now, don't allow script source
return SourceType.METADATA; return SourceType.METADATA;
} }
@Override @Override
public Object getScriptSourceSetting(Map configurationValues) { public Object getScriptSourceSetting(Map<?,?> configurationValues) {
// for now, don't allow script source // for now, don't allow script source
return null; return null;
} }
@Override @Override
public Object getScriptTargetSetting(Map configurationValues) { public Object getScriptTargetSetting(Map<?,?> configurationValues) {
// for now, reuse the CREATE script target setting // for now, reuse the CREATE script target setting
return configurationValues.get( HBM2DDL_SCRIPTS_CREATE_TARGET ); return configurationValues.get( HBM2DDL_SCRIPTS_CREATE_TARGET );
} }
@ -464,14 +561,20 @@ public class SchemaManagementToolCoordinator {
* simultaneously * simultaneously
*/ */
public static class ActionGrouping { public static class ActionGrouping {
private final String contributor;
private final Action databaseAction; private final Action databaseAction;
private final Action scriptAction; private final Action scriptAction;
public ActionGrouping(Action databaseAction, Action scriptAction) { public ActionGrouping(String contributor, Action databaseAction, Action scriptAction) {
this.contributor = contributor;
this.databaseAction = databaseAction; this.databaseAction = databaseAction;
this.scriptAction = scriptAction; this.scriptAction = scriptAction;
} }
public String getContributor() {
return contributor;
}
public Action getDatabaseAction() { public Action getDatabaseAction() {
return databaseAction; return databaseAction;
} }
@ -480,6 +583,10 @@ public class SchemaManagementToolCoordinator {
return scriptAction; return scriptAction;
} }
/**
* For test use
*/
@Internal
public static ActionGrouping interpret(Map configurationValues) { public static ActionGrouping interpret(Map configurationValues) {
// interpret the JPA settings first // interpret the JPA settings first
Action databaseAction = Action.interpretJpaSetting( configurationValues.get( HBM2DDL_DATABASE_ACTION ) ); Action databaseAction = Action.interpretJpaSetting( configurationValues.get( HBM2DDL_DATABASE_ACTION ) );
@ -493,7 +600,69 @@ public class SchemaManagementToolCoordinator {
} }
} }
return new ActionGrouping( databaseAction, scriptAction ); return new ActionGrouping( "orm", databaseAction, scriptAction );
}
public static Set<ActionGrouping> interpret(Metadata metadata, Map<?,?> configurationValues) {
// these represent the base (non-contributor-specific) values
final Action rootDatabaseAction = Action.interpretJpaSetting( configurationValues.get( HBM2DDL_DATABASE_ACTION ) );
final Action rootScriptAction = Action.interpretJpaSetting( configurationValues.get( HBM2DDL_SCRIPTS_ACTION ) );
final Action rootExportAction = Action.interpretHbm2ddlSetting( configurationValues.get( HBM2DDL_AUTO ) );
final Set<String> contributors = metadata.getContributors();
final Set<ActionGrouping> groupings = new HashSet<>( contributors.size() );
// for each contributor, look for specific tooling config values
for ( String contributor : contributors ) {
final Object contributorDatabaseActionSetting = configurationValues.get( HBM2DDL_DATABASE_ACTION + "." + contributor );
final Object contributorScriptActionSetting = configurationValues.get( HBM2DDL_SCRIPTS_ACTION + "." + contributor );
final Object contributorExportActionSetting = configurationValues.get( HBM2DDL_AUTO + "." + contributor );
final Action contributorDatabaseAction = contributorDatabaseActionSetting == null
? rootDatabaseAction
: Action.interpretJpaSetting( contributorDatabaseActionSetting );
final Action contributorScriptAction = contributorScriptActionSetting == null
? rootScriptAction
: Action.interpretJpaSetting( contributorScriptActionSetting );
final Action contributorExportAction = contributorExportActionSetting == null
? rootExportAction
: Action.interpretJpaSetting( contributorExportActionSetting );
Action databaseAction = contributorDatabaseAction;
if ( databaseAction == Action.NONE && contributorScriptAction == Action.NONE ) {
if ( contributorExportAction != Action.NONE ) {
databaseAction = contributorExportAction;
}
if ( databaseAction == Action.NONE ) {
log.debugf( "No schema actions specified for contributor `%s`; doing nothing", contributor );
continue;
}
}
groupings.add( new ActionGrouping( contributor, databaseAction, contributorScriptAction ) );
}
return groupings;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
ActionGrouping that = (ActionGrouping) o;
return contributor.equals( that.contributor ) &&
databaseAction == that.databaseAction &&
scriptAction == that.scriptAction;
}
@Override
public int hashCode() {
return Objects.hash( contributor );
} }
} }
} }
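The contributor-specific keys read by `interpret(Metadata, Map)` above are simply the existing setting names suffixed with `.<contributor>`. A hedged configuration sketch (`metadata` is assumed to be in scope; the values use the standard JPA action names):

	import java.util.HashMap;
	import java.util.Map;
	import java.util.Set;
	import org.hibernate.cfg.AvailableSettings;
	import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator.ActionGrouping;

	// Sketch only: no action by default, but drop-and-create for the "orm" contributor.
	final Map<String, Object> settings = new HashMap<>();
	settings.put( AvailableSettings.HBM2DDL_DATABASE_ACTION, "none" );
	settings.put( AvailableSettings.HBM2DDL_DATABASE_ACTION + ".orm", "drop-and-create" );

	final Set<ActionGrouping> groupings = ActionGrouping.interpret( metadata, settings );
	// expected: a single grouping for contributor "orm" with database action CREATE_DROP;
	// contributors with no configured action are skipped, as in the loop above.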
View File
@ -21,7 +21,12 @@ public interface SchemaMigrator {
* *
* @param metadata Represents the schema to be altered. * @param metadata Represents the schema to be altered.
* @param options Options for executing the alteration * @param options Options for executing the alteration
* @param contributableInclusionFilter Filter for Contributable instances to use
* @param targetDescriptor description of the target(s) for the alteration commands * @param targetDescriptor description of the target(s) for the alteration commands
*/ */
void doMigration(Metadata metadata, ExecutionOptions options, TargetDescriptor targetDescriptor); void doMigration(
Metadata metadata,
ExecutionOptions options,
ContributableMatcher contributableInclusionFilter,
TargetDescriptor targetDescriptor);
} }
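`ContributableMatcher.ALL` (used by several of the updated tests below) restores the pre-change behaviour for callers that do not want to filter. A one-line sketch, with `schemaMigrator`, `metadata`, `options` and `targetDescriptor` assumed to be in scope:

	// Sketch only: migrate everything known to Hibernate, filtering nothing.
	schemaMigrator.doMigration( metadata, options, ContributableMatcher.ALL, targetDescriptor );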
View File
@ -11,8 +11,6 @@ import org.hibernate.boot.Metadata;
/** /**
* Service delegate for handling schema validations * Service delegate for handling schema validations
*
* @author Steve Ebersole
*/ */
@Incubating @Incubating
public interface SchemaValidator { public interface SchemaValidator {
@ -21,6 +19,7 @@ public interface SchemaValidator {
* *
* @param metadata Represents the schema to be validated * @param metadata Represents the schema to be validated
* @param options Options for executing the validation * @param options Options for executing the validation
* @param contributableInclusionFilter Filter for Contributable instances to use
*/ */
void doValidation(Metadata metadata, ExecutionOptions options); void doValidation(Metadata metadata, ExecutionOptions options, ContributableMatcher contributableInclusionFilter);
} }
View File
@ -11,6 +11,7 @@ import java.util.Properties;
import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.AvailableSettings;
import org.hibernate.tool.schema.Action; import org.hibernate.tool.schema.Action;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator; import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator.ActionGrouping;
import org.junit.Test; import org.junit.Test;
@ -26,7 +27,7 @@ public class SchemaToolingAutoActionTests {
final Properties props = new Properties(); final Properties props = new Properties();
props.put( AvailableSettings.HBM2DDL_AUTO, Action.CREATE_DROP ); props.put( AvailableSettings.HBM2DDL_AUTO, Action.CREATE_DROP );
final SchemaManagementToolCoordinator.ActionGrouping actionGrouping = SchemaManagementToolCoordinator.ActionGrouping.interpret( props ); final ActionGrouping actionGrouping = ActionGrouping.interpret( props );
assertThat( actionGrouping.getDatabaseAction(), is( Action.CREATE_DROP ) ); assertThat( actionGrouping.getDatabaseAction(), is( Action.CREATE_DROP ) );
View File
@ -175,6 +175,10 @@ public class GeneratedValueTests extends BaseUnitTestCase {
.getDefaultNamespace() .getDefaultNamespace()
.locateSequence( Identifier.toIdentifier( "my_db_sequence" ) ); .locateSequence( Identifier.toIdentifier( "my_db_sequence" ) );
assertThat( sequence, notNullValue() ); assertThat( sequence, notNullValue() );
assertThat( sequence.getName().getSequenceName().getText(), is( "my_db_sequence" ) );
assertThat( sequence.getInitialValue(), is( 100 ) );
assertThat( sequence.getIncrementSize(), is( 500 ) );
final String[] sqlCreateStrings = new H2Dialect().getSequenceExporter().getSqlCreateStrings( final String[] sqlCreateStrings = new H2Dialect().getSequenceExporter().getSqlCreateStrings(
sequence, sequence,
bootModel bootModel
@ -320,7 +324,7 @@ public class GeneratedValueTests extends BaseUnitTestCase {
@Entity @Entity
public static class ExplicitSequenceGeneratorImplicitNameEntity { public static class ExplicitSequenceGeneratorImplicitNameEntity {
/** /**
* This entity does not have explicit {@link SequenceGenerator} defined * This entity does have explicit {@link SequenceGenerator} defined
*/ */
@Id @Id
@GeneratedValue( strategy = GenerationType.SEQUENCE, generator = "my_db_sequence" ) @GeneratedValue( strategy = GenerationType.SEQUENCE, generator = "my_db_sequence" )
View File
@ -23,6 +23,7 @@ import org.hibernate.service.spi.SessionFactoryServiceRegistry;
import org.hibernate.stat.spi.StatisticsImplementor; import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.testing.TestForIssue; import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry;
import org.hibernate.testing.orm.junit.DomainModel; import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.ServiceRegistry; import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory; import org.hibernate.testing.orm.junit.SessionFactory;
@ -41,7 +42,7 @@ import static org.junit.jupiter.api.Assertions.fail;
"org/hibernate/orm/test/keymanytoone/bidir/component/EagerMapping.hbm.xml" "org/hibernate/orm/test/keymanytoone/bidir/component/EagerMapping.hbm.xml"
}) })
@SessionFactory(generateStatistics = true) @SessionFactory(generateStatistics = true)
@ServiceRegistry(integrators = EagerKeyManyToOneTest.CustomLoadIntegrator.class) @BootstrapServiceRegistry( integrators = EagerKeyManyToOneTest.CustomLoadIntegrator.class )
public class EagerKeyManyToOneTest { public class EagerKeyManyToOneTest {
public static class CustomLoadIntegrator implements Integrator { public static class CustomLoadIntegrator implements Integrator {
View File
@ -0,0 +1,23 @@
<?xml version="1.0"?>
<!--
~ Hibernate, Relational Persistence for Idiomatic Java
~
~ License: GNU Lesser General Public License (LGPL), version 2.1 or later
~ See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
-->
<!DOCTYPE hibernate-mapping PUBLIC
"-//Hibernate/Hibernate Mapping DTD 3.0//EN"
"http://www.hibernate.org/dtd/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class entity-name="DynamicEntity">
<id name="id" type="integer"/>
<natural-id>
<property name="referenceCode" type="string"/>
</natural-id>
<property name="data" type="string"/>
</class>
</hibernate-mapping>
View File
@ -0,0 +1,260 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.orm.test.metamodel.contributed;
import java.io.InputStream;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import org.hibernate.boot.jaxb.Origin;
import org.hibernate.boot.jaxb.SourceType;
import org.hibernate.boot.jaxb.hbm.spi.JaxbHbmHibernateMapping;
import org.hibernate.boot.jaxb.internal.MappingBinder;
import org.hibernate.boot.jaxb.spi.Binding;
import org.hibernate.boot.model.source.internal.hbm.MappingDocument;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.AdditionalJaxbMappingProducer;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.internal.SchemaDropperImpl;
import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.SourceDescriptor;
import org.hibernate.testing.hamcrest.CaseInsensitiveContainsMatcher;
import org.hibernate.testing.hamcrest.CaseInsensitiveStartsWithMatcher;
import org.hibernate.testing.hamcrest.CollectionElementMatcher;
import org.hibernate.testing.orm.JournalingGenerationTarget;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry.JavaService;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.DomainModelScope;
import org.junit.jupiter.api.Test;
import org.jboss.jandex.IndexView;
import org.hamcrest.Matchers;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.not;
/**
* @author Steve Ebersole
*/
@BootstrapServiceRegistry(
javaServices = @JavaService( role = AdditionalJaxbMappingProducer.class, impl = BasicContributorTests.Contributor.class )
)
@DomainModel( annotatedClasses = BasicContributorTests.MainEntity.class )
public class BasicContributorTests {
@Test
public void testContributorFiltering(DomainModelScope scope) {
final MetadataImplementor metadata = scope.getDomainModel();
assertThat( metadata.getEntityBindings().size(), Matchers.is( 2 ) );
final StandardServiceRegistry serviceRegistry = metadata
.getMetadataBuildingOptions()
.getServiceRegistry();
final Map settings = serviceRegistry.getService( ConfigurationService.class ).getSettings();
ExecutionOptions options = new ExecutionOptions() {
@Override
public Map getConfigurationValues() {
return settings;
}
@Override
public boolean shouldManageNamespaces() {
return false;
}
@Override
public ExceptionHandler getExceptionHandler() {
return Throwable::printStackTrace;
}
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
};
final SchemaManagementTool schemaManagementTool = serviceRegistry.getService( SchemaManagementTool.class );
final SourceDescriptor sourceDescriptor = new SourceDescriptor() {
@Override
public org.hibernate.tool.schema.SourceType getSourceType() {
return org.hibernate.tool.schema.SourceType.METADATA;
}
@Override
public ScriptSourceInput getScriptSourceInput() {
return null;
}
};
testDropping( metadata, settings, schemaManagementTool, sourceDescriptor, options );
testCreating( metadata, settings, schemaManagementTool, sourceDescriptor, options );
}
private void testCreating(
MetadataImplementor metadata,
Map settings,
SchemaManagementTool schemaManagementTool,
SourceDescriptor sourceDescriptor,
ExecutionOptions options) {
final SchemaCreatorImpl schemaCreator = (SchemaCreatorImpl) schemaManagementTool.getSchemaCreator( settings );
final Dialect dialect = new H2Dialect();
final JournalingGenerationTarget targetDescriptor = new JournalingGenerationTarget();
// first, unfiltered
targetDescriptor.clear();
schemaCreator.doCreation( metadata, dialect, options, contributed -> true, sourceDescriptor, targetDescriptor );
assertThat(
targetDescriptor.getCommands(),
CollectionElementMatcher.hasAllOf(
CaseInsensitiveContainsMatcher.contains( "main_table" ),
CaseInsensitiveContainsMatcher.contains( "DynamicEntity" )
)
);
// filter by `orm`
targetDescriptor.clear();
schemaCreator.doCreation( metadata, dialect, options, contributed -> "orm".equals( contributed.getContributor() ), sourceDescriptor, targetDescriptor );
assertThat(
targetDescriptor.getCommands(),
CollectionElementMatcher.hasAllOf( not( CaseInsensitiveContainsMatcher.contains( "DynamicEntity" ) ) )
);
// filter by `test`
targetDescriptor.clear();
schemaCreator.doCreation( metadata, dialect, options, contributed -> "test".equals( contributed.getContributor() ), sourceDescriptor, targetDescriptor );
assertThat(
targetDescriptor.getCommands(),
CollectionElementMatcher.hasAllOf( not( CaseInsensitiveContainsMatcher.contains( "main_table" ) ) )
);
}
private void testDropping(
MetadataImplementor metadata,
Map settings,
SchemaManagementTool schemaManagementTool,
SourceDescriptor sourceDescriptor, ExecutionOptions options) {
final SchemaDropperImpl schemaDropper = (SchemaDropperImpl) schemaManagementTool.getSchemaDropper( settings );
final JournalingGenerationTarget targetDescriptor = new JournalingGenerationTarget();
final Dialect dialect = new H2Dialect();
// first, unfiltered
targetDescriptor.clear();
schemaDropper.doDrop( metadata, options, contributed -> true, dialect, sourceDescriptor, targetDescriptor );
assertThat(
targetDescriptor.getCommands(),
CollectionElementMatcher.hasAllOf(
CaseInsensitiveStartsWithMatcher.startsWith( "drop table main_table" ),
CaseInsensitiveStartsWithMatcher.startsWith( "drop table DynamicEntity" )
)
);
// filter by `orm`
targetDescriptor.clear();
schemaDropper.doDrop( metadata, options, contributed -> "orm".equals( contributed.getContributor() ), dialect, sourceDescriptor, targetDescriptor );
assertThat(
targetDescriptor.getCommands(),
CollectionElementMatcher.hasAllOf( not( CaseInsensitiveStartsWithMatcher.startsWith( "drop table DynamicEntity" ) ) )
);
// filter by `test`
targetDescriptor.clear();
schemaDropper.doDrop( metadata, options, contributed -> "test".equals( contributed.getContributor() ), dialect, sourceDescriptor, targetDescriptor );
assertThat(
targetDescriptor.getCommands(),
CollectionElementMatcher.hasAllOf( not( CaseInsensitiveStartsWithMatcher.startsWith( "drop table main_table" ) ) )
);
}
@Entity( name = "MainEntity" )
@Table( name = "main_table" )
static class MainEntity {
@Id
private Integer id;
String name;
private MainEntity() {
}
public MainEntity(Integer id, String name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
public static class Contributor implements AdditionalJaxbMappingProducer {
public Contributor() {
}
@Override
public Collection<MappingDocument> produceAdditionalMappings(
MetadataImplementor metadata,
IndexView jandexIndex,
MappingBinder mappingBinder,
MetadataBuildingContext buildingContext) {
return Collections.singletonList( createMappingDocument( mappingBinder, buildingContext ) );
}
private MappingDocument createMappingDocument(MappingBinder mappingBinder, MetadataBuildingContext buildingContext) {
final Origin origin = new Origin( SourceType.OTHER, "test" );
final ClassLoaderService classLoaderService = buildingContext.getBootstrapContext()
.getServiceRegistry()
.getService( ClassLoaderService.class );
final InputStream inputStream = classLoaderService.locateResourceStream( "org/hibernate/orm/test/metamodel/contributed/BasicContributorTests.hbm.xml" );
final Binding<JaxbHbmHibernateMapping> jaxbBinding = mappingBinder.bind( inputStream, origin );
final JaxbHbmHibernateMapping jaxbRoot = jaxbBinding.getRoot();
return new MappingDocument(
"test",
jaxbRoot,
origin,
buildingContext
);
}
}
}
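The test above relies on `JournalingGenerationTarget` from the testing module to capture the generated DDL. As a rough, hypothetical stand-in (assuming the `GenerationTarget` SPI exposes `prepare()`, `accept(String)` and `release()`), a recording target might look like:

	import java.util.ArrayList;
	import java.util.List;
	import org.hibernate.tool.schema.internal.exec.GenerationTarget;

	// Hypothetical sketch of a journaling target: it simply records every DDL command.
	public class RecordingGenerationTarget implements GenerationTarget {
		private final List<String> commands = new ArrayList<>();

		@Override
		public void prepare() {
			// nothing to prepare - commands are only journaled
		}

		@Override
		public void accept(String command) {
			commands.add( command );
		}

		@Override
		public void release() {
			// nothing to release
		}

		public List<String> getCommands() {
			return commands;
		}

		public void clear() {
			commands.clear();
		}
	}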
View File
@ -0,0 +1,56 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.orm.test.metamodel.contributed;
import org.hibernate.boot.spi.AdditionalJaxbMappingProducer;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.RuntimeMetamodels;
import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.testing.orm.junit.BootstrapServiceRegistry;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.NotImplementedYet;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.nullValue;
/**
* @author Steve Ebersole
*/
@BootstrapServiceRegistry(
javaServices = @BootstrapServiceRegistry.JavaService( role = AdditionalJaxbMappingProducer.class, impl = BasicContributorTests.Contributor.class )
)
@ServiceRegistry(
settings = @Setting(
name = AvailableSettings.JPA_METAMODEL_POPULATION,
value = "ignoreUnsupported"
)
)
@DomainModel( annotatedClasses = BasicContributorTests.MainEntity.class )
@SessionFactory
public class EntityHidingTests {
@Test
@NotImplementedYet( reason = "Contributed entity hiding is not yet implemented", strict = false )
public void testModel(SessionFactoryScope scope) {
final SessionFactoryImplementor sessionFactory = scope.getSessionFactory();
final RuntimeMetamodels runtimeMetamodels = sessionFactory.getRuntimeMetamodels();
final EntityDomainType<Object> jpaModelDescriptor = runtimeMetamodels.getJpaMetamodel().entity( "DynamicEntity" );
assertThat( jpaModelDescriptor, nullValue() );
final EntityPersister mappingModelDescriptor = runtimeMetamodels.getMappingMetamodel()
.findEntityDescriptor( "DynamicEntity" );
assertThat( mappingModelDescriptor, nullValue() );
}
}
View File
@ -24,9 +24,11 @@ import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl; import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl; import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.internal.script.MultiLineSqlScriptExtracter; import org.hibernate.tool.schema.internal.script.MultiLineSqlScriptExtracter;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaCreator; import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.ScriptSourceInput; import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput; import org.hibernate.tool.schema.spi.ScriptTargetOutput;
import org.hibernate.tool.schema.spi.SourceDescriptor; import org.hibernate.tool.schema.spi.SourceDescriptor;
@ -87,6 +89,7 @@ public class StatementsWithoutTerminalCharsImportFileTest extends BaseUnitTestCa
schemaCreator.doCreation( schemaCreator.doCreation(
buildMappings( ssr ), buildMappings( ssr ),
this, this,
ContributableMatcher.ALL,
SourceDescriptorImpl.INSTANCE, SourceDescriptorImpl.INSTANCE,
TargetDescriptorImpl.INSTANCE TargetDescriptorImpl.INSTANCE
); );
@ -119,6 +122,11 @@ public class StatementsWithoutTerminalCharsImportFileTest extends BaseUnitTestCa
return ExceptionHandlerLoggedImpl.INSTANCE; return ExceptionHandlerLoggedImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
private static class SourceDescriptorImpl implements SourceDescriptor { private static class SourceDescriptorImpl implements SourceDescriptor {
/** /**
* Singleton access * Singleton access
View File
@ -60,7 +60,7 @@ public class ValueVisitorTest extends BaseUnitTestCase {
final MetadataImplementor metadata = final MetadataImplementor metadata =
(MetadataImplementor) new MetadataSources( serviceRegistry ) (MetadataImplementor) new MetadataSources( serviceRegistry )
.buildMetadata(); .buildMetadata();
final Table tbl = new Table(); final Table tbl = new Table( "orm" );
final RootClass rootClass = new RootClass( metadataBuildingContext ); final RootClass rootClass = new RootClass( metadataBuildingContext );
ValueVisitor vv = new ValueVisitorValidator(); ValueVisitor vv = new ValueVisitorValidator();
View File
@ -85,7 +85,7 @@ public class ExportIdentifierTest extends BaseUnitTestCase {
Set<String> exportIdentifierSet) { Set<String> exportIdentifierSet) {
for ( Namespace namespace : namespaces ) { for ( Namespace namespace : namespaces ) {
final Table table = new Table( namespace, Identifier.toIdentifier( name ), false ); final Table table = new Table( "orm", namespace, Identifier.toIdentifier( name ), false );
addExportIdentifier( table, exportIdentifierList, exportIdentifierSet ); addExportIdentifier( table, exportIdentifierList, exportIdentifierSet );
final ForeignKey foreignKey = new ForeignKey(); final ForeignKey foreignKey = new ForeignKey();
@ -118,6 +118,7 @@ public class ExportIdentifierTest extends BaseUnitTestCase {
for ( Namespace namespace : namespaces ) { for ( Namespace namespace : namespaces ) {
addExportIdentifier( addExportIdentifier(
new Sequence( new Sequence(
"orm",
namespace.getName().getCatalog(), namespace.getName().getCatalog(),
namespace.getName().getSchema(), namespace.getName().getSchema(),
Identifier.toIdentifier( name ) Identifier.toIdentifier( name )
@ -138,7 +139,7 @@ public class ExportIdentifierTest extends BaseUnitTestCase {
namespace, namespace,
"create", "create",
"drop", "drop",
Collections.<String>emptySet() Collections.emptySet()
), ),
exportIdentifierList, exportIdentifierList,
exportIdentifierSet exportIdentifierSet
@ -158,7 +159,7 @@ public class ExportIdentifierTest extends BaseUnitTestCase {
namespace, namespace,
"create", "create",
"drop", "drop",
Collections.<String>emptySet() Collections.emptySet()
), ),
exportIdentifierList, exportIdentifierList,
exportIdentifierSet exportIdentifierSet
View File
@ -22,9 +22,11 @@ import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.SourceType; import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType; import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.spi.CommandAcceptanceException; import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaDropper; import org.hibernate.tool.schema.spi.SchemaDropper;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput; import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput; import org.hibernate.tool.schema.spi.ScriptTargetOutput;
@ -58,8 +60,13 @@ public class SchemaDropTest extends BaseUnitTestCase implements ExecutionOptions
@Test @Test
public void testDropSequence() { public void testDropSequence() {
getSchemaDropper() getSchemaDropper().doDrop(
.doDrop( metadata, this, getSourceDescriptor(), getTargetDescriptor() ); metadata,
this,
ContributableMatcher.ALL,
getSourceDescriptor(),
getTargetDescriptor()
);
} }
private SchemaDropper getSchemaDropper() { private SchemaDropper getSchemaDropper() {
@ -109,6 +116,11 @@ public class SchemaDropTest extends BaseUnitTestCase implements ExecutionOptions
return this; return this;
} }
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
@Override @Override
public void handleException(CommandAcceptanceException exception) { public void handleException(CommandAcceptanceException exception) {
throw exception; throw exception;
View File
@ -25,8 +25,10 @@ import org.hibernate.mapping.Table;
import org.hibernate.tool.hbm2ddl.SchemaExport; import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.schema.TargetType; import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl; import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptTargetOutput; import org.hibernate.tool.schema.spi.ScriptTargetOutput;
import org.hibernate.tool.schema.spi.TargetDescriptor; import org.hibernate.tool.schema.spi.TargetDescriptor;
@ -66,6 +68,7 @@ public class SchemaUpdateTableBackedSequenceTest extends BaseUnitTestCase {
TableStructure tableStructure = new TableStructure( TableStructure tableStructure = new TableStructure(
database.getJdbcEnvironment(), database.getJdbcEnvironment(),
"orm",
new QualifiedTableName( null, null, Identifier.toIdentifier( "test_seq" ) ), new QualifiedTableName( null, null, Identifier.toIdentifier( "test_seq" ) ),
Identifier.toIdentifier( "nextval" ), Identifier.toIdentifier( "nextval" ),
20, 20,
@ -98,7 +101,13 @@ public class SchemaUpdateTableBackedSequenceTest extends BaseUnitTestCase {
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE; return ExceptionHandlerLoggedImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
}, },
ContributableMatcher.ALL,
new TargetDescriptor() { new TargetDescriptor() {
@Override @Override
public EnumSet<TargetType> getTargetTypes() { public EnumSet<TargetType> getTargetTypes() {
View File
@ -35,8 +35,10 @@ import org.hibernate.tool.schema.internal.SchemaDropperImpl;
import org.hibernate.tool.schema.internal.IndividuallySchemaMigratorImpl; import org.hibernate.tool.schema.internal.IndividuallySchemaMigratorImpl;
import org.hibernate.tool.schema.internal.exec.GenerationTarget; import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToStdout; import org.hibernate.tool.schema.internal.exec.GenerationTargetToStdout;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput; import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput; import org.hibernate.tool.schema.spi.ScriptTargetOutput;
@ -155,17 +157,24 @@ public class CrossSchemaForeignKeyGenerationTest extends BaseUnitTestCase {
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE; return ExceptionHandlerLoggedImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
}; };
new IndividuallySchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration( new IndividuallySchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration(
metadata, metadata,
options, options,
ContributableMatcher.ALL,
TargetDescriptorImpl.INSTANCE TargetDescriptorImpl.INSTANCE
); );
new IndividuallySchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration( new IndividuallySchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration(
metadata, metadata,
options, options,
ContributableMatcher.ALL,
TargetDescriptorImpl.INSTANCE TargetDescriptorImpl.INSTANCE
); );
@ -216,11 +225,17 @@ public class CrossSchemaForeignKeyGenerationTest extends BaseUnitTestCase {
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE; return ExceptionHandlerLoggedImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
}; };
new GroupedSchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration( new GroupedSchemaMigratorImpl( tool, DefaultSchemaFilter.INSTANCE ).doMigration(
metadata, metadata,
options, options,
ContributableMatcher.ALL,
TargetDescriptorImpl.INSTANCE TargetDescriptorImpl.INSTANCE
); );
View File
@ -31,8 +31,10 @@ import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.internal.HibernateSchemaManagementTool; import org.hibernate.tool.schema.internal.HibernateSchemaManagementTool;
import org.hibernate.tool.schema.internal.IndividuallySchemaMigratorImpl; import org.hibernate.tool.schema.internal.IndividuallySchemaMigratorImpl;
import org.hibernate.tool.schema.internal.exec.ScriptTargetOutputToFile; import org.hibernate.tool.schema.internal.exec.ScriptTargetOutputToFile;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptTargetOutput; import org.hibernate.tool.schema.spi.ScriptTargetOutput;
import org.hibernate.tool.schema.spi.TargetDescriptor; import org.hibernate.tool.schema.spi.TargetDescriptor;
@ -86,6 +88,11 @@ public class UniqueConstraintDropTest {
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE; return ExceptionHandlerLoggedImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
}; };
} }
@ -102,6 +109,7 @@ public class UniqueConstraintDropTest {
.doMigration( .doMigration(
metadata, metadata,
options, options,
ContributableMatcher.ALL,
new TargetDescriptorImpl() new TargetDescriptorImpl()
); );
View File
@ -25,8 +25,10 @@ import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.SourceType; import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType; import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl; import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput; import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput; import org.hibernate.tool.schema.spi.ScriptTargetOutput;
@ -94,7 +96,8 @@ public class LongVarcharValidationTest implements ExecutionOptions {
private void doValidation(MetadataImplementor metadata) { private void doValidation(MetadataImplementor metadata) {
ssr.getService( SchemaManagementTool.class ).getSchemaValidator( null ).doValidation( ssr.getService( SchemaManagementTool.class ).getSchemaValidator( null ).doValidation(
metadata, metadata,
this this,
ContributableMatcher.ALL
); );
} }
@ -102,6 +105,7 @@ public class LongVarcharValidationTest implements ExecutionOptions {
ssr.getService( SchemaManagementTool.class ).getSchemaCreator( null ).doCreation( ssr.getService( SchemaManagementTool.class ).getSchemaCreator( null ).doCreation(
metadata, metadata,
this, this,
ContributableMatcher.ALL,
new SourceDescriptor() { new SourceDescriptor() {
@Override @Override
public SourceType getSourceType() { public SourceType getSourceType() {
@ -131,6 +135,7 @@ public class LongVarcharValidationTest implements ExecutionOptions {
ssr.getService( SchemaManagementTool.class ).getSchemaDropper( null ).doDrop( ssr.getService( SchemaManagementTool.class ).getSchemaDropper( null ).doDrop(
metadata, metadata,
this, this,
ContributableMatcher.ALL,
new SourceDescriptor() { new SourceDescriptor() {
@Override @Override
public SourceType getSourceType() { public SourceType getSourceType() {
@ -179,4 +184,9 @@ public class LongVarcharValidationTest implements ExecutionOptions {
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE; return ExceptionHandlerLoggedImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
} }
View File
@ -26,8 +26,10 @@ import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.SourceType; import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType; import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl; import org.hibernate.tool.schema.internal.ExceptionHandlerLoggedImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.ScriptSourceInput; import org.hibernate.tool.schema.spi.ScriptSourceInput;
import org.hibernate.tool.schema.spi.ScriptTargetOutput; import org.hibernate.tool.schema.spi.ScriptTargetOutput;
@ -35,6 +37,7 @@ import org.hibernate.tool.schema.spi.SourceDescriptor;
import org.hibernate.tool.schema.spi.TargetDescriptor; import org.hibernate.tool.schema.spi.TargetDescriptor;
import org.hibernate.testing.TestForIssue; import org.hibernate.testing.TestForIssue;
import org.hibernate.test.legacy.S;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@ -102,13 +105,14 @@ public class NumericValidationTest implements ExecutionOptions {
private void doValidation() { private void doValidation() {
ssr.getService( SchemaManagementTool.class ).getSchemaValidator( null ) ssr.getService( SchemaManagementTool.class ).getSchemaValidator( null )
.doValidation( metadata, this ); .doValidation( metadata, this, ContributableMatcher.ALL );
} }
private void createSchema() { private void createSchema() {
ssr.getService( SchemaManagementTool.class ).getSchemaCreator( null ).doCreation( ssr.getService( SchemaManagementTool.class ).getSchemaCreator( null ).doCreation(
metadata, metadata,
this, this,
ContributableMatcher.ALL,
new SourceDescriptor() { new SourceDescriptor() {
@Override @Override
public SourceType getSourceType() { public SourceType getSourceType() {
@ -162,4 +166,9 @@ public class NumericValidationTest implements ExecutionOptions {
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE; return ExceptionHandlerLoggedImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
} }
View File
@ -75,6 +75,10 @@ public class StoredProcedureResultSetMappingTest extends BaseUnitTestCase {
} }
public static class ProcedureDefinition implements AuxiliaryDatabaseObject { public static class ProcedureDefinition implements AuxiliaryDatabaseObject {
public ProcedureDefinition() {
}
@Override @Override
public boolean appliesToDialect(Dialect dialect) { public boolean appliesToDialect(Dialect dialect) {
return true; return true;
View File
@ -12,6 +12,7 @@ import java.util.Map;
import org.hibernate.tool.schema.spi.CommandAcceptanceException; import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
/** /**
* @author Steve Ebersole * @author Steve Ebersole
@ -38,6 +39,11 @@ public class ExecutionOptionsTestImpl implements ExecutionOptions, ExceptionHand
return this; return this;
} }
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
@Override @Override
public void handleException(CommandAcceptanceException exception) { public void handleException(CommandAcceptanceException exception) {
throw exception; throw exception;
View File
@ -10,6 +10,7 @@ import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.dialect.H2Dialect; import org.hibernate.dialect.H2Dialect;
import org.hibernate.tool.schema.internal.DefaultSchemaFilter; import org.hibernate.tool.schema.internal.DefaultSchemaFilter;
import org.hibernate.tool.schema.internal.GroupedSchemaValidatorImpl; import org.hibernate.tool.schema.internal.GroupedSchemaValidatorImpl;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.testing.RequiresDialect; import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.TestForIssue; import org.hibernate.testing.TestForIssue;
@ -23,6 +24,6 @@ public class GroupedSchemaValidatorImplTest extends IndividuallySchemaValidatorI
@Override @Override
protected void getSchemaValidator(MetadataImplementor metadata) { protected void getSchemaValidator(MetadataImplementor metadata) {
new GroupedSchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE ) new GroupedSchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE )
.doValidation( metadata, executionOptions ); .doValidation( metadata, executionOptions, ContributableMatcher.ALL );
} }
} }

View File

@ -36,8 +36,10 @@ import org.hibernate.tool.schema.internal.IndividuallySchemaValidatorImpl;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl; import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.internal.SchemaDropperImpl; import org.hibernate.tool.schema.internal.SchemaDropperImpl;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase; import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException; import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.SchemaValidator; import org.hibernate.tool.schema.spi.SchemaValidator;
@ -114,6 +116,11 @@ public class IndividuallySchemaValidatorImplConnectionTest extends BaseUnitTestC
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE; return ExceptionHandlerLoggedImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
}; };
} }
@ -171,7 +178,7 @@ public class IndividuallySchemaValidatorImplConnectionTest extends BaseUnitTestC
SchemaValidator schemaValidator = new IndividuallySchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE ); SchemaValidator schemaValidator = new IndividuallySchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE );
assertFalse( connection.getAutoCommit() ); assertFalse( connection.getAutoCommit() );
schemaValidator.doValidation( metadata, executionOptions ); schemaValidator.doValidation( metadata, executionOptions, ContributableMatcher.ALL );
assertFalse( connection.getAutoCommit() ); assertFalse( connection.getAutoCommit() );
} }
finally { finally {

View File

@ -33,8 +33,10 @@ import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.internal.SchemaDropperImpl; import org.hibernate.tool.schema.internal.SchemaDropperImpl;
import org.hibernate.tool.schema.internal.IndividuallySchemaValidatorImpl; import org.hibernate.tool.schema.internal.IndividuallySchemaValidatorImpl;
import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase; import org.hibernate.tool.schema.internal.exec.GenerationTargetToDatabase;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExceptionHandler; import org.hibernate.tool.schema.spi.ExceptionHandler;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException; import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementTool;
@ -96,6 +98,11 @@ public class IndividuallySchemaValidatorImplTest extends BaseUnitTestCase {
public ExceptionHandler getExceptionHandler() { public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE; return ExceptionHandlerLoggedImpl.INSTANCE;
} }
@Override
public SchemaFilter getSchemaFilter() {
return SchemaFilter.ALL;
}
}; };
} }
@ -250,7 +257,7 @@ public class IndividuallySchemaValidatorImplTest extends BaseUnitTestCase {
protected void getSchemaValidator(MetadataImplementor metadata) { protected void getSchemaValidator(MetadataImplementor metadata) {
new IndividuallySchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE ) new IndividuallySchemaValidatorImpl( tool, DefaultSchemaFilter.INSTANCE )
.doValidation( metadata, executionOptions ); .doValidation( metadata, executionOptions, ContributableMatcher.ALL );
} }
protected Properties properties() { protected Properties properties() {

View File

@ -6,7 +6,6 @@
*/ */
package org.hibernate.test.tool.schema; package org.hibernate.test.tool.schema;
import java.sql.SQLSyntaxErrorException;
import java.util.Collections; import java.util.Collections;
import java.util.EnumSet; import java.util.EnumSet;
import javax.persistence.Entity; import javax.persistence.Entity;
@ -24,6 +23,7 @@ import org.hibernate.resource.transaction.spi.TransactionCoordinatorBuilder;
import org.hibernate.tool.schema.SourceType; import org.hibernate.tool.schema.SourceType;
import org.hibernate.tool.schema.TargetType; import org.hibernate.tool.schema.TargetType;
import org.hibernate.tool.schema.spi.CommandAcceptanceException; import org.hibernate.tool.schema.spi.CommandAcceptanceException;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.SchemaCreator; import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaDropper; import org.hibernate.tool.schema.spi.SchemaDropper;
import org.hibernate.tool.schema.spi.SchemaManagementTool; import org.hibernate.tool.schema.spi.SchemaManagementTool;
@ -95,6 +95,7 @@ public class SchemaToolTransactionHandlingTest extends BaseUnitTestCase {
schemaDropper.doDrop( schemaDropper.doDrop(
mappings, mappings,
ExecutionOptionsTestImpl.INSTANCE, ExecutionOptionsTestImpl.INSTANCE,
ContributableMatcher.ALL,
SourceDescriptorImpl.INSTANCE, SourceDescriptorImpl.INSTANCE,
TargetDescriptorImpl.INSTANCE TargetDescriptorImpl.INSTANCE
); );
@ -104,6 +105,7 @@ public class SchemaToolTransactionHandlingTest extends BaseUnitTestCase {
schemaCreator.doCreation( schemaCreator.doCreation(
mappings, mappings,
ExecutionOptionsTestImpl.INSTANCE, ExecutionOptionsTestImpl.INSTANCE,
ContributableMatcher.ALL,
SourceDescriptorImpl.INSTANCE, SourceDescriptorImpl.INSTANCE,
TargetDescriptorImpl.INSTANCE TargetDescriptorImpl.INSTANCE
); );
@ -113,6 +115,7 @@ public class SchemaToolTransactionHandlingTest extends BaseUnitTestCase {
schemaDropper.doDrop( schemaDropper.doDrop(
mappings, mappings,
ExecutionOptionsTestImpl.INSTANCE, ExecutionOptionsTestImpl.INSTANCE,
ContributableMatcher.ALL,
SourceDescriptorImpl.INSTANCE, SourceDescriptorImpl.INSTANCE,
TargetDescriptorImpl.INSTANCE TargetDescriptorImpl.INSTANCE
); );
@ -166,6 +169,7 @@ public class SchemaToolTransactionHandlingTest extends BaseUnitTestCase {
smt.getSchemaCreator( Collections.emptyMap() ).doCreation( smt.getSchemaCreator( Collections.emptyMap() ).doCreation(
mappings, mappings,
ExecutionOptionsTestImpl.INSTANCE, ExecutionOptionsTestImpl.INSTANCE,
ContributableMatcher.ALL,
SourceDescriptorImpl.INSTANCE, SourceDescriptorImpl.INSTANCE,
TargetDescriptorImpl.INSTANCE TargetDescriptorImpl.INSTANCE
); );
@ -177,7 +181,8 @@ public class SchemaToolTransactionHandlingTest extends BaseUnitTestCase {
try { try {
smt.getSchemaValidator( Collections.emptyMap() ).doValidation( smt.getSchemaValidator( Collections.emptyMap() ).doValidation(
mappings, mappings,
ExecutionOptionsTestImpl.INSTANCE ExecutionOptionsTestImpl.INSTANCE,
ContributableMatcher.ALL
); );
} }
finally { finally {
@ -185,6 +190,7 @@ public class SchemaToolTransactionHandlingTest extends BaseUnitTestCase {
smt.getSchemaDropper( Collections.emptyMap() ).doDrop( smt.getSchemaDropper( Collections.emptyMap() ).doDrop(
mappings, mappings,
ExecutionOptionsTestImpl.INSTANCE, ExecutionOptionsTestImpl.INSTANCE,
ContributableMatcher.ALL,
SourceDescriptorImpl.INSTANCE, SourceDescriptorImpl.INSTANCE,
TargetDescriptorImpl.INSTANCE TargetDescriptorImpl.INSTANCE
); );

View File

@ -30,6 +30,7 @@ import org.hibernate.tool.schema.extract.spi.NameSpaceTablesInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation; import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.internal.AbstractSchemaMigrator; import org.hibernate.tool.schema.internal.AbstractSchemaMigrator;
import org.hibernate.tool.schema.internal.exec.GenerationTarget; import org.hibernate.tool.schema.internal.exec.GenerationTarget;
import org.hibernate.tool.schema.spi.ContributableMatcher;
import org.hibernate.tool.schema.spi.ExecutionOptions; import org.hibernate.tool.schema.spi.ExecutionOptions;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@ -53,10 +54,19 @@ public class CheckForExistingForeignKeyTest {
* Needed implementation. Not used in test. * Needed implementation. Not used in test.
*/ */
@Override @Override
protected NameSpaceTablesInformation performTablesMigration(Metadata metadata, DatabaseInformation existingDatabase, ExecutionOptions options, protected NameSpaceTablesInformation performTablesMigration(
Metadata metadata,
DatabaseInformation existingDatabase,
ExecutionOptions options,
ContributableMatcher inclusionFilter,
Dialect dialect, Dialect dialect,
Formatter formatter, Set<String> exportIdentifiers, boolean tryToCreateCatalogs, boolean tryToCreateSchemas, Formatter formatter,
Set<Identifier> exportedCatalogs, Namespace namespace, GenerationTarget[] targets) { Set<String> exportIdentifiers,
boolean tryToCreateCatalogs,
boolean tryToCreateSchemas,
Set<Identifier> exportedCatalogs,
Namespace namespace,
GenerationTarget[] targets) {
return null; return null;
} }
} }
@ -193,7 +203,7 @@ public class CheckForExistingForeignKeyTest {
ForeignKey foreignKey = new ForeignKey(); ForeignKey foreignKey = new ForeignKey();
foreignKey.setName( "objectId2id" ); foreignKey.setName( "objectId2id" );
foreignKey.addColumn( new Column( "id" ) ); foreignKey.addColumn( new Column( "id" ) );
foreignKey.setReferencedTable( new Table( "table2" ) ); foreignKey.setReferencedTable( new Table( "orm", "table2" ) );
InformationExtractor informationExtractor = Mockito.mock( InformationExtractor.class ); InformationExtractor informationExtractor = Mockito.mock( InformationExtractor.class );
IdentifierHelper identifierHelper = new IdentifierHelperImpl(); IdentifierHelper identifierHelper = new IdentifierHelperImpl();
@ -230,7 +240,7 @@ public class CheckForExistingForeignKeyTest {
ForeignKey foreignKey = new ForeignKey(); ForeignKey foreignKey = new ForeignKey();
foreignKey.setName( "objectId2id_1" ); foreignKey.setName( "objectId2id_1" );
foreignKey.addColumn( new Column( "id" ) ); foreignKey.addColumn( new Column( "id" ) );
foreignKey.setReferencedTable( new Table( "table2" ) ); foreignKey.setReferencedTable( new Table( "orm", "table2" ) );
InformationExtractor informationExtractor = Mockito.mock( InformationExtractor.class ); InformationExtractor informationExtractor = Mockito.mock( InformationExtractor.class );
IdentifierHelper identifierHelper = new IdentifierHelperImpl(); IdentifierHelper identifierHelper = new IdentifierHelperImpl();
@ -267,7 +277,7 @@ public class CheckForExistingForeignKeyTest {
ForeignKey foreignKey = new ForeignKey(); ForeignKey foreignKey = new ForeignKey();
foreignKey.setName( "objectId2id_1" ); // Make sure the match is not successful based on key name foreignKey.setName( "objectId2id_1" ); // Make sure the match is not successful based on key name
foreignKey.addColumn( new Column( "id" ) ); foreignKey.addColumn( new Column( "id" ) );
foreignKey.setReferencedTable( new Table( "table2" ) ); foreignKey.setReferencedTable( new Table( "orm", "table2" ) );
Name schemaName = new Name( new Identifier( "-", false ), new Identifier( "-", false ) ); Name schemaName = new Name( new Identifier( "-", false ), new Identifier( "-", false ) );
InformationExtractor informationExtractor = Mockito.mock( InformationExtractor.class ); InformationExtractor informationExtractor = Mockito.mock( InformationExtractor.class );
@ -305,7 +315,7 @@ public class CheckForExistingForeignKeyTest {
ForeignKey foreignKey = new ForeignKey(); ForeignKey foreignKey = new ForeignKey();
foreignKey.setName( "objectId2id_1" ); // Make sure the match is not successful based on key name foreignKey.setName( "objectId2id_1" ); // Make sure the match is not successful based on key name
foreignKey.addColumn( new Column( "id" ) ); foreignKey.addColumn( new Column( "id" ) );
foreignKey.setReferencedTable( new Table( "table2" ) ); foreignKey.setReferencedTable( new Table( "orm", "table2" ) );
Name schemaName = new Name( new Identifier( "-", false ), new Identifier( "-", false ) ); Name schemaName = new Name( new Identifier( "-", false ), new Identifier( "-", false ) );
InformationExtractor informationExtractor = Mockito.mock( InformationExtractor.class ); InformationExtractor informationExtractor = Mockito.mock( InformationExtractor.class );

View File

@ -65,29 +65,26 @@ public class AdditionalJaxbMappingProducerImpl implements AdditionalJaxbMappingP
// atm we do not have distinct origin info for envers // atm we do not have distinct origin info for envers
final Origin origin = new Origin( SourceType.OTHER, "envers" ); final Origin origin = new Origin( SourceType.OTHER, "envers" );
final MappingCollector mappingCollector = new MappingCollector() { final MappingCollector mappingCollector = (document) -> {
@Override logXml( document );
public void addDocument(Document document) throws DocumentException {
logXml( document );
final ByteArrayOutputStream baos = new ByteArrayOutputStream(); final ByteArrayOutputStream baos = new ByteArrayOutputStream();
try { try {
final Writer w = new BufferedWriter( new OutputStreamWriter( baos, "UTF-8" ) ); final Writer w = new BufferedWriter( new OutputStreamWriter( baos, "UTF-8" ) );
final XMLWriter xw = new XMLWriter( w, new OutputFormat( " ", true ) ); final XMLWriter xw = new XMLWriter( w, new OutputFormat( " ", true ) );
xw.write( document ); xw.write( document );
w.flush(); w.flush();
}
catch (IOException e) {
throw new HibernateException( "Unable to bind Envers-generated XML", e );
}
ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream( baos.toByteArray() );
BufferedInputStream bufferedInputStream = new BufferedInputStream( byteArrayInputStream );
final Binding<JaxbHbmHibernateMapping> jaxbBinding = mappingBinder.bind( bufferedInputStream, origin );
final JaxbHbmHibernateMapping jaxbRoot = jaxbBinding.getRoot();
additionalMappingDocuments.add( new MappingDocument( jaxbRoot, origin, buildingContext ) );
} }
catch (IOException e) {
throw new HibernateException( "Unable to bind Envers-generated XML", e );
}
ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream( baos.toByteArray() );
BufferedInputStream bufferedInputStream = new BufferedInputStream( byteArrayInputStream );
final Binding<JaxbHbmHibernateMapping> jaxbBinding = mappingBinder.bind( bufferedInputStream, origin );
final JaxbHbmHibernateMapping jaxbRoot = jaxbBinding.getRoot();
additionalMappingDocuments.add( new MappingDocument( "envers", jaxbRoot, origin, buildingContext ) );
}; };
enversService.initialize( metadata, mappingCollector ); enversService.initialize( metadata, mappingCollector );

View File

@ -24,15 +24,15 @@ public class OrderedSequenceStructure extends SequenceStructure {
private static final String ORDER = " ORDER"; private static final String ORDER = " ORDER";
private AuxiliaryDatabaseObject sequenceObject; private final AuxiliaryDatabaseObject sequenceObject;
public OrderedSequenceStructure( public OrderedSequenceStructure(
JdbcEnvironment jdbcEnvironment, JdbcEnvironment jdbcEnvironment,
QualifiedName qualifiedSequenceName, QualifiedName qualifiedSequenceName,
int initialValue, int initialValue,
int incrementSize, int incrementSize,
Class numberType) { Class<?> numberType) {
super( jdbcEnvironment, qualifiedSequenceName, initialValue, incrementSize, numberType ); super( jdbcEnvironment, "envers", qualifiedSequenceName, initialValue, incrementSize, numberType );
this.sequenceObject = new OrderedSequence(); this.sequenceObject = new OrderedSequence();
} }
@ -83,6 +83,7 @@ public class OrderedSequenceStructure extends SequenceStructure {
getSourceIncrementSize() getSourceIncrementSize()
); );
//noinspection deprecation
if ( dialect instanceof Oracle8iDialect ) { if ( dialect instanceof Oracle8iDialect ) {
for ( int i = 0; i < createStrings.length; ++i ) { for ( int i = 0; i < createStrings.length; ++i ) {
createStrings[ i ] = createStrings[ i ] + ORDER; createStrings[ i ] = createStrings[ i ] + ORDER;
@ -94,7 +95,7 @@ public class OrderedSequenceStructure extends SequenceStructure {
@Override @Override
public String[] sqlDropStrings(Dialect dialect) { public String[] sqlDropStrings(Dialect dialect) {
return dialect.getDropSequenceStrings( getName() ); return dialect.getSequenceSupport().getDropSequenceStrings( getName() );
} }
} }
} }

View File

@ -51,6 +51,7 @@ public class ExportIdentifierTest extends BaseUnitTestCase {
for ( Namespace namespace : database.getNamespaces() ) { for ( Namespace namespace : database.getNamespaces() ) {
final SequenceStructure sequenceStructure = new SequenceStructure( final SequenceStructure sequenceStructure = new SequenceStructure(
ssr.getService( JdbcEnvironment.class ), ssr.getService( JdbcEnvironment.class ),
"envers",
new QualifiedNameImpl( new QualifiedNameImpl(
namespace.getName(), namespace.getName(),
Identifier.toIdentifier( "aSequence" ) Identifier.toIdentifier( "aSequence" )

View File

@ -0,0 +1,97 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.orm.test.envers;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.envers.Audited;
import org.hibernate.envers.DefaultRevisionEntity;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.DomainModelScope;
import org.junit.jupiter.api.Test;
import org.hamcrest.Matchers;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
/**
* @author Steve Ebersole
*/
@DomainModel(
annotatedClasses = ModelContributorSmokeTests.SimpleEntity.class
)
public class ModelContributorSmokeTests {
@Test
public void simpleModelContributorTest(DomainModelScope scope) {
final MetadataImplementor domainModel = scope.getDomainModel();
// Should be 3
// 1) SimpleEntity
// 2) Envers' DefaultRevisionEntity
// 3) Envers' "shadow" of the domain entity (SimpleEntity_AUD)
assertThat( domainModel.getEntityBindings().size(), is( 3 ) );
checkModel(
domainModel.getEntityBinding( SimpleEntity.class.getName() ),
"orm"
);
checkModel(
domainModel.getEntityBinding( DefaultRevisionEntity.class.getName() ),
"envers"
);
checkModel(
domainModel.getEntityBinding( SimpleEntity.class.getName() + "_AUD" ),
"envers"
);
}
private void checkModel(PersistentClass entityBinding, String expectedContributor) {
assertThat( entityBinding.getContributor(), is( expectedContributor ) );
assertThat( entityBinding.getRootTable().getContributor(), is( expectedContributor ) );
}
@Entity( name = "SimpleEntity" )
@Table( name = "simple" )
@Audited
public static class SimpleEntity {
@Id
private Integer id;
String name;
public SimpleEntity() {
}
public SimpleEntity(Integer id, String name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
private void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
}

View File

@ -0,0 +1,94 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.orm.test.envers;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.envers.Audited;
import org.hibernate.envers.DefaultRevisionEntity;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.metamodel.RuntimeMetamodels;
import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.testing.orm.junit.DomainModel;
import org.hibernate.testing.orm.junit.DomainModelScope;
import org.hibernate.testing.orm.junit.ServiceRegistry;
import org.hibernate.testing.orm.junit.SessionFactory;
import org.hibernate.testing.orm.junit.SessionFactoryScope;
import org.hibernate.testing.orm.junit.Setting;
import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
/**
* @author Steve Ebersole
*/
@ServiceRegistry(
settings = @Setting(
name = AvailableSettings.JPA_METAMODEL_POPULATION,
value = "ignoreUnsupported"
)
)
@DomainModel(
annotatedClasses = RuntimeModelSmokeTests.SimpleEntity.class
)
@SessionFactory
public class RuntimeModelSmokeTests {
public static final String FULL_NAME = "org.hibernate.orm.test.envers.RuntimeModelSmokeTests$SimpleEntity_AUD";
public static final String SIMPLE_NAME = "SimpleEntity_AUD";
@Test
public void basicTest(SessionFactoryScope scope) {
final RuntimeMetamodels runtimeMetamodels = scope.getSessionFactory().getRuntimeMetamodels();
final EntityPersister mappingType = runtimeMetamodels.getMappingMetamodel().findEntityDescriptor( FULL_NAME );
assertThat( mappingType, notNullValue() );
final EntityDomainType<Object> jpaType = runtimeMetamodels.getJpaMetamodel().entity( SIMPLE_NAME );
assertThat( jpaType, notNullValue() );
}
@Entity( name = "SimpleEntity" )
@Table( name = "simple" )
@Audited
public static class SimpleEntity {
@Id
private Integer id;
String name;
public SimpleEntity() {
}
public SimpleEntity(Integer id, String name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
private void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
}

View File

@ -0,0 +1,73 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.boot;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
/**
* @author Steve Ebersole
*/
public class ExtraJavaServicesClassLoaderService extends ClassLoaderServiceImpl {
private final List<JavaServiceDescriptor<?>> extraJavaServices;
public ExtraJavaServicesClassLoaderService(List<JavaServiceDescriptor<?>> extraJavaServices) {
this.extraJavaServices = extraJavaServices;
}
@Override
public <S> Collection<S> loadJavaServices(Class<S> serviceContract) {
final Collection<S> baseServices = super.loadJavaServices( serviceContract );
final List<S> services = new ArrayList<>( baseServices );
applyExtraJavaServices( serviceContract, services );
return services;
}
private <S> void applyExtraJavaServices(Class<S> serviceContract, List<S> services) {
extraJavaServices.forEach(
(javaServiceDescriptor) -> {
if ( serviceContract.isAssignableFrom( javaServiceDescriptor.role ) ) {
try {
final Object serviceInstance = javaServiceDescriptor.impl.getDeclaredConstructor().newInstance();
//noinspection unchecked
services.add( (S) serviceInstance );
}
catch (NoSuchMethodException | IllegalAccessException e) {
throw new RuntimeException( "Unable to access constructor for specified 'extra' Java service : " + javaServiceDescriptor.impl.getName(), e );
}
catch (InstantiationException | InvocationTargetException e) {
throw new RuntimeException( "Unable to instantiate specified 'extra' Java service : " + javaServiceDescriptor.impl.getName(), e );
}
}
}
);
}
public static class JavaServiceDescriptor<ROLE> {
private final Class<ROLE> role;
private final Class<? extends ROLE> impl;
public JavaServiceDescriptor(Class<ROLE> role, Class<? extends ROLE> impl) {
this.role = role;
this.impl = impl;
}
public Class<ROLE> getRole() {
return role;
}
public Class<? extends ROLE> getImpl() {
return impl;
}
}
}

View File

@ -85,4 +85,9 @@ public class MetadataBuildingContextTestingImpl implements MetadataBuildingConte
public TypeDefinitionRegistryStandardImpl getTypeDefinitionRegistry() { public TypeDefinitionRegistryStandardImpl getTypeDefinitionRegistry() {
return typeDefinitionRegistry; return typeDefinitionRegistry;
} }
@Override
public String getCurrentContributorName() {
return "orm";
}
} }

View File

@ -0,0 +1,40 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.hamcrest;
import java.util.Locale;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
/**
* @author Steve Ebersole
*/
public class CaseInsensitiveContainsMatcher extends TypeSafeMatcher<String> {
private final String match;
public CaseInsensitiveContainsMatcher(String match) {
this.match = match.toLowerCase( Locale.ROOT );
}
public static Matcher<String> contains(String expected) {
expected = expected.toLowerCase( Locale.ROOT );
return new CaseInsensitiveContainsMatcher( expected );
}
@Override
protected boolean matchesSafely(String string) {
final String normalized = string.toLowerCase( Locale.ROOT ).trim();
return normalized.contains( match );
}
@Override
public void describeTo(Description description) {
description.appendText( "contains (case insensitive)" ).appendValue( match );
}
}
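Illustrative usage of the new matcher (the SQL string is an example only):

assertThat(
        "CREATE TABLE simple (id integer not null, primary key (id))",
        CaseInsensitiveContainsMatcher.contains( "create table simple" )
);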

View File

@ -0,0 +1,39 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.hamcrest;
import java.util.Locale;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
/**
* @author Steve Ebersole
*/
public class CaseInsensitiveStartsWithMatcher extends TypeSafeMatcher<String> {
private final String match;
public CaseInsensitiveStartsWithMatcher(String match) {
this.match = match.toLowerCase( Locale.ROOT );
}
public static Matcher<String> startsWith(String expected) {
return new CaseInsensitiveStartsWithMatcher( expected );
}
@Override
protected boolean matchesSafely(String string) {
final String normalized = string.toLowerCase( Locale.ROOT ).trim();
return normalized.startsWith( match );
}
@Override
public void describeTo(Description description) {
description.appendText( "starts with (case insensitive)" ).appendValue( match );
}
}

View File

@ -0,0 +1,53 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.hamcrest;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
/**
* @author Steve Ebersole
*/
public class CollectionElementMatcher<E,C extends Collection<E>> extends BaseMatcher<C> {
public static <T> Matcher<Collection<T>> hasAllOf(Matcher<T>... elementMatchers) {
return new CollectionElementMatcher<>( elementMatchers );
}
private final List<Matcher<E>> elementMatchers;
public CollectionElementMatcher(Matcher<E>... elementMatchers) {
this.elementMatchers = Arrays.asList( elementMatchers );
}
@Override
public boolean matches(Object o) {
assert o instanceof Collection;
final Collection collection = (Collection) o;
outer: for ( Matcher<E> valueMatcher : elementMatchers ) {
for ( Object value : collection ) {
if ( valueMatcher.matches( value ) ) {
continue outer;
}
}
return false;
}
return true;
}
@Override
public void describeTo(Description description) {
description.appendText( "contained" );
}
}
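Illustrative usage, combining this matcher with the case-insensitive matchers above to assert that a list of generated commands contains both statements in any order (the literals are examples only):

assertThat(
        Arrays.asList( "create table simple (id integer)", "drop table simple" ),
        CollectionElementMatcher.hasAllOf(
                CaseInsensitiveStartsWithMatcher.startsWith( "CREATE" ),
                CaseInsensitiveStartsWithMatcher.startsWith( "DROP" )
        )
);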

View File

@ -6,6 +6,7 @@
*/ */
package org.hibernate.testing.junit5; package org.hibernate.testing.junit5;
import java.util.Set;
import java.util.function.Consumer; import java.util.function.Consumer;
import java.util.function.Function; import java.util.function.Function;
import javax.persistence.SharedCacheMode; import javax.persistence.SharedCacheMode;
@ -24,6 +25,7 @@ import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.mapping.RootClass; import org.hibernate.mapping.RootClass;
import org.hibernate.tool.schema.Action; import org.hibernate.tool.schema.Action;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator; import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator.ActionGrouping;
import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterEach;
@ -72,10 +74,11 @@ public abstract class SessionFactoryBasedFunctionalTest
} }
catch (Exception e) { catch (Exception e) {
StandardServiceRegistryBuilder.destroy( ssr ); StandardServiceRegistryBuilder.destroy( ssr );
SchemaManagementToolCoordinator.ActionGrouping actions = SchemaManagementToolCoordinator.ActionGrouping.interpret( if ( exportSchema() && metadata != null ) {
ssrBuilder.getSettings() ); final Set<ActionGrouping> groupings = ActionGrouping.interpret( metadata, ssrBuilder.getSettings() );
if ( ( exportSchema() || actions.getDatabaseAction() != Action.NONE ) && metadata != null ) { if ( ! groupings.isEmpty() ) {
dropDatabase( ); dropDatabase();
}
} }
throw e; throw e;
} }
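ActionGrouping.interpret now returns one grouping per contributor rather than a single global grouping. A hedged sketch of what that enables, with the getContributor()/getDatabaseAction() accessors assumed from how the groupings are consumed elsewhere in this change set:

final Set<ActionGrouping> groupings = ActionGrouping.interpret( metadata, ssrBuilder.getSettings() );
for ( ActionGrouping grouping : groupings ) {
    if ( grouping.getDatabaseAction() != Action.NONE ) {
        // schema export was requested for the mappings of grouping.getContributor()
    }
}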

View File

@ -0,0 +1,39 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.orm;
import java.util.ArrayList;
import org.hibernate.tool.schema.internal.exec.GenerationTarget;
/**
* @author Steve Ebersole
*/
public class JournalingGenerationTarget implements GenerationTarget {
private final ArrayList<String> commands = new ArrayList<>();
@Override
public void prepare() {
}
@Override
public void accept(String command) {
commands.add( command );
}
public ArrayList<String> getCommands() {
return commands;
}
@Override
public void release() {
}
public void clear() {
commands.clear();
}
}
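Illustrative usage, hedged: `ssr` and `metadata` stand for a StandardServiceRegistry and Metadata built elsewhere, and the SchemaCreatorImpl convenience overload doCreation(Metadata, boolean, GenerationTarget...) is assumed to remain available for plain targets:

final JournalingGenerationTarget target = new JournalingGenerationTarget();
new SchemaCreatorImpl( ssr ).doCreation( metadata, true, target );
for ( String command : target.getCommands() ) {
    // inspect or assert on the captured DDL instead of executing it
    System.out.println( command );
}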

View File

@ -0,0 +1,42 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.orm.junit;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.hibernate.integrator.spi.Integrator;
/**
* Used to define the bootstrap ServiceRegistry to be used for testing.
*/
@Inherited
@Target( ElementType.TYPE )
@Retention( RetentionPolicy.RUNTIME )
@ServiceRegistryFunctionalTesting
public @interface BootstrapServiceRegistry {
Class<? extends Integrator>[] integrators() default {};
JavaService[] javaServices() default {};
@interface JavaService {
/**
* Logically `?` is `T`, the service role (contract)
*/
Class<?> role();
/**
* Logically `?` is `S extends T`, i.e. an implementation of {@link #role()}
*/
Class<?> impl();
}
}
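A hedged usage sketch: SomeContract and SomeContractImpl are placeholder types, and the injected ServiceRegistryScope parameter relies on the resolver wired up by @ServiceRegistryFunctionalTesting:

@BootstrapServiceRegistry(
        javaServices = @BootstrapServiceRegistry.JavaService(
                role = SomeContract.class,
                impl = SomeContractImpl.class
        )
)
public class BootstrapServiceRegistryUsageTest {
    @Test
    public void verifyExtraJavaServiceIsVisible(ServiceRegistryScope scope) {
        // the StandardServiceRegistry in `scope` was built on a BootstrapServiceRegistry
        // whose ClassLoaderService also exposes SomeContractImpl via loadJavaServices()
    }
}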

View File

@ -0,0 +1,17 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.testing.orm.junit;
import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
/**
* Producer of BootstrapServiceRegistry
*/
public interface BootstrapServiceRegistryProducer {
BootstrapServiceRegistry produceServiceRegistry(BootstrapServiceRegistryBuilder builder);
}

View File

@ -11,6 +11,7 @@ import java.lang.reflect.InvocationTargetException;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.Set;
import java.util.function.Consumer; import java.util.function.Consumer;
import java.util.function.Function; import java.util.function.Function;
import javax.persistence.EntityManager; import javax.persistence.EntityManager;
@ -209,10 +210,8 @@ public class EntityManagerFactoryExtension
MetadataImplementor model) { MetadataImplementor model) {
final Map<String, Object> baseProperties = sessionFactory.getProperties(); final Map<String, Object> baseProperties = sessionFactory.getProperties();
final ActionGrouping actions = ActionGrouping.interpret( baseProperties ); final Set<ActionGrouping> groupings = ActionGrouping.interpret( model, baseProperties );
if ( ! groupings.isEmpty() ) {
// if there are explicit setting for auto schema tooling then skip the annotation
if ( actions.getDatabaseAction() != Action.NONE || actions.getScriptAction() != Action.NONE ) {
// the properties contained explicit settings for auto schema tooling - skip the annotation // the properties contained explicit settings for auto schema tooling - skip the annotation
return; return;
} }

View File

@ -14,7 +14,6 @@ import java.lang.annotation.Target;
import org.hibernate.boot.registry.StandardServiceInitiator; import org.hibernate.boot.registry.StandardServiceInitiator;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.integrator.spi.Integrator;
import org.hibernate.service.spi.ServiceContributor; import org.hibernate.service.spi.ServiceContributor;
/** /**
@ -67,18 +66,11 @@ import org.hibernate.service.spi.ServiceContributor;
@Retention( RetentionPolicy.RUNTIME ) @Retention( RetentionPolicy.RUNTIME )
@ServiceRegistryFunctionalTesting @ServiceRegistryFunctionalTesting
//@TestInstance( TestInstance.Lifecycle.PER_CLASS )
//
//@ExtendWith( FailureExpectedExtension.class )
//@ExtendWith( ServiceRegistryExtension.class )
//@ExtendWith( ServiceRegistryParameterResolver.class )
public @interface ServiceRegistry { public @interface ServiceRegistry {
Class<? extends ServiceContributor>[] serviceContributors() default {}; Class<? extends ServiceContributor>[] serviceContributors() default {};
Class<? extends StandardServiceInitiator>[] initiators() default {}; Class<? extends StandardServiceInitiator>[] initiators() default {};
Class<? extends Integrator>[] integrators() default {};
Service[] services() default {}; Service[] services() default {};
Setting[] settings() default {}; Setting[] settings() default {};

View File

@ -6,6 +6,8 @@
*/ */
package org.hibernate.testing.orm.junit; package org.hibernate.testing.orm.junit;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
@ -17,6 +19,8 @@ import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.integrator.spi.Integrator; import org.hibernate.integrator.spi.Integrator;
import org.hibernate.service.spi.ServiceContributor; import org.hibernate.service.spi.ServiceContributor;
import org.hibernate.testing.boot.ExtraJavaServicesClassLoaderService;
import org.hibernate.testing.boot.ExtraJavaServicesClassLoaderService.JavaServiceDescriptor;
import org.junit.jupiter.api.extension.AfterAllCallback; import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext; import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.TestExecutionExceptionHandler; import org.junit.jupiter.api.extension.TestExecutionExceptionHandler;
@ -61,35 +65,53 @@ public class ServiceRegistryExtension
final ServiceRegistryScopeImpl scope = new ServiceRegistryScopeImpl( ); final ServiceRegistryScopeImpl scope = new ServiceRegistryScopeImpl( );
log.debugf( "Creating ServiceRegistryScope - %s", context.getDisplayName() ); log.debugf( "Creating ServiceRegistryScope - %s", context.getDisplayName() );
final ServiceRegistryProducer producer; final BootstrapServiceRegistryProducer bsrProducer;
if ( testInstance instanceof ServiceRegistryProducer ) { final Optional<BootstrapServiceRegistry> bsrAnnWrapper = AnnotationSupport.findAnnotation(
producer = (ServiceRegistryProducer) testInstance; context.getElement().get(),
BootstrapServiceRegistry.class
);
if ( bsrAnnWrapper.isPresent() ) {
bsrProducer = bsrBuilder -> {
final BootstrapServiceRegistry bsrAnn = bsrAnnWrapper.get();
configureJavaServices( bsrAnn, bsrBuilder );
configureIntegrators( bsrAnn, bsrBuilder );
return bsrBuilder.enableAutoClose().build();
};
} }
else { else {
producer = ssrb -> { bsrProducer = BootstrapServiceRegistryBuilder::build;
}
final ServiceRegistryProducer ssrProducer;
if ( testInstance instanceof ServiceRegistryProducer ) {
ssrProducer = (ServiceRegistryProducer) testInstance;
}
else {
ssrProducer = ssrb -> {
if ( !context.getElement().isPresent() ) { if ( !context.getElement().isPresent() ) {
throw new RuntimeException( "Unable to determine how to handle given ExtensionContext : " + context.getDisplayName() ); throw new RuntimeException( "Unable to determine how to handle given ExtensionContext : " + context.getDisplayName() );
} }
final Optional<ServiceRegistry> serviceRegistryAnnWrapper = AnnotationSupport.findAnnotation(
final Optional<ServiceRegistry> ssrAnnWrapper = AnnotationSupport.findAnnotation(
context.getElement().get(), context.getElement().get(),
ServiceRegistry.class ServiceRegistry.class
); );
if ( serviceRegistryAnnWrapper.isPresent() ) { if ( ssrAnnWrapper.isPresent() ) {
final ServiceRegistry serviceRegistryAnn = serviceRegistryAnnWrapper.get(); final ServiceRegistry serviceRegistryAnn = ssrAnnWrapper.get();
configureServices( serviceRegistryAnn, ssrb ); configureServices( serviceRegistryAnn, ssrb );
configureIntegrators(serviceRegistryAnn, scope);
} }
return ssrb.build(); return ssrb.build();
}; };
} }
scope.createRegistry( bsrProducer, ssrProducer );
scope.createRegistry(producer);
locateExtensionStore( testInstance, context ).put( REGISTRY_KEY, scope ); locateExtensionStore( testInstance, context ).put( REGISTRY_KEY, scope );
@ -103,11 +125,52 @@ public class ServiceRegistryExtension
} }
private static void configureIntegrators( private static void configureIntegrators(
ServiceRegistry serviceRegistryAnn, BootstrapServiceRegistry bsrAnn,
final ServiceRegistryScopeImpl serviceRegistryScope) { final BootstrapServiceRegistryBuilder bsrBuilder) {
for ( Class<? extends Integrator> integrator : serviceRegistryAnn.integrators() ) { final Class<? extends Integrator>[] integrators = bsrAnn.integrators();
serviceRegistryScope.applyIntegrator( integrator ); if ( integrators.length == 0 ) {
return;
} }
for ( Class<? extends Integrator> integratorImpl : integrators ) {
assert integratorImpl != null;
try {
final Constructor<? extends Integrator> constructor = integratorImpl.getDeclaredConstructor();
final Integrator integrator = constructor.newInstance();
bsrBuilder.applyIntegrator( integrator );
}
catch (NoSuchMethodException e) {
throw new IllegalArgumentException( "Could not find no-arg constructor for Integrator : " + integratorImpl.getName(), e );
}
catch (IllegalAccessException e) {
throw new IllegalArgumentException( "Unable to access no-arg constructor for Integrator : " + integratorImpl.getName(), e );
}
catch (InstantiationException | InvocationTargetException e) {
throw new IllegalArgumentException( "Unable to instantiate Integrator : " + integratorImpl.getName(), e );
}
}
}
private static void configureJavaServices(BootstrapServiceRegistry bsrAnn, BootstrapServiceRegistryBuilder bsrBuilder) {
final BootstrapServiceRegistry.JavaService[] javaServiceAnns = bsrAnn.javaServices();
if ( javaServiceAnns.length == 0 ) {
return;
}
final List<JavaServiceDescriptor<?>> javaServiceDescriptors = new ArrayList<>( javaServiceAnns.length );
for ( int i = 0; i < javaServiceAnns.length; i++ ) {
final BootstrapServiceRegistry.JavaService javaServiceAnn = javaServiceAnns[ i ];
javaServiceDescriptors.add(
new JavaServiceDescriptor(
javaServiceAnn.role(),
javaServiceAnn.impl()
)
);
}
final ExtraJavaServicesClassLoaderService cls = new ExtraJavaServicesClassLoaderService( javaServiceDescriptors );
bsrBuilder.applyClassLoaderService( cls );
} }
private static void configureServices(ServiceRegistry serviceRegistryAnn, StandardServiceRegistryBuilder ssrb) { private static void configureServices(ServiceRegistry serviceRegistryAnn, StandardServiceRegistryBuilder ssrb) {
@ -171,36 +234,30 @@ public class ServiceRegistryExtension
} }
private static class ServiceRegistryScopeImpl implements ServiceRegistryScope, ExtensionContext.Store.CloseableResource { private static class ServiceRegistryScopeImpl implements ServiceRegistryScope, ExtensionContext.Store.CloseableResource {
private ServiceRegistryProducer producer; private BootstrapServiceRegistryProducer bsrProducer;
private ServiceRegistryProducer ssrProducer;
private StandardServiceRegistry registry; private StandardServiceRegistry registry;
private boolean active = true; private boolean active = true;
private List<Class<? extends Integrator>> integrators = new ArrayList<>();
public ServiceRegistryScopeImpl() { public ServiceRegistryScopeImpl() {
} }
public StandardServiceRegistry createRegistry(ServiceRegistryProducer producer) { public StandardServiceRegistry createRegistry(BootstrapServiceRegistryProducer bsrProducer, ServiceRegistryProducer ssrProducer) {
this.producer = producer; this.bsrProducer = bsrProducer;
verifyActive(); this.ssrProducer = ssrProducer;
BootstrapServiceRegistryBuilder bootstrapServiceRegistryBuilder = new BootstrapServiceRegistryBuilder().enableAutoClose();
integrators.forEach(
integrator -> {
try {
bootstrapServiceRegistryBuilder.applyIntegrator( integrator.newInstance() );
}
catch (Exception e) {
throw new RuntimeException( "Could not configure BootstrapServiceRegistryBuilder", e );
}
}
);
final StandardServiceRegistryBuilder ssrb = new StandardServiceRegistryBuilder(bootstrapServiceRegistryBuilder.build()); verifyActive();
BootstrapServiceRegistryBuilder bsrb = new BootstrapServiceRegistryBuilder().enableAutoClose();
final org.hibernate.boot.registry.BootstrapServiceRegistry bsr = bsrProducer.produceServiceRegistry( bsrb );
final StandardServiceRegistryBuilder ssrb = new StandardServiceRegistryBuilder( bsr );
// we will close it ourselves explicitly. // we will close it ourselves explicitly.
ssrb.disableAutoClose(); ssrb.disableAutoClose();
return producer.produceServiceRegistry( ssrb ); return ssrProducer.produceServiceRegistry( ssrb );
} }
private void verifyActive() { private void verifyActive() {
@ -209,16 +266,12 @@ public class ServiceRegistryExtension
} }
} }
public void applyIntegrator(Class<? extends Integrator> integrator) {
integrators.add( integrator );
}
@Override @Override
public StandardServiceRegistry getRegistry() { public StandardServiceRegistry getRegistry() {
verifyActive(); verifyActive();
if ( registry == null ) { if ( registry == null ) {
registry = createRegistry( producer ); registry = createRegistry( bsrProducer, ssrProducer );
} }
return registry; return registry;

View File

@ -9,6 +9,7 @@ package org.hibernate.testing.orm.junit;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.Set;
import java.util.function.Consumer; import java.util.function.Consumer;
import java.util.function.Function; import java.util.function.Function;
@ -20,17 +21,14 @@ import org.hibernate.boot.SessionFactoryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistry; import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.spi.MetadataImplementor; import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings; import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor; import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.StringHelper;
import org.hibernate.resource.jdbc.spi.StatementInspector; import org.hibernate.resource.jdbc.spi.StatementInspector;
import org.hibernate.resource.transaction.spi.TransactionStatus;
import org.hibernate.tool.schema.Action; import org.hibernate.tool.schema.Action;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator; import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator.ActionGrouping; import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator.ActionGrouping;
import org.hibernate.testing.junit4.Helper;
import org.junit.jupiter.api.extension.AfterAllCallback; import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext; import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.TestExecutionExceptionHandler; import org.junit.jupiter.api.extension.TestExecutionExceptionHandler;
@ -146,10 +144,10 @@ public class SessionFactoryExtension
boolean createSecondarySchemas) { boolean createSecondarySchemas) {
final Map<String, Object> baseProperties = sessionFactory.getProperties(); final Map<String, Object> baseProperties = sessionFactory.getProperties();
final ActionGrouping actions = ActionGrouping.interpret( baseProperties ); final Set<ActionGrouping> groupings = ActionGrouping.interpret( model, baseProperties );
// if there are explicit settings for auto schema tooling then skip the annotation // if there are explicit settings for auto schema tooling then skip the annotation
if ( actions.getDatabaseAction() != Action.NONE || actions.getScriptAction() != Action.NONE ) { if ( ! groupings.isEmpty() ) {
// the properties contained explicit settings for auto schema tooling - skip the annotation // the properties contained explicit settings for auto schema tooling - skip the annotation
return; return;
} }