HHH-9876 - Ability to filter objects from Database for schema tooling.

Authored by Marcus on 2015-07-19 13:43:20 +02:00; committed by Steve Ebersole
parent d71f931429
commit 2fdbeb8db6
18 changed files with 549 additions and 19 deletions

View File: org/hibernate/cfg/AvailableSettings.java

@ -768,8 +768,15 @@ public interface AvailableSettings {
*/
String HBM2DLL_CREATE_NAMESPACES = "hibernate.hbm2dll.create_namespaces";
/**
* Used to specify the {@link org.hibernate.tool.schema.spi.SchemaFilterProvider} to be used by
* create, drop, migrate and validate operations on the database schema. SchemaFilterProvider
* provides filters that can be used to limit the scope of these operations to specific namespaces,
* tables and sequences. All objects are included by default.
*
* @since 5.1
*/
String SCHEMA_FILTER_PROVIDER = "hibernate.schema.filter.provider";
/**
* The EntityMode in which to open Sessions from the SessionFactory.
@ -960,7 +967,7 @@ public interface AvailableSettings {
* @since 5.0
*/
String EXTRA_PHYSICAL_TABLE_TYPES = "hibernate.hbm2dll.extra_physical_table_types";
/**
* Unique columns and unique keys both use unique constraints in most dialects.
* SchemaUpdate needs to create these constraints, but DB's
@ -1012,7 +1019,7 @@ public interface AvailableSettings {
/*
* Enable instantiation of composite/embedded objects when all of its attribute values are {@code null}.
* The default (and historical) behavior is that a {@code null} reference will be used to represent the
* composite when all of its attributes are {@code null}
*
* @since 5.1
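A minimal configuration sketch for the new hibernate.schema.filter.provider setting follows. It is illustrative only: CustomSchemaFilterProvider is an assumed example class implementing SchemaFilterProvider, not part of this commit. The sketch simply registers the property among the standard configuration settings; the value is later resolved through the StrategySelector in HibernateSchemaManagementTool (see below).

import java.util.HashMap;
import java.util.Map;

import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;

public class SchemaFilterSettingExample {
	public static StandardServiceRegistry buildRegistry() {
		Map<String, Object> settings = new HashMap<String, Object>();
		// "com.acme.CustomSchemaFilterProvider" is a hypothetical implementation of
		// org.hibernate.tool.schema.spi.SchemaFilterProvider, used here only for illustration.
		settings.put( AvailableSettings.SCHEMA_FILTER_PROVIDER, "com.acme.CustomSchemaFilterProvider" );
		return new StandardServiceRegistryBuilder()
				.applySettings( settings )
				.build();
	}
}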

View File: org/hibernate/tool/schema/internal/DefaultSchemaFilter.java

@ -0,0 +1,25 @@
package org.hibernate.tool.schema.internal;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.spi.SchemaFilter;
/**
* Default {@link SchemaFilter} implementation: includes every namespace, table and sequence.
*/
public class DefaultSchemaFilter implements SchemaFilter {
public static final DefaultSchemaFilter INSTANCE = new DefaultSchemaFilter();
@Override
public boolean includeNamespace( Namespace namespace ) {
return true;
}
@Override
public boolean includeTable( Table table ) {
return true;
}
@Override
public boolean includeSequence( Sequence sequence ) {
return true;
}
}

View File: org/hibernate/tool/schema/internal/DefaultSchemaFilterProvider.java

@ -0,0 +1,29 @@
package org.hibernate.tool.schema.internal;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaFilterProvider;
/**
* Default {@link SchemaFilterProvider} implementation: returns the all-inclusive
* {@link DefaultSchemaFilter} for every operation.
*/
public class DefaultSchemaFilterProvider implements SchemaFilterProvider {
public static final DefaultSchemaFilterProvider INSTANCE = new DefaultSchemaFilterProvider();
@Override
public SchemaFilter getCreateFilter() {
return DefaultSchemaFilter.INSTANCE;
}
@Override
public SchemaFilter getDropFilter() {
return DefaultSchemaFilter.INSTANCE;
}
@Override
public SchemaFilter getMigrateFilter() {
return DefaultSchemaFilter.INSTANCE;
}
@Override
public SchemaFilter getValidateFilter() {
return DefaultSchemaFilter.INSTANCE;
}
}

View File: org/hibernate/tool/schema/internal/HibernateSchemaManagementTool.java

@ -8,6 +8,8 @@ package org.hibernate.tool.schema.internal;
import java.util.Map;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.service.ServiceRegistry;
@ -15,6 +17,8 @@ import org.hibernate.service.spi.ServiceRegistryAwareService;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaDropper;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaFilterProvider;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.SchemaMigrator;
import org.hibernate.tool.schema.spi.SchemaValidator;
@ -29,25 +33,34 @@ public class HibernateSchemaManagementTool implements SchemaManagementTool, Serv
@Override
public SchemaCreator getSchemaCreator(Map options) {
return new SchemaCreatorImpl();
return new SchemaCreatorImpl( getSchemaFilterProvider( options ).getCreateFilter() );
}
@Override
public SchemaDropper getSchemaDropper(Map options) {
return new SchemaDropperImpl();
return new SchemaDropperImpl( getSchemaFilterProvider( options ).getDropFilter() );
}
@Override
public SchemaMigrator getSchemaMigrator(Map options) {
return new SchemaMigratorImpl();
return new SchemaMigratorImpl( getSchemaFilterProvider( options ).getMigrateFilter() );
}
@Override
public SchemaValidator getSchemaValidator(Map options) {
final Dialect dialect = serviceRegistry.getService( JdbcServices.class ).getDialect();
return new SchemaValidatorImpl(dialect);
return new SchemaValidatorImpl( getSchemaFilterProvider( options ).getValidateFilter(), dialect );
}
private SchemaFilterProvider getSchemaFilterProvider(Map options) {
return serviceRegistry.getService( StrategySelector.class )
.resolveDefaultableStrategy(
SchemaFilterProvider.class,
options.get( AvailableSettings.SCHEMA_FILTER_PROVIDER ),
DefaultSchemaFilterProvider.INSTANCE
);
}
@Override
public void injectServices(ServiceRegistryImplementor serviceRegistry) {
this.serviceRegistry = serviceRegistry;
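For completeness, a caller-side sketch of the options map consumed above. It assumes, as with other Hibernate strategy settings, that StrategySelector.resolveDefaultableStrategy also accepts a ready SchemaFilterProvider instance rather than only a class name; treat that as an assumption, not something this commit guarantees.

import java.util.Collections;
import java.util.Map;

import org.hibernate.cfg.AvailableSettings;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.internal.DefaultSchemaFilterProvider;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaManagementTool;

public class SchemaToolOptionsExample {
	public static SchemaCreator filteredCreator(ServiceRegistry serviceRegistry) {
		// Any SchemaFilterProvider could be passed here; DefaultSchemaFilterProvider.INSTANCE
		// (introduced by this commit) simply includes everything.
		Map<String, Object> options = Collections.<String, Object>singletonMap(
				AvailableSettings.SCHEMA_FILTER_PROVIDER,
				DefaultSchemaFilterProvider.INSTANCE
		);
		SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class );
		// getSchemaCreator() resolves the provider from the options map and applies its create filter
		return tool.getSchemaCreator( options );
	}
}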

View File: org/hibernate/tool/schema/internal/SchemaCreatorImpl.java

@ -27,6 +27,7 @@ import org.hibernate.mapping.Index;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.UniqueKey;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.Target;
@ -38,6 +39,16 @@ import org.hibernate.tool.schema.spi.Target;
*/
public class SchemaCreatorImpl implements SchemaCreator {
private final SchemaFilter filter;
public SchemaCreatorImpl( SchemaFilter filter ) {
this.filter = filter;
}
public SchemaCreatorImpl() {
this( DefaultSchemaFilter.INSTANCE );
}
@Override
public void doCreation(Metadata metadata, boolean createNamespaces, List<Target> targets) throws SchemaManagementException {
doCreation( metadata, createNamespaces, targets.toArray( new Target[ targets.size() ] ) );
@ -161,6 +172,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
Set<Identifier> exportedCatalogs = new HashSet<Identifier>();
for ( Namespace namespace : database.getNamespaces() ) {
if ( !filter.includeNamespace( namespace ) ) {
continue;
}
if ( tryToCreateCatalogs ) {
final Identifier catalogLogicalName = namespace.getName().getCatalog();
final Identifier catalogPhysicalName = namespace.getPhysicalName().getCatalog();
@ -206,8 +221,15 @@ public class SchemaCreatorImpl implements SchemaCreator {
// then, create all schema objects (tables, sequences, constraints, etc) in each schema
for ( Namespace namespace : database.getNamespaces() ) {
if ( !filter.includeNamespace( namespace ) ) {
continue;
}
// sequences
for ( Sequence sequence : namespace.getSequences() ) {
if ( !filter.includeSequence( sequence ) ) {
continue;
}
checkExportIdentifier( sequence, exportIdentifiers );
applySqlStrings(
targets,
@ -221,7 +243,10 @@ public class SchemaCreatorImpl implements SchemaCreator {
// tables
for ( Table table : namespace.getTables() ) {
if( !table.isPhysicalTable() ){
if ( !table.isPhysicalTable() ){
continue;
}
if ( !filter.includeTable( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );

View File: org/hibernate/tool/schema/internal/SchemaDropperImpl.java

@ -25,6 +25,7 @@ import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.spi.SchemaDropper;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.Target;
@ -36,6 +37,16 @@ import org.hibernate.tool.schema.spi.Target;
*/
public class SchemaDropperImpl implements SchemaDropper {
private final SchemaFilter filter;
public SchemaDropperImpl( SchemaFilter filter ) {
this.filter = filter;
}
public SchemaDropperImpl() {
this( DefaultSchemaFilter.INSTANCE );
}
/**
* Intended for use from JPA schema export code.
*
@ -141,6 +152,11 @@ public class SchemaDropperImpl implements SchemaDropper {
}
for ( Namespace namespace : database.getNamespaces() ) {
if ( !filter.includeNamespace( namespace ) ) {
continue;
}
// we need to drop all constraints/indexes prior to dropping the tables
applyConstraintDropping( targets, namespace, metadata );
@ -149,6 +165,9 @@ public class SchemaDropperImpl implements SchemaDropper {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( !filter.includeTable( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
applySqlStrings( targets, dialect.getTableExporter().getSqlDropStrings( table, metadata ) );
}
@ -177,6 +196,11 @@ public class SchemaDropperImpl implements SchemaDropper {
Set<Identifier> exportedCatalogs = new HashSet<Identifier>();
for ( Namespace namespace : database.getNamespaces() ) {
if ( !filter.includeNamespace( namespace ) ) {
continue;
}
if ( tryToDropSchemas && namespace.getPhysicalName().getSchema() != null ) {
applySqlStrings(
targets, dialect.getDropSchemaCommand(
@ -217,6 +241,9 @@ public class SchemaDropperImpl implements SchemaDropper {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( !filter.includeTable( table ) ) {
continue;
}
final Iterator fks = table.getForeignKeyIterator();
while ( fks.hasNext() ) {

View File: org/hibernate/tool/schema/internal/SchemaMigratorImpl.java

@ -37,6 +37,7 @@ import org.hibernate.tool.schema.extract.spi.IndexInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.SchemaMigrator;
import org.hibernate.tool.schema.spi.Target;
@ -46,6 +47,17 @@ import org.hibernate.tool.schema.spi.Target;
* @author Steve Ebersole
*/
public class SchemaMigratorImpl implements SchemaMigrator {
private final SchemaFilter filter;
public SchemaMigratorImpl( SchemaFilter filter ) {
this.filter = filter;
}
public SchemaMigratorImpl() {
this( DefaultSchemaFilter.INSTANCE );
}
@Override
public void doMigration(
Metadata metadata,
@ -119,6 +131,9 @@ public class SchemaMigratorImpl implements SchemaMigrator {
Set<Identifier> exportedCatalogs = new HashSet<Identifier>();
for ( Namespace namespace : database.getNamespaces() ) {
if ( !filter.includeNamespace( namespace ) ) {
continue;
}
if ( tryToCreateCatalogs || tryToCreateSchemas ) {
if ( tryToCreateCatalogs ) {
final Identifier catalogLogicalName = namespace.getName().getCatalog();
@ -158,6 +173,9 @@ public class SchemaMigratorImpl implements SchemaMigrator {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( !filter.includeTable( table ) ) {
continue;
}
checkExportIdentifier( table, exportIdentifiers );
final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
if ( tableInformation != null && !tableInformation.isPhysicalTable() ) {
@ -175,6 +193,9 @@ public class SchemaMigratorImpl implements SchemaMigrator {
if ( !table.isPhysicalTable() ) {
continue;
}
if ( !filter.includeTable( table ) ) {
continue;
}
final TableInformation tableInformation = existingDatabase.getTableInformation( table.getQualifiedTableName() );
if ( tableInformation != null && !tableInformation.isPhysicalTable() ) {

View File: org/hibernate/tool/schema/internal/SchemaValidatorImpl.java

@ -21,6 +21,7 @@ import org.hibernate.tool.schema.extract.spi.ColumnInformation;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.SequenceInformation;
import org.hibernate.tool.schema.extract.spi.TableInformation;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaManagementException;
import org.hibernate.tool.schema.spi.SchemaValidator;
import org.hibernate.type.descriptor.JdbcTypeNameMapper;
@ -30,16 +31,25 @@ import org.hibernate.type.descriptor.JdbcTypeNameMapper;
*/
public class SchemaValidatorImpl implements SchemaValidator {
private final SchemaFilter schemaFilter;
private final Dialect dialect;
public SchemaValidatorImpl(Dialect dialect) {
public SchemaValidatorImpl(SchemaFilter schemaFilter, Dialect dialect) {
this.schemaFilter = schemaFilter;
this.dialect = dialect;
}
@Override
public void doValidation(Metadata metadata, DatabaseInformation databaseInformation) {
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace )) {
continue;
}
for ( Table table : namespace.getTables() ) {
if ( !schemaFilter.includeTable( table )) {
continue;
}
if ( !table.isPhysicalTable() ) {
continue;
}
@ -52,7 +62,15 @@ public class SchemaValidatorImpl implements SchemaValidator {
}
for ( Namespace namespace : metadata.getDatabase().getNamespaces() ) {
if ( !schemaFilter.includeNamespace( namespace )) {
continue;
}
for ( Sequence sequence : namespace.getSequences() ) {
if ( !schemaFilter.includeSequence( sequence )) {
continue;
}
final SequenceInformation sequenceInformation = databaseInformation.getSequenceInformation(
sequence.getName()
);

View File: org/hibernate/tool/schema/spi/SchemaFilter.java

@ -0,0 +1,15 @@
package org.hibernate.tool.schema.spi;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.mapping.Table;
/**
* Filter contract for schema tooling: only the namespaces, tables and sequences accepted by
* the filter are processed by the create, drop, migrate and validate operations.
*/
public interface SchemaFilter {
/** Should the given namespace (catalog/schema pair) be processed? */
boolean includeNamespace( Namespace namespace );
/** Should the given table be processed? */
boolean includeTable( Table table );
/** Should the given sequence be processed? */
boolean includeSequence( Sequence sequence );
}

View File: org/hibernate/tool/schema/spi/SchemaFilterProvider.java

@ -0,0 +1,20 @@
package org.hibernate.tool.schema.spi;
/**
* Provides the {@link org.hibernate.tool.schema.spi.SchemaFilter}s used by the create, drop, migrate and validate
* operations on the database schema. These filters can limit the scope of an operation to specific namespaces,
* tables and sequences.
*
* @since 5.1
*/
public interface SchemaFilterProvider {
SchemaFilter getCreateFilter();
SchemaFilter getDropFilter();
SchemaFilter getMigrateFilter();
SchemaFilter getValidateFilter();
}
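To make the contract concrete, here is a sketch of a custom SchemaFilterProvider built on the interfaces above. It is illustrative and not part of this commit: the class name and the "audit" schema name are assumptions, and the provider only narrows the drop operation while keeping the default all-inclusive behavior elsewhere.

import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.internal.DefaultSchemaFilter;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.hibernate.tool.schema.spi.SchemaFilterProvider;

public class AuditAwareSchemaFilterProvider implements SchemaFilterProvider {
	// Hypothetical filter: never drop anything that lives in the "audit" schema.
	private static final SchemaFilter SKIP_AUDIT_SCHEMA = new SchemaFilter() {
		@Override
		public boolean includeNamespace(Namespace namespace) {
			return namespace.getName().getSchema() == null
					|| !"audit".equals( namespace.getName().getSchema().getText() );
		}
		@Override
		public boolean includeTable(Table table) {
			return true;
		}
		@Override
		public boolean includeSequence(Sequence sequence) {
			return true;
		}
	};

	@Override
	public SchemaFilter getCreateFilter() {
		return DefaultSchemaFilter.INSTANCE;
	}
	@Override
	public SchemaFilter getDropFilter() {
		return SKIP_AUDIT_SCHEMA;
	}
	@Override
	public SchemaFilter getMigrateFilter() {
		return DefaultSchemaFilter.INSTANCE;
	}
	@Override
	public SchemaFilter getValidateFilter() {
		return DefaultSchemaFilter.INSTANCE;
	}
}

Such a provider could then be referenced through the hibernate.schema.filter.provider setting shown earlier.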

View File: org/hibernate/test/quote/TableGeneratorQuotingTest.java

@ -6,6 +6,11 @@
*/
package org.hibernate.test.quote;
import static org.junit.Assert.fail;
import java.util.Collections;
import java.util.Map;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
@ -20,20 +25,17 @@ import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.boot.JdbcConnectionAccessImpl;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.hibernate.tool.hbm2ddl.SchemaValidator;
import org.hibernate.tool.schema.internal.TargetDatabaseImpl;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.Target;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.hibernate.testing.boot.JdbcConnectionAccessImpl;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.fail;
/**
* @author Steve Ebersole
*/
@ -63,7 +65,8 @@ public class TableGeneratorQuotingTest extends BaseUnitTestCase {
final Target target = new TargetDatabaseImpl( new JdbcConnectionAccessImpl( connectionProvider ) );
final SchemaManagementTool tool = serviceRegistry.getService( SchemaManagementTool.class );
tool.getSchemaCreator( null ).doCreation( metadata, false, target );
Map options = Collections.emptyMap();
tool.getSchemaCreator( options ).doCreation( metadata, false, target );
try {
new SchemaValidator( serviceRegistry, (MetadataImplementor) metadata ).validate();
@ -72,7 +75,7 @@ public class TableGeneratorQuotingTest extends BaseUnitTestCase {
fail( "The identifier generator table should have validated. " + e.getMessage() );
}
finally {
tool.getSchemaDropper( null ).doDrop( metadata, false, target );
tool.getSchemaDropper( options ).doDrop( metadata, false, target );
}
}

View File: org/hibernate/test/schemafilter/RecordingTarget.java

@ -0,0 +1,64 @@
package org.hibernate.test.schemafilter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.hibernate.tool.schema.spi.Target;
class RecordingTarget implements Target {
private final Map<String,Pattern> patterns = new HashMap<>();
private final Map<String,Set<String>> actionsByCategory = new HashMap<>();
public RecordingTarget() {
patterns.put( "schema.create", Pattern.compile( "create schema (.*)" ) );
patterns.put( "schema.drop", Pattern.compile( "drop schema (.*)" ) );
patterns.put( "table.create", Pattern.compile( "create table (\\S+) .*" ) );
patterns.put( "table.drop", Pattern.compile( "drop table (.*)" ) );
}
public Set<String> getActions( String category ) {
Set<String> result = actionsByCategory.get( category );
if ( result == null ) {
result = new HashSet<>();
actionsByCategory.put( category, result );
}
return result;
}
@Override
public void accept( String action ) {
action = action.toLowerCase();
for ( Entry<String,Pattern> entry : patterns.entrySet() ) {
String category = entry.getKey();
Pattern pattern = entry.getValue();
Matcher matcher = pattern.matcher( action );
if ( matcher.matches() ) {
getActions( category ).add( matcher.group( 1 ) );
return;
}
}
}
@Override
public boolean acceptsImportScriptActions() {
return false;
}
@Override
public void prepare() {
// nothing to do
}
@Override
public void release() {
// nothing to do
}
}

View File: org/hibernate/test/schemafilter/Schema1Entity1.java

@ -0,0 +1,21 @@
package org.hibernate.test.schemafilter;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name = "the_entity_1", schema = "the_schema_1")
public class Schema1Entity1 {
@Id
private long id;
public long getId() {
return id;
}
public void setId( long id ) {
this.id = id;
}
}

View File: org/hibernate/test/schemafilter/Schema1Entity2.java

@ -0,0 +1,21 @@
package org.hibernate.test.schemafilter;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name = "the_entity_2", schema = "the_schema_1")
public class Schema1Entity2 {
@Id
private long id;
public long getId() {
return id;
}
public void setId( long id ) {
this.id = id;
}
}

View File: org/hibernate/test/schemafilter/Schema2Entity3.java

@ -0,0 +1,21 @@
package org.hibernate.test.schemafilter;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name = "the_entity_3", schema = "the_schema_2")
public class Schema2Entity3 {
@Id
private long id;
public long getId() {
return id;
}
public void setId( long id ) {
this.id = id;
}
}

View File: org/hibernate/test/schemafilter/Schema2Entity4.java

@ -0,0 +1,21 @@
package org.hibernate.test.schemafilter;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name = "the_entity_4", schema = "the_schema_2")
public class Schema2Entity4 {
@Id
private long id;
public long getId() {
return id;
}
public void setId( long id ) {
this.id = id;
}
}

View File: org/hibernate/test/schemafilter/SchemaFilterTest.java

@ -0,0 +1,157 @@
package org.hibernate.test.schemafilter;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.boot.model.relational.Namespace;
import org.hibernate.boot.model.relational.Sequence;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.SQLServerDialect;
import org.hibernate.mapping.Table;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.testing.ServiceRegistryBuilder;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.hibernate.tool.schema.internal.DefaultSchemaFilter;
import org.hibernate.tool.schema.internal.SchemaCreatorImpl;
import org.hibernate.tool.schema.internal.SchemaDropperImpl;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaDropper;
import org.hibernate.tool.schema.spi.SchemaFilter;
import org.junit.Assert;
import org.junit.Test;
@TestForIssue(jiraKey = "HHH-9876")
@SuppressWarnings({ "rawtypes", "unchecked" })
public class SchemaFilterTest extends BaseUnitTestCase {
private final ServiceRegistry serviceRegistry;
private final Metadata metadata;
public SchemaFilterTest() {
Map settings = new HashMap();
settings.putAll( Environment.getProperties() );
settings.put( AvailableSettings.DIALECT, SQLServerDialect.class.getName() );
this.serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( settings );
MetadataSources ms = new MetadataSources( serviceRegistry );
ms.addAnnotatedClass( SchemaNoneEntity0.class );
ms.addAnnotatedClass( Schema1Entity1.class );
ms.addAnnotatedClass( Schema1Entity2.class );
ms.addAnnotatedClass( Schema2Entity3.class );
ms.addAnnotatedClass( Schema2Entity4.class );
this.metadata = ms.buildMetadata();
}
@Test
public void createSchema_unfiltered() {
RecordingTarget target = doCreation( new DefaultSchemaFilter() );
Assert.assertThat( target.getActions( "schema.create" ), containsExactly( "the_schema_1", "the_schema_2" ));
Assert.assertThat( target.getActions( "table.create" ), containsExactly(
"the_entity_0",
"the_schema_1.the_entity_1",
"the_schema_1.the_entity_2",
"the_schema_2.the_entity_3",
"the_schema_2.the_entity_4"
));
}
@Test
public void createSchema_filtered() {
RecordingTarget target = doCreation( new TestSchemaFilter() );
Assert.assertThat( target.getActions( "schema.create" ), containsExactly( "the_schema_1" ));
Assert.assertThat( target.getActions( "table.create" ), containsExactly( "the_entity_0", "the_schema_1.the_entity_1" ));
}
@Test
public void dropSchema_unfiltered() {
RecordingTarget target = doDrop( new DefaultSchemaFilter() );
Assert.assertThat( target.getActions( "schema.drop" ), containsExactly( "the_schema_1", "the_schema_2" ));
Assert.assertThat( target.getActions( "table.drop" ), containsExactly(
"the_entity_0",
"the_schema_1.the_entity_1",
"the_schema_1.the_entity_2",
"the_schema_2.the_entity_3",
"the_schema_2.the_entity_4"
));
}
@Test
public void dropSchema_filtered() {
RecordingTarget target = doDrop( new TestSchemaFilter() );
Assert.assertThat( target.getActions( "schema.drop" ), containsExactly( "the_schema_1" ));
Assert.assertThat( target.getActions( "table.drop" ), containsExactly( "the_entity_0", "the_schema_1.the_entity_1" ));
}
private RecordingTarget doCreation( SchemaFilter filter ) {
RecordingTarget target = new RecordingTarget();
SchemaCreator creator = new SchemaCreatorImpl( filter );
creator.doCreation( metadata, true, target );
return target;
}
private RecordingTarget doDrop( SchemaFilter filter ) {
RecordingTarget target = new RecordingTarget();
SchemaDropper dropper = new SchemaDropperImpl( filter );
dropper.doDrop( metadata, true, target );
return target;
}
private BaseMatcher<Set<String>> containsExactly( Object... expected ) {
return containsExactly( new HashSet<>( Arrays.asList( expected ) ) );
}
private BaseMatcher<Set<String>> containsExactly( final Set expected ) {
return new BaseMatcher<Set<String>>() {
@Override
public boolean matches( Object item ) {
Set set = (Set) item;
return set.size() == expected.size()
&& set.containsAll( expected );
}
@Override
public void describeTo( Description description ) {
description.appendText( "Is set containing exactly " + expected );
}
};
}
private static class TestSchemaFilter implements SchemaFilter {
@Override
public boolean includeNamespace( Namespace namespace ) {
// exclude schema "the_schema_2"
Identifier identifier = namespace.getName().getSchema();
if ( identifier != null ) {
return !"the_schema_2".equals( identifier.getText() );
}
return true;
}
@Override
public boolean includeTable( Table table ) {
// exclude table "the_entity_2"
return !"the_entity_2".equals( table.getName() );
}
@Override
public boolean includeSequence( Sequence sequence ) {
return true;
}
}
}

View File: org/hibernate/test/schemafilter/SchemaNoneEntity0.java

@ -0,0 +1,22 @@
package org.hibernate.test.schemafilter;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name = "the_entity_0")
public class SchemaNoneEntity0 {
@Id
private long id;
public long getId() {
return id;
}
public void setId( long id ) {
this.id = id;
}
}