Merge remote-tracking branch 'upstream/main' into wip/6.0

Andrea Boriero 2021-06-22 09:09:30 +02:00
commit 6ca0be6d99
25 changed files with 730 additions and 26 deletions

View File

@ -884,6 +884,10 @@ For cases where the `javax.persistence.schema-generation.scripts.action` value i
`*javax.persistence.schema-generation.scripts.drop-target*`::
For cases where the `javax.persistence.schema-generation.scripts.action` value indicates that schema dropping commands should be written to DDL script file, `javax.persistence.schema-generation.scripts.drop-target` specifies either a `java.io.Writer` configured for output of the DDL script or a string specifying the file URL for the DDL script.
`*hibernate.hbm2ddl.schema-generation.script.append*` (e.g. `true` (default value) or `false`)::
For cases where the `javax.persistence.schema-generation.scripts.action` value indicates that schema commands should be written to DDL script file, `hibernate.hbm2ddl.schema-generation.script.append` specifies if schema commands should be appended to the end of the file rather than written at the beginning of the file.
Values are `true` for appending schema commands to the end of the file, `false` for writing schema commands at the beginning of the file.
`*javax.persistence.hibernate.hbm2ddl.import_files*` (e.g. `import.sql` (default value))::
Comma-separated names of the optional files containing SQL DML statements executed during the `SessionFactory` creation.
File order matters, the statements of a given file are executed before the statements of the following one.
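For illustration, a minimal sketch (not part of this change) of passing the new append setting through the standard JPA bootstrap; the persistence unit name "my-pu" and the target file name are assumptions:

import java.util.HashMap;
import java.util.Map;
import javax.persistence.Persistence;

public class GenerateCreateScript {
	public static void main(String[] args) {
		Map<String, Object> settings = new HashMap<>();
		settings.put( "javax.persistence.schema-generation.scripts.action", "create" );
		settings.put( "javax.persistence.schema-generation.scripts.create-target", "create.sql" );
		// the new setting; defaults to true, i.e. append to the existing file
		settings.put( "hibernate.hbm2ddl.schema-generation.script.append", "false" );
		Persistence.generateSchema( "my-pu", settings );
	}
}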

View File

@ -231,6 +231,10 @@ task copyBundleResources (type: Copy) {
// we will end up a test JAR with bundle files that where variables are replaced
processTestResources.finalizedBy copyBundleResources
sourcesJar {
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
}
task testJar(type: Jar, dependsOn: testClasses) {
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
archiveClassifier.set( 'test' )

View File

@ -1683,6 +1683,16 @@ public interface AvailableSettings extends org.hibernate.jpa.AvailableSettings {
@SuppressWarnings("JavaDoc") @SuppressWarnings("JavaDoc")
String HBM2DDL_SCRIPTS_CREATE_TARGET = "javax.persistence.schema-generation.scripts.create-target"; String HBM2DDL_SCRIPTS_CREATE_TARGET = "javax.persistence.schema-generation.scripts.create-target";
/**
* For cases where the {@value #HBM2DDL_SCRIPTS_ACTION} value indicates that schema commands should
* be written to DDL script file, specifies if schema commands should be appended to the end of the file rather than written at the beginning of the file.
*
* Values are: {@code true} for appending schema commands to the end of the file, {@code false} for writing schema commands at the beginning.
*
* The default value is {@code true}
*/
String HBM2DDL_SCRIPTS_CREATE_APPEND = "hibernate.hbm2ddl.schema-generation.script.append";
/**
* For cases where the {@value #HBM2DDL_SCRIPTS_ACTION} value indicates that schema drop commands should
* be written to DDL script file, {@value #HBM2DDL_SCRIPTS_DROP_TARGET} specifies either a
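For native bootstrap, the same behaviour can be requested through the new constant; a minimal sketch (the output file name is illustrative), mirroring what the new tests below do:

import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;

public class ScriptAppendBootstrapExample {
	// Builds a registry that writes the create script to schema.sql,
	// overwriting any previous content instead of appending to it.
	static StandardServiceRegistry buildRegistry() {
		return new StandardServiceRegistryBuilder()
				.applySetting( AvailableSettings.HBM2DDL_SCRIPTS_ACTION, "create" )
				.applySetting( AvailableSettings.HBM2DDL_SCRIPTS_CREATE_TARGET, "schema.sql" )
				.applySetting( AvailableSettings.HBM2DDL_SCRIPTS_CREATE_APPEND, "false" )
				.build();
	}
}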

View File

@ -136,6 +136,7 @@ public class SchemaExport {
}
}
boolean append = true;
boolean haltOnError = false;
boolean format = false;
boolean manageNamespaces = false;
@ -160,6 +161,18 @@ public class SchemaExport {
return this;
}
/**
* When generating an export script file, the content is appended to the end of the file by default.
*
* Calling this method causes the SQL to be written at the beginning of the file, overwriting any existing content, instead of being appended to the end.
*
* @return this
*/
public SchemaExport setOverrideOutputFileContent() {
append = false;
return this;
}
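A minimal usage sketch for the new mutator (not part of the commit; the output file name and the metadata argument are assumed):

import java.util.EnumSet;
import org.hibernate.boot.Metadata;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.schema.TargetType;

public class SchemaExportOverwriteExample {
	// Writes the create script to export.sql, replacing the previous file content.
	public static void exportCreateScript(Metadata metadata) {
		new SchemaExport()
				.setOutputFile( "export.sql" )
				.setOverrideOutputFileContent()
				.createOnly( EnumSet.of( TargetType.SCRIPT ), metadata );
	}
}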
/**
* Comma-separated list of resource names to use for database init commands on create.
*
@ -244,7 +257,12 @@ public class SchemaExport {
LOG.runningHbm2ddlSchemaExport();
final TargetDescriptor targetDescriptor = buildTargetDescriptor(
targetTypes,
outputFile,
append,
serviceRegistry
);
doExecution( action, needsJdbcConnection( targetTypes ), metadata, serviceRegistry, targetDescriptor );
}
@ -318,6 +336,14 @@ public class SchemaExport {
EnumSet<TargetType> targetTypes,
String outputFile,
ServiceRegistry serviceRegistry) {
return buildTargetDescriptor( targetTypes, outputFile, true, serviceRegistry );
}
public static TargetDescriptor buildTargetDescriptor(
EnumSet<TargetType> targetTypes,
String outputFile,
boolean append,
ServiceRegistry serviceRegistry) {
final ScriptTargetOutput scriptTarget;
if ( targetTypes.contains( TargetType.SCRIPT ) ) {
if ( outputFile == null ) {
@ -326,7 +352,8 @@ public class SchemaExport {
scriptTarget = Helper.interpretScriptTargetSetting(
outputFile,
serviceRegistry.getService( ClassLoaderService.class ),
(String) serviceRegistry.getService( ConfigurationService.class ).getSettings().get( AvailableSettings.HBM2DDL_CHARSET_NAME ),
append
);
}
else {

View File

@ -56,6 +56,7 @@ public class SchemaUpdate {
boolean haltOnError = false;
private String outputFile;
private boolean append = true;
private String delimiter;
private boolean format;
@ -88,7 +89,7 @@ public class SchemaUpdate {
exceptionHandler
);
final TargetDescriptor targetDescriptor = SchemaExport.buildTargetDescriptor( targetTypes, outputFile, append, serviceRegistry );
try {
tool.getSchemaMigrator( config ).doMigration( metadata, executionOptions, ContributableMatcher.ALL, targetDescriptor );
@ -124,6 +125,18 @@ public class SchemaUpdate {
return this;
}
/**
* When generating an export script file, the content is appended to the end of the file by default.
*
* Calling this method causes the SQL to be written at the beginning of the file, overwriting any existing content, instead of being appended to the end.
*
* @return this
*/
public SchemaUpdate setOverrideOutputFileContent() {
append = false;
return this;
}
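As with SchemaExport, a minimal usage sketch (the file name and the metadata argument are assumptions, not part of the commit):

import java.util.EnumSet;
import org.hibernate.boot.Metadata;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.schema.TargetType;

public class SchemaUpdateOverwriteExample {
	// Writes the migration script to update.sql, replacing the previous file content.
	public static void updateScript(Metadata metadata) {
		new SchemaUpdate()
				.setOutputFile( "update.sql" )
				.setOverrideOutputFileContent()
				.execute( EnumSet.of( TargetType.SCRIPT ), metadata );
	}
}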
/**
* Set the end of statement delimiter
*

View File

@ -94,7 +94,8 @@ public class Helper {
public static ScriptTargetOutput interpretScriptTargetSetting(
Object scriptTargetSetting,
ClassLoaderService classLoaderService,
String charsetName,
boolean append) {
if ( scriptTargetSetting == null ) {
return null;
}
@ -119,7 +120,7 @@ public class Helper {
// assume it is a File path
final File file = new File( scriptTargetSettingString );
return new ScriptTargetOutputToFile( file, charsetName, append );
}
}

View File

@ -28,6 +28,7 @@ public class ScriptTargetOutputToFile extends AbstractScriptTargetOutput impleme
private final File file;
private final String charsetName;
private final boolean append;
private Writer writer;
@ -36,10 +37,24 @@ public class ScriptTargetOutputToFile extends AbstractScriptTargetOutput impleme
*
* @param file The file to read from
* @param charsetName The charset name
* @param append If true, then bytes will be written to the end of the file rather than the beginning
*/
public ScriptTargetOutputToFile(File file, String charsetName, boolean append) {
this.file = file;
this.charsetName = charsetName;
this.append = append;
}
/**
* Constructs a ScriptTargetOutputToFile instance;
* the bytes will be written to the end of the file rather than the beginning.
*
* @param file The file to read from
* @param charsetName The charset name
*/
public ScriptTargetOutputToFile(File file, String charsetName) {
this( file, charsetName, true );
}
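For illustration only, a hedged sketch of driving this target directly through the ScriptTargetOutput lifecycle (prepare/accept/release); the file name and DDL string are made up:

import java.io.File;
import org.hibernate.tool.schema.internal.exec.ScriptTargetOutputToFile;

public class AppendTargetSketch {
	public static void main(String[] args) {
		// append = true: the accepted command is added to the end of schema.sql
		ScriptTargetOutputToFile target =
				new ScriptTargetOutputToFile( new File( "schema.sql" ), "UTF-8", true );
		target.prepare();
		target.accept( "create table MyEntity (id bigint not null, primary key (id))" );
		target.release();
	}
}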
@Override
@ -53,7 +68,7 @@ public class ScriptTargetOutputToFile extends AbstractScriptTargetOutput impleme
@Override
public void prepare() {
super.prepare();
this.writer = toFileWriter( this.file, this.charsetName, append );
}
@Override
@ -63,7 +78,7 @@ public class ScriptTargetOutputToFile extends AbstractScriptTargetOutput impleme
writer.close();
}
catch (IOException e) {
throw new SchemaManagementException( "Unable to close file writer : " + e );
}
finally {
writer = null;
@ -72,7 +87,7 @@ public class ScriptTargetOutputToFile extends AbstractScriptTargetOutput impleme
}
@SuppressWarnings("ResultOfMethodCallIgnored")
static Writer toFileWriter(File file, String charsetName, boolean append) {
try {
if ( ! file.exists() ) {
// best effort, since this is very likely not allowed in EE environments
@ -84,17 +99,17 @@ public class ScriptTargetOutputToFile extends AbstractScriptTargetOutput impleme
}
}
catch (Exception e) {
log.debug( "Exception calling File#createNewFile : " + e );
}
try {
return charsetName != null ?
new OutputStreamWriter(
new FileOutputStream( file, append ),
charsetName
) :
new OutputStreamWriter( new FileOutputStream(
file,
append
) );
}
catch (IOException e) {

View File

@ -28,6 +28,7 @@ public class ScriptTargetOutputToUrl extends AbstractScriptTargetOutput implemen
private final URL url;
private final String charsetName;
private final boolean append;
private Writer writer;
@ -36,10 +37,23 @@ public class ScriptTargetOutputToUrl extends AbstractScriptTargetOutput implemen
*
* @param url The url to read from
* @param charsetName The charset name
* @param append If true, then bytes will be written to the end of the file rather than the beginning
*/
public ScriptTargetOutputToUrl(URL url, String charsetName, boolean append) {
this.url = url;
this.charsetName = charsetName;
this.append = append;
}
/**
* Constructs a ScriptTargetOutputToUrl instance;
* the bytes will be written to the end of the file rather than the beginning.
*
* @param url The url to read from
* @param charsetName The charset name
*/
public ScriptTargetOutputToUrl(URL url, String charsetName) {
this( url, charsetName, true );
}
@Override
@ -53,7 +67,7 @@ public class ScriptTargetOutputToUrl extends AbstractScriptTargetOutput implemen
@Override
public void prepare() {
super.prepare();
this.writer = toWriter( url, charsetName, append );
}
@Override
@ -62,17 +76,17 @@ public class ScriptTargetOutputToUrl extends AbstractScriptTargetOutput implemen
writer().close();
}
catch (IOException e) {
throw new SchemaManagementException( "Unable to close file writer : " + e );
}
}
private static Writer toWriter( URL url, String charsetName, boolean append ) {
log.debug( "Attempting to resolve writer for URL : " + url );
// technically only "strings corresponding to file URLs" are supported, which I take to mean URLs whose
// protocol is "file"
try {
return ScriptTargetOutputToFile.toFileWriter( new File( url.toURI() ), charsetName, append );
}
catch (URISyntaxException e) {
throw new SchemaManagementException(

View File

@ -130,7 +130,7 @@ public class SchemaManagementToolCoordinator {
if ( scriptActionMap != null ) {
scriptActionMap.forEach(
(action, contributors) -> {
performScriptAction( action, metadata, tool, serviceRegistry, executionOptions, configService );
}
);
}
@ -352,13 +352,15 @@ public class SchemaManagementToolCoordinator {
Metadata metadata,
SchemaManagementTool tool,
ServiceRegistry serviceRegistry,
ExecutionOptions executionOptions,
ConfigurationService configurationService) {
switch ( scriptAction ) {
case CREATE_ONLY: {
final JpaTargetAndSourceDescriptor createDescriptor = buildScriptTargetDescriptor(
executionOptions.getConfigurationValues(),
CreateSettingSelector.INSTANCE,
serviceRegistry,
configurationService
);
tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
metadata,
@ -374,7 +376,8 @@ public class SchemaManagementToolCoordinator {
final JpaTargetAndSourceDescriptor dropDescriptor = buildScriptTargetDescriptor(
executionOptions.getConfigurationValues(),
DropSettingSelector.INSTANCE,
serviceRegistry,
configurationService
);
tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
metadata,
@ -386,7 +389,8 @@ public class SchemaManagementToolCoordinator {
final JpaTargetAndSourceDescriptor createDescriptor = buildScriptTargetDescriptor(
executionOptions.getConfigurationValues(),
CreateSettingSelector.INSTANCE,
serviceRegistry,
configurationService
);
tool.getSchemaCreator( executionOptions.getConfigurationValues() ).doCreation(
metadata,
@ -401,7 +405,8 @@ public class SchemaManagementToolCoordinator {
final JpaTargetAndSourceDescriptor dropDescriptor = buildScriptTargetDescriptor(
executionOptions.getConfigurationValues(),
DropSettingSelector.INSTANCE,
serviceRegistry,
configurationService
);
tool.getSchemaDropper( executionOptions.getConfigurationValues() ).doDrop(
metadata,
@ -416,7 +421,8 @@ public class SchemaManagementToolCoordinator {
final JpaTargetAndSourceDescriptor migrateDescriptor = buildScriptTargetDescriptor(
executionOptions.getConfigurationValues(),
MigrateSettingSelector.INSTANCE,
serviceRegistry,
configurationService
);
tool.getSchemaMigrator( executionOptions.getConfigurationValues() ).doMigration(
metadata,
@ -435,7 +441,8 @@ public class SchemaManagementToolCoordinator {
private static JpaTargetAndSourceDescriptor buildScriptTargetDescriptor(
Map<?,?> configurationValues,
SettingSelector settingSelector,
ServiceRegistry serviceRegistry,
ConfigurationService configurationService) {
final Object scriptSourceSetting = settingSelector.getScriptSourceSetting( configurationValues );
final SourceType sourceType = SourceType.interpret(
settingSelector.getSourceTypeSetting( configurationValues ),
@ -455,10 +462,13 @@ public class SchemaManagementToolCoordinator {
? Helper.interpretScriptSourceSetting( scriptSourceSetting, serviceRegistry.getService( ClassLoaderService.class ), charsetName )
: null;
boolean append = configurationService.getSetting( AvailableSettings.HBM2DDL_SCRIPTS_CREATE_APPEND, StandardConverters.BOOLEAN, true );
final ScriptTargetOutput scriptTargetOutput = Helper.interpretScriptTargetSetting(
settingSelector.getScriptTargetSetting( configurationValues ),
serviceRegistry.getService( ClassLoaderService.class ),
charsetName,
append
);
return new JpaTargetAndSourceDescriptor() {

View File

@ -0,0 +1,170 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.test.schemaupdate;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import javax.persistence.Entity;
import javax.persistence.Id;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.ServiceRegistryBuilder;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.StringContains.containsString;
@TestForIssue(jiraKey = "HHH-11817")
@RequiresDialect(H2Dialect.class)
public class SchemaCreationToOutputScriptTest extends BaseUnitTestCase {
private final String createTableMyEntity = "create table MyEntity";
private final String createTableMySecondEntity = "create table MySecondEntity";
private File output;
private ServiceRegistry serviceRegistry;
private MetadataImplementor metadata;
@Before
public void setUp() throws Exception {
output = File.createTempFile( "creation_script", ".sql" );
output.deleteOnExit();
List<String> content = Arrays.asList(
"This is the database creation script generated by Hibernate"
, "This is the second line"
, "This is the third line"
, "This is the fourth line"
, "This is the fifth line"
, "This is the sixth line"
, "This is the seventh line"
, "This is the eighth line"
, "This is the ninth line"
);
try (BufferedWriter bw = new BufferedWriter( new FileWriter( output ) )) {
for ( String s : content ) {
bw.write( s );
bw.write( System.lineSeparator() );
}
}
}
private void createServiceRegistryAndMetadata(String append) {
final StandardServiceRegistryBuilder standardServiceRegistryBuilder = new StandardServiceRegistryBuilder()
.applySetting( Environment.FORMAT_SQL, "false" )
.applySetting( Environment.HBM2DDL_SCRIPTS_ACTION, "create" )
.applySetting( AvailableSettings.HBM2DDL_SCRIPTS_CREATE_TARGET, output.getAbsolutePath() );
if ( append != null ) {
standardServiceRegistryBuilder.applySetting( AvailableSettings.HBM2DDL_SCRIPTS_CREATE_APPEND, append );
}
serviceRegistry = standardServiceRegistryBuilder.build();
metadata = (MetadataImplementor) new MetadataSources( serviceRegistry )
.addAnnotatedClass( MyEntity.class )
.addAnnotatedClass( MySecondEntity.class )
.buildMetadata();
metadata.validate();
}
@After
public void tearDown() {
ServiceRegistryBuilder.destroy( serviceRegistry );
}
@Test
public void testAppendModeFalse() throws Exception {
createServiceRegistryAndMetadata( "false" );
HashMap properties = new HashMap<>();
properties.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
SchemaManagementToolCoordinator.process(
metadata,
serviceRegistry,
properties,
null
);
List<String> commands = Files.readAllLines( output.toPath() );
assertThat( commands.size(), is( 2 ) );
assertThat( commands.get( 0 ), containsString( createTableMyEntity ) );
assertThat( commands.get( 1 ), containsString( createTableMySecondEntity ) );
}
@Test
public void testAppendModeTrue() throws Exception {
createServiceRegistryAndMetadata( "true" );
HashMap properties = new HashMap<>();
properties.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
SchemaManagementToolCoordinator.process(
metadata,
serviceRegistry,
properties,
null
);
List<String> commands = Files.readAllLines( output.toPath() );
assertThat( commands.size(), is( 11 ) );
assertThat( commands.get( 9 ), containsString( createTableMyEntity ) );
assertThat( commands.get( 10 ), containsString( createTableMySecondEntity ) );
}
@Test
public void testDefaultAppendMode() throws Exception {
createServiceRegistryAndMetadata( null );
HashMap properties = new HashMap<>();
properties.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
SchemaManagementToolCoordinator.process(
metadata,
serviceRegistry,
properties,
null
);
List<String> commands = Files.readAllLines( output.toPath() );
assertThat( commands.size(), is( 11 ) );
assertThat( commands.get( 9 ), containsString( createTableMyEntity ) );
assertThat( commands.get( 10 ), containsString( createTableMySecondEntity ) );
}
@Entity(name = "MyEntity")
public static class MyEntity {
@Id
private Long id;
private String name;
}
@Entity(name = "MySecondEntity")
public static class MySecondEntity {
@Id
private Long id;
private String name;
}
}

View File

@ -0,0 +1,170 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.test.schemaupdate;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import javax.persistence.Entity;
import javax.persistence.Id;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.ServiceRegistryBuilder;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
@TestForIssue(jiraKey = "HHH-11817")
@RequiresDialect(H2Dialect.class)
public class SchemaDropToOutputScriptTest extends BaseUnitTestCase {
private File output;
private ServiceRegistry serviceRegistry;
private MetadataImplementor metadata;
private final String dropMyEntityTable = "drop table if exists MyEntity";
private final String dropMySecondEntityTable = "drop table if exists MySecondEntity";
@Before
public void setUp() throws Exception {
output = File.createTempFile( "creation_script", ".sql" );
output.deleteOnExit();
List<String> content = Arrays.asList(
"This is the database creation script generated by Hibernate"
, "This is the second line"
, "This is the third line"
, "This is the fourth line"
, "This is the fifth line"
, "This is the sixth line"
, "This is the seventh line"
, "This is the eighth line"
, "This is the ninth line"
);
try (BufferedWriter bw = new BufferedWriter( new FileWriter( output ) )) {
for ( String s : content ) {
bw.write( s );
bw.write( System.lineSeparator() );
}
}
}
private void createServiceRegistryAndMetadata(String append) {
final StandardServiceRegistryBuilder standardServiceRegistryBuilder = new StandardServiceRegistryBuilder()
.applySetting( Environment.FORMAT_SQL, "false" )
.applySetting( Environment.HBM2DDL_SCRIPTS_ACTION, "drop" )
.applySetting( AvailableSettings.HBM2DDL_SCRIPTS_DROP_TARGET, output.getAbsolutePath() );
if ( append != null ) {
standardServiceRegistryBuilder.applySetting( AvailableSettings.HBM2DDL_SCRIPTS_CREATE_APPEND, append );
}
serviceRegistry = standardServiceRegistryBuilder.build();
metadata = (MetadataImplementor) new MetadataSources( serviceRegistry )
.addAnnotatedClass( MyEntity.class )
.addAnnotatedClass( MySecondEntity.class )
.buildMetadata();
metadata.validate();
}
@After
public void tearDown() {
ServiceRegistryBuilder.destroy( serviceRegistry );
}
@Test
public void testAppendModeFalse() throws Exception {
createServiceRegistryAndMetadata( "false" );
HashMap properties = new HashMap<>();
properties.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
SchemaManagementToolCoordinator.process(
metadata,
serviceRegistry,
properties,
null
);
List<String> commands = Files.readAllLines( output.toPath() );
assertThat( commands.size(), is( 2 ) );
assertThat( commands.get( 0 ), containsString( dropMyEntityTable ) );
assertThat( commands.get( 1 ), containsString( dropMySecondEntityTable ) );
}
@Test
public void testAppendModeTrue() throws Exception {
createServiceRegistryAndMetadata( "true" );
HashMap properties = new HashMap<>();
properties.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
SchemaManagementToolCoordinator.process(
metadata,
serviceRegistry,
properties,
null
);
List<String> commands = Files.readAllLines( output.toPath() );
assertThat( commands.size(), is( 11 ) );
assertThat( commands.get( 9 ), containsString( dropMyEntityTable ) );
assertThat( commands.get( 10 ), containsString( dropMySecondEntityTable ) );
}
@Test
public void testDefaultAppendMode() throws Exception {
createServiceRegistryAndMetadata( null );
HashMap properties = new HashMap<>();
properties.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
SchemaManagementToolCoordinator.process(
metadata,
serviceRegistry,
properties,
null
);
List<String> commands = Files.readAllLines( output.toPath() );
assertThat( commands.size(), is( 11 ) );
assertThat( commands.get( 9 ), containsString( dropMyEntityTable ) );
assertThat( commands.get( 10 ), containsString( dropMySecondEntityTable ) );
}
@Entity(name = "MyEntity")
public static class MyEntity {
@Id
private Long id;
private String name;
}
@Entity(name = "MySecondEntity")
public static class MySecondEntity {
@Id
private Long id;
private String name;
}
}

View File

@ -0,0 +1,188 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.test.schemaupdate;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import javax.persistence.Entity;
import javax.persistence.Id;
import org.hibernate.SessionFactory;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
import org.hibernate.testing.RequiresDialect;
import org.hibernate.testing.ServiceRegistryBuilder;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
@TestForIssue(jiraKey = "HHH-11817")
@RequiresDialect(H2Dialect.class)
public class SchemaMigrationToOutputScriptTest extends BaseUnitTestCase {
private final String createTableMySecondEntity = "create table MySecondEntity";
private File output;
private ServiceRegistry serviceRegistry;
private MetadataImplementor metadata;
@Before
public void setUp() throws Exception {
output = File.createTempFile( "creation_script", ".sql" );
output.deleteOnExit();
List<String> content = Arrays.asList(
"This is the database creation script generated by Hibernate"
, "This is the second line"
, "This is the third line"
, "This is the fourth line"
, "This is the fifth line"
, "This is the sixth line"
, "This is the seventh line"
, "This is the eighth line"
, "This is the ninth line"
);
try (BufferedWriter bw = new BufferedWriter( new FileWriter( output ) )) {
for ( String s : content ) {
bw.write( s );
bw.write( System.lineSeparator() );
}
}
serviceRegistry = new StandardServiceRegistryBuilder().applySetting(
AvailableSettings.HBM2DDL_AUTO,
"create-only"
)
.build();
metadata = (MetadataImplementor) new MetadataSources( serviceRegistry )
.addAnnotatedClass( MyEntity.class )
.buildMetadata();
final SessionFactory sessionFactory = metadata.buildSessionFactory();
sessionFactory.close();
}
private void createServiceRegistryAndMetadata(String append) {
final StandardServiceRegistryBuilder standardServiceRegistryBuilder = new StandardServiceRegistryBuilder()
.applySetting( Environment.FORMAT_SQL, "false" )
.applySetting( Environment.HBM2DDL_SCRIPTS_ACTION, "update" )
.applySetting( AvailableSettings.HBM2DDL_SCRIPTS_CREATE_TARGET, output.getAbsolutePath() );
if ( append != null ) {
standardServiceRegistryBuilder.applySetting( AvailableSettings.HBM2DDL_SCRIPTS_CREATE_APPEND, append );
}
serviceRegistry = standardServiceRegistryBuilder.build();
metadata = (MetadataImplementor) new MetadataSources( serviceRegistry )
.addAnnotatedClass( MyEntity.class )
.addAnnotatedClass( MySecondEntity.class )
.buildMetadata();
metadata.validate();
}
@After
public void tearDown() {
ServiceRegistryBuilder.destroy( serviceRegistry );
serviceRegistry = new StandardServiceRegistryBuilder().applySetting( AvailableSettings.HBM2DDL_AUTO, "drop" )
.build();
metadata = (MetadataImplementor) new MetadataSources( serviceRegistry )
.addAnnotatedClass( MyEntity.class )
.buildMetadata();
final SessionFactory sessionFactory = metadata.buildSessionFactory();
sessionFactory.close();
}
@Test
public void testAppendModeFalse() throws Exception {
createServiceRegistryAndMetadata( "false" );
HashMap properties = new HashMap<>();
properties.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
SchemaManagementToolCoordinator.process(
metadata,
serviceRegistry,
properties,
null
);
List<String> commands = Files.readAllLines( output.toPath() );
assertThat( commands.size(), is( 1 ) );
assertThat( commands.get( 0 ), containsString( createTableMySecondEntity ) );
}
@Test
public void testAppendModeTrue() throws Exception {
createServiceRegistryAndMetadata( "true" );
HashMap properties = new HashMap<>();
properties.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
SchemaManagementToolCoordinator.process(
metadata,
serviceRegistry,
properties,
null
);
List<String> commands = Files.readAllLines( output.toPath() );
assertThat( commands.size(), is( 10 ) );
assertThat( commands.get( 9 ), containsString( createTableMySecondEntity ) );
}
@Test
public void testDefaultAppendMode() throws Exception {
createServiceRegistryAndMetadata( null );
HashMap properties = new HashMap<>();
properties.putAll( serviceRegistry.getService( ConfigurationService.class ).getSettings() );
SchemaManagementToolCoordinator.process(
metadata,
serviceRegistry,
properties,
null
);
List<String> commands = Files.readAllLines( output.toPath() );
assertThat( commands.size(), is( 10 ) );
assertThat( commands.get( 9 ), containsString( createTableMySecondEntity ) );
}
@Entity(name = "MyEntity")
public static class MyEntity {
@Id
private Long id;
private String name;
}
@Entity(name = "MySecondEntity")
public static class MySecondEntity {
@Id
private Long id;
private String name;
}
}

View File

@ -75,6 +75,10 @@ jar {
}
}
sourcesJar {
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
}
task testJar(type: Jar, dependsOn: testClasses) {
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
archiveClassifier.set( 'test' )

View File

@ -42,4 +42,9 @@ public class CockroachDB202SpatialDialect extends CockroachDB201Dialect implemen
delegateContributeTypes( typeContributions, serviceRegistry );
}
@Override
public boolean equivalentTypes(int typeCode1, int typeCode2) {
return super.equivalentTypes( typeCode1, typeCode2 ) ||
( isSpatial( typeCode1 ) && isSpatial( typeCode2 ) );
}
}

View File

@ -74,4 +74,9 @@ public interface CockroachSpatialDialectTrait extends SpatialDialect {
return DELEGATE.supports( function );
}
default boolean isSpatial(int typeCode) {
return DELEGATE.isSpatial( typeCode );
}
}

View File

@ -7,6 +7,8 @@
package org.hibernate.spatial.dialect.postgis;
import java.sql.Types;
import org.hibernate.spatial.SpatialDialect;
import org.hibernate.spatial.SpatialFunction;
import org.hibernate.spatial.dialect.SpatialFunctionsRegistry;
@ -125,4 +127,13 @@ interface PGSpatialDialectTrait extends SpatialDialect {
default boolean supports(SpatialFunction function) {
return support.supports( function );
}
/**
* Checks whether the typeCode is (potentially) the code for a spatial type
*
* @param typeCode the JDBC type code
* @return true if the typeCode corresponds with a spatial type
*/
default boolean isSpatial(int typeCode){
return support.isSpatial( typeCode );
}
}

View File

@ -7,6 +7,7 @@
package org.hibernate.spatial.dialect.postgis;
import java.sql.Types;
import java.util.Map;
import org.hibernate.boot.model.TypeContributions;
@ -36,4 +37,11 @@ public class PostgisPG10Dialect extends PostgreSQL10Dialect implements PGSpatial
support.contributeTypes( typeContributions, serviceRegistry );
}
@Override
public boolean equivalentTypes(int typeCode1, int typeCode2) {
return super.equivalentTypes( typeCode1, typeCode2 ) ||
( isSpatial( typeCode1 ) && isSpatial( typeCode2 ) );
}
}

View File

@ -48,6 +48,11 @@ public class PostgisPG82Dialect extends PostgreSQL82Dialect implements SpatialDi
support.contributeTypes( typeContributions, serviceRegistry );
}
@Override
public boolean equivalentTypes(int typeCode1, int typeCode2) {
return super.equivalentTypes( typeCode1, typeCode2 ) ||
( support.isSpatial( typeCode1 ) && support.isSpatial( typeCode2 ) );
}
@Override
public String getSpatialRelateSQL(String columnName, int spatialRelation) {

View File

@ -48,6 +48,12 @@ public class PostgisPG91Dialect extends PostgreSQL91Dialect implements SpatialDi
support.contributeTypes( typeContributions, serviceRegistry );
}
@Override
public boolean equivalentTypes(int typeCode1, int typeCode2) {
return super.equivalentTypes( typeCode1, typeCode2 ) ||
( support.isSpatial( typeCode1 ) && support.isSpatial( typeCode2 ) );
}
/**
* Returns the SQL fragment for the SQL WHERE-clause when parsing
* <code>org.hibernatespatial.criterion.SpatialRelateExpression</code>s

View File

@ -48,6 +48,12 @@ public class PostgisPG92Dialect extends PostgreSQL92Dialect implements SpatialDi
support.contributeTypes( typeContributions, serviceRegistry );
}
@Override
public boolean equivalentTypes(int typeCode1, int typeCode2) {
return super.equivalentTypes( typeCode1, typeCode2 ) ||
( support.isSpatial( typeCode1 ) && support.isSpatial( typeCode2 ) );
}
/**
* Returns the SQL fragment for the SQL WHERE-clause when parsing
* <code>org.hibernatespatial.criterion.SpatialRelateExpression</code>s

View File

@ -48,6 +48,12 @@ public class PostgisPG93Dialect extends PostgreSQL93Dialect implements SpatialDi
support.contributeTypes( typeContributions, serviceRegistry );
}
@Override
public boolean equivalentTypes(int typeCode1, int typeCode2) {
return super.equivalentTypes( typeCode1, typeCode2 ) ||
( support.isSpatial( typeCode1 ) && support.isSpatial( typeCode2 ) );
}
/**
* Returns the SQL fragment for the SQL WHERE-clause when parsing
* <code>org.hibernatespatial.criterion.SpatialRelateExpression</code>s

View File

@ -48,6 +48,12 @@ public class PostgisPG94Dialect extends PostgreSQL94Dialect implements SpatialDi
support.contributeTypes( typeContributions, serviceRegistry );
}
@Override
public boolean equivalentTypes(int typeCode1, int typeCode2) {
return super.equivalentTypes( typeCode1, typeCode2 ) ||
( support.isSpatial( typeCode1 ) && support.isSpatial( typeCode2 ) );
}
/**
* Returns the SQL fragment for the SQL WHERE-clause when parsing
* <code>org.hibernatespatial.criterion.SpatialRelateExpression</code>s

View File

@ -42,4 +42,9 @@ public class PostgisPG95Dialect extends PostgreSQL95Dialect implements PGSpatial
support.contributeTypes( typeContributions, serviceRegistry );
}
@Override
public boolean equivalentTypes(int typeCode1, int typeCode2) {
return super.equivalentTypes( typeCode1, typeCode2 ) ||
( isSpatial( typeCode1 ) && isSpatial( typeCode2 ) );
}
}

View File

@ -48,6 +48,12 @@ public class PostgisPG9Dialect extends PostgreSQL9Dialect implements SpatialDial
support.contributeTypes( typeContributions, serviceRegistry );
}
@Override
public boolean equivalentTypes(int typeCode1, int typeCode2) {
return super.equivalentTypes( typeCode1, typeCode2 ) ||
( support.isSpatial( typeCode1 ) && support.isSpatial( typeCode2 ) );
}
/**
* Returns the SQL fragment for the SQL WHERE-clause when parsing
* <code>org.hibernatespatial.criterion.SpatialRelateExpression</code>s

View File

@ -7,6 +7,7 @@
package org.hibernate.spatial.dialect.postgis;
import java.io.Serializable;
import java.sql.Types;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.service.ServiceRegistry;
@ -47,6 +48,10 @@ public class PostgisSupport implements SpatialDialect, Serializable {
return postgisFunctions;
}
public boolean isSpatial(int typeCode){
return typeCode == Types.OTHER || typeCode == PGGeometryTypeDescriptor.INSTANCE_WKB_1.getSqlType();
}
/**
* Returns the SQL fragment for the SQL WHERE-clause when parsing
* <code>org.hibernatespatial.criterion.SpatialRelateExpression</code>s