HHH-5337 Support for comma-separated multi-file imports

hibernate.hbm2ddl.import_files=[filename],[filename2],...

git-svn-id: https://svn.jboss.org/repos/hibernate/core/trunk@19804 1b8cb986-b30d-0410-93ca-fae66ebed9b2
Emmanuel Bernard 2010-06-23 22:33:00 +00:00
parent 5aad112e03
commit fe92f84072
9 changed files with 120 additions and 34 deletions
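A minimal sketch (not part of the commit) of how the new property is picked up, using the same value as the functional test further down; the class name is illustrative and the mappings are assumed to be registered elsewhere:

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;

public class MultiImportExample {
    public static void main(String[] args) {
        Configuration cfg = new Configuration();
        // the import scripts only run when the schema is created, so auto must be create or create-drop
        cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
        // two scripts, executed in the listed order during SessionFactory creation
        cfg.setProperty( Environment.HBM2DDL_IMPORT_FILES, "/humans.sql,/dogs.sql" );
        SessionFactory factory = cfg.buildSessionFactory();
        factory.close();
    }
}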

Environment.java

@ -469,13 +469,17 @@ public final class Environment {
public static final String HBM2DDL_AUTO = "hibernate.hbm2ddl.auto";
/**
* Name of the optional file containing SQL DML statements executed during the SessionFactory creation.
* Comma-separated names of the optional files containing SQL DML statements executed
* during the SessionFactory creation.
* File order matters: the statements of a given file are executed before the statements of the
* following files.
*
* These statements are only executed if the schema is created, i.e. if <tt>hibernate.hbm2ddl.auto</tt>
* is set to <tt>create</tt> or <tt>create-drop</tt>.
*
* The default value is <tt>/import.sql</tt>
*/
public static final String HBM2DDL_IMPORT_FILE = "hibernate.hbm2ddl.import_file";
public static final String HBM2DDL_IMPORT_FILES = "hibernate.hbm2ddl.import_files";
/**
* The {@link org.hibernate.exception.SQLExceptionConverter} to use for converting SQLExceptions

Settings.java

@ -100,7 +100,7 @@ public final class Settings {
// private ComponentTuplizerFactory componentTuplizerFactory; todo : HHH-3517 and HHH-1907
// private BytecodeProvider bytecodeProvider;
private JdbcSupport jdbcSupport;
private String importFile;
private String importFiles;
/**
* Package protected constructor
@ -118,12 +118,12 @@ private String importFile;
// return formatSql;
// }
public String getImportFile() {
return importFile;
public String getImportFiles() {
return importFiles;
}
public void setImportFile(String importFile) {
this.importFile = importFile;
public void setImportFiles(String importFiles) {
this.importFiles = importFiles;
}
public SQLStatementLogger getSqlStatementLogger() {

SettingsFactory.java

@ -337,7 +337,7 @@ public class SettingsFactory implements Serializable {
settings.setAutoCreateSchema(true);
settings.setAutoDropSchema(true);
}
settings.setImportFile( properties.getProperty( Environment.HBM2DDL_IMPORT_FILE ) );
settings.setImportFiles( properties.getProperty( Environment.HBM2DDL_IMPORT_FILES ) );
EntityMode defaultEntityMode = EntityMode.parse( properties.getProperty( Environment.DEFAULT_ENTITY_MODE ) );
log.info( "Default entity-mode: " + defaultEntityMode );

SchemaExport.java

@ -73,7 +73,7 @@ public class SchemaExport {
private String[] dropSQL;
private String[] createSQL;
private String outputFile = null;
private String importFile;
private String importFiles;
private Dialect dialect;
private String delimiter;
private final List exceptions = new ArrayList();
@ -106,7 +106,7 @@ public class SchemaExport {
createSQL = cfg.generateSchemaCreationScript( dialect );
sqlStatementLogger = settings.getSqlStatementLogger();
formatter = ( sqlStatementLogger.isFormatSql() ? FormatStyle.DDL : FormatStyle.NONE ).getFormatter();
importFile = settings.getImportFile() != null ? settings.getImportFile() : DEFAULT_IMPORT_FILE;
importFiles = settings.getImportFiles() != null ? settings.getImportFiles() : DEFAULT_IMPORT_FILE;
}
/**
@ -132,7 +132,7 @@ public class SchemaExport {
formatter = ( PropertiesHelper.getBoolean( Environment.FORMAT_SQL, props ) ? FormatStyle.DDL : FormatStyle.NONE ).getFormatter();
importFile = PropertiesHelper.getString( Environment.HBM2DDL_IMPORT_FILE, props, DEFAULT_IMPORT_FILE );
importFiles = PropertiesHelper.getString( Environment.HBM2DDL_IMPORT_FILES, props, DEFAULT_IMPORT_FILE );
}
/**
@ -148,7 +148,7 @@ public class SchemaExport {
dropSQL = cfg.generateDropSchemaScript( dialect );
createSQL = cfg.generateSchemaCreationScript( dialect );
formatter = ( PropertiesHelper.getBoolean( Environment.FORMAT_SQL, cfg.getProperties() ) ? FormatStyle.DDL : FormatStyle.NONE ).getFormatter();
importFile = PropertiesHelper.getString( Environment.HBM2DDL_IMPORT_FILE, cfg.getProperties(),
importFiles = PropertiesHelper.getString( Environment.HBM2DDL_IMPORT_FILES, cfg.getProperties(),
DEFAULT_IMPORT_FILE
);
}
@ -172,7 +172,7 @@ public class SchemaExport {
* @deprecated use {@link org.hibernate.cfg.Environment.HBM2DDL_IMPORT_FILE}
*/
public SchemaExport setImportFile(String filename) {
importFile = filename;
importFiles = filename;
return this;
}
@ -235,19 +235,22 @@ public class SchemaExport {
Connection connection = null;
Writer outputFileWriter = null;
Reader importFileReader = null;
List<NamedReader> importFileReaders = new ArrayList<NamedReader>();
Statement statement = null;
exceptions.clear();
try {
try {
InputStream stream = ConfigHelper.getResourceAsStream( importFile );
importFileReader = new InputStreamReader( stream );
}
catch ( HibernateException e ) {
log.debug( "import file not found: " + importFile );
for ( String currentFile : importFiles.split(",") ) {
try {
final String resourceName = currentFile.trim();
InputStream stream = ConfigHelper.getResourceAsStream( resourceName );
importFileReaders.add( new NamedReader( resourceName, stream ) );
}
catch ( HibernateException e ) {
log.debug( "import file not found: " + currentFile );
}
}
if ( outputFile != null ) {
@ -268,8 +271,10 @@ public class SchemaExport {
if ( !justDrop ) {
create( script, export, outputFileWriter, statement );
if ( export && importFileReader != null ) {
importScript( importFileReader, statement );
if ( export && importFileReaders.size() > 0 ) {
for (NamedReader reader : importFileReaders) {
importScript( reader, statement );
}
}
}
@ -301,22 +306,47 @@ public class SchemaExport {
if ( outputFileWriter != null ) {
outputFileWriter.close();
}
if ( importFileReader != null ) {
importFileReader.close();
}
}
catch ( IOException ioe ) {
exceptions.add( ioe );
log.error( "Error closing output file: " + outputFile, ioe );
}
for (NamedReader reader : importFileReaders) {
try {
reader.getReader().close();
}
catch ( IOException ioe ) {
exceptions.add( ioe );
log.error( "Error closing imput files: " + reader.getName(), ioe );
}
}
}
}
private void importScript(Reader importFileReader, Statement statement)
private class NamedReader {
private final Reader reader;
private final String name;
public NamedReader(String name, InputStream stream) {
this.name = name;
this.reader = new InputStreamReader( stream );
}
public Reader getReader() {
return reader;
}
public String getName() {
return name;
}
}
private void importScript(NamedReader importFileReader, Statement statement)
throws IOException {
log.info( "Executing import script: " + importFile );
BufferedReader reader = new BufferedReader( importFileReader );
log.info( "Executing import script: " + importFileReader.getName() );
BufferedReader reader = new BufferedReader( importFileReader.getReader() );
long lineNo = 0;
for ( String sql = reader.readLine(); sql != null; sql = reader.readLine() ) {
try {
@ -480,7 +510,7 @@ public class SchemaExport {
}
if (importFile != null) {
cfg.setProperty( Environment.HBM2DDL_IMPORT_FILE, importFile );
cfg.setProperty( Environment.HBM2DDL_IMPORT_FILES, importFile );
}
SchemaExport se = new SchemaExport( cfg )
.setHaltOnError( halt )

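The rewritten resolution loop above splits hibernate.hbm2ddl.import_files on commas and trims each name; a script that cannot be found on the classpath is only logged at debug level and skipped. A minimal sketch (not part of the commit) of driving SchemaExport directly with the new setting; the class name is illustrative and the mappings and connection settings are assumed to be configured elsewhere (e.g. hibernate.properties on the classpath):

import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.tool.hbm2ddl.SchemaExport;

public class MultiImportSchemaExportExample {
    public static void main(String[] args) {
        Configuration cfg = new Configuration();
        // whitespace after the comma is tolerated: each name is trimmed before the classpath lookup
        cfg.setProperty( Environment.HBM2DDL_IMPORT_FILES, "/humans.sql, /dogs.sql" );
        // script = true prints the statements, export = true runs them (and the import files) against the database
        new SchemaExport( cfg ).create( true, true );
    }
}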
Dog.hbm.xml

@ -0,0 +1,17 @@
<?xml version="1.0"?>
<!DOCTYPE hibernate-mapping PUBLIC
"-//Hibernate/Hibernate Mapping DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<!--
Test for import files.
-->
<hibernate-mapping package="org.hibernate.test.importfile">
<class name="Dog" table="dog">
<id name="id"/>
<many-to-one name="master" column="master_fk"/>
</class>
</hibernate-mapping>

Dog.java

@ -0,0 +1,25 @@
package org.hibernate.test.importfile;
/**
* @author Emmanuel Bernard
*/
public class Dog {
private Integer id;
private Human master;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Human getMaster() {
return master;
}
public void setMaster(Human master) {
this.master = master;
}
}

ImportFileTest.java

@ -16,9 +16,15 @@ public class ImportFileTest extends FunctionalTestCase {
public void testImportFile() throws Exception {
Session s = openSession( );
final Transaction tx = s.beginTransaction();
final List<?> list = s.createQuery( "from " + Human.class.getName() ).list();
assertEquals( "database.sql not imported", 3, list.size() );
for (Object entity : list) {
final List<?> humans = s.createQuery( "from " + Human.class.getName() ).list();
assertEquals( "humans.sql not imported", 3, humans.size() );
final List<?> dogs = s.createQuery( "from " + Dog.class.getName() ).list();
assertEquals( "dogs.sql not imported", 3, dogs.size() );
for (Object entity : dogs) {
s.delete( entity );
}
for (Object entity : humans) {
s.delete( entity );
}
tx.commit();
@ -26,7 +32,7 @@ public class ImportFileTest extends FunctionalTestCase {
}
public void configure(Configuration cfg) {
cfg.setProperty( Environment.HBM2DDL_IMPORT_FILE, "/database.sql");
cfg.setProperty( Environment.HBM2DDL_IMPORT_FILES, "/humans.sql,/dogs.sql");
}
public ImportFileTest(String string) {
@ -35,7 +41,8 @@ public class ImportFileTest extends FunctionalTestCase {
public String[] getMappings() {
return new String[] {
"importfile/Human.hbm.xml"
"importfile/Human.hbm.xml",
"importfile/Dog.hbm.xml"
};
}
}

dogs.sql

@ -0,0 +1,3 @@
INSERT INTO dog (id, master_fk) VALUES (1,1)
INSERT INTO dog (id, master_fk) VALUES (2,2)
INSERT INTO dog (id, master_fk) VALUES (3,3)