HHH-9930 - Enable mariadb (mysql) database profile

This commit is contained in:
Steve Ebersole 2015-07-23 12:25:40 -05:00
parent 11c76129bd
commit 37cc060b45
8 changed files with 111 additions and 37 deletions

View File

@ -189,9 +189,9 @@ subprojects { subProject ->
systemProperties['hibernate.test.validatefailureexpected'] = true
systemProperties += System.properties.findAll { it.key.startsWith( "hibernate.") }
beforeTest { descriptor ->
println "Starting test: " + descriptor
}
// beforeTest { descriptor ->
// println "Starting test: " + descriptor
// }
// afterTest { descriptor ->
// println "Completed test: " + descriptor

View File

@ -35,7 +35,7 @@ public class IdentifierHelperBuilder {
private Set<String> reservedWords = new TreeSet<String>( String.CASE_INSENSITIVE_ORDER );
private boolean globallyQuoteIdentifiers = false;
private boolean autoQuoteKeywords = true;
private IdentifierCaseStrategy unquotedCaseStrategy = IdentifierCaseStrategy.UPPER;
private IdentifierCaseStrategy unquotedCaseStrategy = IdentifierCaseStrategy.MIXED;
private IdentifierCaseStrategy quotedCaseStrategy = IdentifierCaseStrategy.MIXED;
public static IdentifierHelperBuilder from(JdbcEnvironment jdbcEnvironment) {

View File

@ -110,9 +110,12 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
@Override
public boolean schemaExists(Identifier catalog, Identifier schema) {
try {
final String catalogFilter = determineCatalogFilter( catalog );
final String schemaFilter = determineSchemaFilter( schema );
final ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getSchemas(
determineCatalogFilter( catalog ),
determineSchemaFilter( schema )
catalogFilter,
schemaFilter
);
try {
@ -267,6 +270,9 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
Identifier catalog,
Identifier schema,
Identifier tableName) {
Identifier catalogToUse = null;
Identifier schemaToUse = null;
final String catalogFilter;
final String schemaFilter;
@ -275,6 +281,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
catalogFilter = "";
}
else {
catalogToUse = catalog;
catalogFilter = toMetaDataObjectName( catalog );
}
}
@ -287,6 +294,7 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
schemaFilter = "";
}
else {
schemaToUse = schema;
schemaFilter = toMetaDataObjectName( schema );
}
}
@ -305,8 +313,8 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
);
return processGetTableResults(
catalog,
schema,
catalogToUse,
schemaToUse,
tableName,
resultSet
);
@ -364,14 +372,44 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements Information
@Override
public ColumnInformation getColumn(TableInformation tableInformation, Identifier columnIdentifier) {
final Identifier catalog = tableInformation.getName().getCatalogName();
final Identifier schema = tableInformation.getName().getSchemaName();
final String catalogFilter;
final String schemaFilter;
if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsCatalogs() ) {
if ( catalog == null ) {
catalogFilter = "";
}
else {
catalogFilter = toMetaDataObjectName( catalog );
}
}
else {
catalogFilter = null;
}
if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsSchemas() ) {
if ( schema == null ) {
schemaFilter = "";
}
else {
schemaFilter = toMetaDataObjectName( schema );
}
}
else {
schemaFilter = null;
}
final String tableFilter = toMetaDataObjectName( tableInformation.getName().getTableName() );
final String columnFilter = toMetaDataObjectName( columnIdentifier );
try {
ResultSet resultSet = extractionContext.getJdbcDatabaseMetaData().getColumns(
identifierHelper().toMetaDataCatalogName( tableInformation.getName().getCatalogName() ),
identifierHelper().toMetaDataSchemaName( tableInformation.getName().getSchemaName() ),
identifierHelper().toMetaDataObjectName( tableInformation.getName().getTableName() ),
extractionContext.getJdbcEnvironment()
.getIdentifierHelper()
.toMetaDataObjectName( columnIdentifier )
catalogFilter,
schemaFilter,
tableFilter,
columnFilter
);
try {

View File

@ -74,7 +74,7 @@ public class ManyToManyOwner {
)
},
inverseForeignKey = @ForeignKey(value = ConstraintMode.NO_CONSTRAINT),
name = "many_to_many"
name = "Many_To_Many"
)
public Set<ManyToManyTarget> getManyToMany() {
return manyToMany;

View File

@ -128,8 +128,8 @@ public class MigrationTest extends BaseUnitTestCase {
.addAnnotatedClass( EntityWithIndex.class )
.buildMetadata();
// export the schema
new SchemaExport( metadata ).execute( Target.EXPORT, SchemaExport.Type.CREATE );
// drop and then create the schema
new SchemaExport( metadata ).execute( Target.EXPORT, SchemaExport.Type.BOTH );
try {
// update the schema
@ -157,8 +157,8 @@ public class MigrationTest extends BaseUnitTestCase {
.addAnnotatedClass( PersonInfo.class )
.buildMetadata();
// export the schema
new SchemaExport( metadata, true ).execute( Target.EXPORT, SchemaExport.Type.CREATE );
// drop and then create the schema
new SchemaExport( metadata, true ).execute( Target.EXPORT, SchemaExport.Type.BOTH );
try {
// update the schema

View File

@ -12,6 +12,7 @@ import org.hibernate.cfg.Environment;
import org.hibernate.dialect.Dialect;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.Target;
import org.hibernate.testing.ServiceRegistryBuilder;
import org.hibernate.testing.junit4.BaseUnitTestCase;
@ -43,6 +44,7 @@ public abstract class SchemaExportTest extends BaseUnitTestCase {
metadata = (MetadataImplementor) new MetadataSources( serviceRegistry )
.addResource( "org/hibernate/test/schemaupdate/mapping.hbm.xml" )
.buildMetadata();
metadata.validate();
SchemaExport schemaExport = createSchemaExport( metadata, serviceRegistry );
schemaExport.drop( true, true );
@ -59,20 +61,15 @@ public abstract class SchemaExportTest extends BaseUnitTestCase {
final SchemaExport schemaExport = createSchemaExport( metadata, serviceRegistry );
// create w/o dropping first (OK because tables don't exist yet)
schemaExport.execute( false, true, false, true );
// if ( doesDialectSupportDropTableIfExist() ) {
assertEquals( 0, schemaExport.getExceptions().size() );
// }
// else {
// assertEquals( 2, schemaExport.getExceptions().size() );
// }
// create w/o dropping again; should be an exception for each table
// (2 total) because the tables exist already
// assertEquals( 0, schemaExport.getExceptions().size() );
schemaExport.execute( false, true, false, true );
assertEquals( 2, schemaExport.getExceptions().size() );
schemaExport.execute( Target.EXPORT, SchemaExport.Type.CREATE );
assertEquals( 0, schemaExport.getExceptions().size() );
// create w/o dropping again; should cause an exception because the tables exist already
schemaExport.execute( Target.EXPORT, SchemaExport.Type.CREATE );
assertEquals( 1, schemaExport.getExceptions().size() );
// drop tables only
schemaExport.execute( false, true, true, false );
schemaExport.execute( Target.EXPORT, SchemaExport.Type.DROP );
assertEquals( 0, schemaExport.getExceptions().size() );
}
@ -86,11 +83,13 @@ public abstract class SchemaExportTest extends BaseUnitTestCase {
assertEquals( 0, schemaExport.getExceptions().size() );
}
else {
assertEquals( 2, schemaExport.getExceptions().size() );
assertEquals( 1, schemaExport.getExceptions().size() );
}
// drop before create again (this time drops the tables before re-creating)
schemaExport.execute( false, true, false, false );
assertEquals( 0, schemaExport.getExceptions().size() );
// drop tables
schemaExport.execute( false, true, true, false );
assertEquals( 0, schemaExport.getExceptions().size() );
@ -101,14 +100,15 @@ public abstract class SchemaExportTest extends BaseUnitTestCase {
final SchemaExport schemaExport = createSchemaExport( metadata, serviceRegistry );
java.io.File outFile = new java.io.File("schema.ddl");
schemaExport.setOutputFile(outFile.getPath());
schemaExport.setOutputFile( outFile.getPath() );
// do not script to console or export to database
schemaExport.execute( false, false, false, true );
if ( doesDialectSupportDropTableIfExist()
&& schemaExport.getExceptions().size() > 0 ) {
if ( doesDialectSupportDropTableIfExist() && schemaExport.getExceptions().size() > 0 ) {
assertEquals( 2, schemaExport.getExceptions().size() );
}
assertTrue( outFile.exists() );
//check file is not empty
assertTrue( outFile.length() > 0 );
outFile.delete();

View File

@ -13,7 +13,7 @@
<class name="Version">
<id name="id">
<generator class="sequence"/>
<generator class="increment"/>
</id>
<property name="description"/>
</class>

View File

@ -0,0 +1,36 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later.
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.testing.jdbc;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Helper for draining a JDBC {@link ResultSet} into plain collections so tests can
 * easily inspect the returned data.
 *
 * @author Steve Ebersole
 */
public class ResultSetUtil {
	private ResultSetUtil() {
		// utility class; no instances
	}

	/**
	 * Reads every remaining row from the given {@link ResultSet} into a list of maps,
	 * one map per row, keyed by the column label reported by the result-set metadata.
	 *
	 * @param resultSet The result set to drain; iterated via {@link ResultSet#next()}
	 * and not closed by this method.
	 *
	 * @return One {@code Map} per row (possibly empty, never {@code null}).
	 *
	 * @throws SQLException Propagated from the underlying JDBC calls.
	 */
	public static List<Map<String,?>> extractResults(ResultSet resultSet) throws SQLException {
		final List<Map<String,?>> results = new ArrayList<Map<String, ?>>();
		// Column count is invariant across rows; hoist it out of the loops.
		final int columnCount = resultSet.getMetaData().getColumnCount();
		while ( resultSet.next() ) {
			final Map<String,Object> row = new HashMap<String, Object>();
			for ( int i = 1; i <= columnCount; i++ ) {
				row.put(
						resultSet.getMetaData().getColumnLabel( i ),
						resultSet.getObject( i )
				);
			}
			// Bug fix: add each row exactly once, AFTER all of its columns are read.
			// The original added the (shared) row map inside the column loop, producing
			// one duplicate list entry per column and zero entries for column-less rows.
			results.add( row );
		}
		return results;
	}
}