Merge remote-tracking branch 'upstream/main' into wip/6.0
Commit e39c77423b
MariaDBDialect.java

@@ -6,11 +6,15 @@
 */
package org.hibernate.dialect;

import java.sql.SQLException;
import java.sql.Types;

import org.hibernate.dialect.sequence.MariaDBSequenceSupport;
import org.hibernate.dialect.sequence.SequenceSupport;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
import org.hibernate.engine.jdbc.env.spi.IdentifierCaseStrategy;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.sql.ast.SqlAstTranslator;

@@ -157,4 +161,15 @@ public class MariaDBDialect extends MySQLDialect {
		return false;
	}

	@Override
	public IdentifierHelper buildIdentifierHelper(IdentifierHelperBuilder builder, DatabaseMetaData dbMetaData)
			throws SQLException {

		// some MariaDB drivers does not return case strategy info
		builder.setUnquotedCaseStrategy( IdentifierCaseStrategy.MIXED );
		builder.setQuotedCaseStrategy( IdentifierCaseStrategy.MIXED );

		return super.buildIdentifierHelper( builder, dbMetaData );
	}
}
MySQLDialect.java

@@ -7,6 +7,9 @@
package org.hibernate.dialect;

import org.hibernate.LockOptions;
import org.hibernate.engine.jdbc.env.spi.IdentifierCaseStrategy;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
import org.hibernate.query.NullOrdering;
import org.hibernate.query.NullPrecedence;
import org.hibernate.PessimisticLockException;

@@ -52,6 +55,7 @@ import org.hibernate.type.descriptor.jdbc.JdbcTypeDescriptor;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeDescriptorRegistry;

import java.sql.CallableStatement;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;

@@ -749,6 +753,18 @@ public class MySQLDialect extends Dialect {
		};
	}

	@Override
	public IdentifierHelper buildIdentifierHelper(IdentifierHelperBuilder builder, DatabaseMetaData dbMetaData)
			throws SQLException {

		if ( dbMetaData == null ) {
			builder.setUnquotedCaseStrategy( IdentifierCaseStrategy.MIXED );
			builder.setQuotedCaseStrategy( IdentifierCaseStrategy.MIXED );
		}

		return super.buildIdentifierHelper( builder, dbMetaData );
	}

	@Override
	public String getNotExpression(String expression) {
		return "not (" + expression + ")";
SQLServerDialect.java

@@ -19,6 +19,9 @@ import org.hibernate.dialect.sequence.ANSISequenceSupport;
import org.hibernate.dialect.sequence.NoSequenceSupport;
import org.hibernate.dialect.sequence.SequenceSupport;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
import org.hibernate.engine.jdbc.env.spi.IdentifierCaseStrategy;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.exception.LockTimeoutException;
import org.hibernate.exception.spi.SQLExceptionConversionDelegate;

@@ -36,6 +39,8 @@ import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.jdbc.JdbcTypeDescriptor;
import org.hibernate.type.descriptor.jdbc.SmallIntTypeDescriptor;

import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.regex.Pattern;

@@ -216,6 +221,18 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
		return "sysdatetime()";
	}

	@Override
	public IdentifierHelper buildIdentifierHelper(
			IdentifierHelperBuilder builder, DatabaseMetaData dbMetaData) throws SQLException {

		if ( dbMetaData == null ) {
			builder.setUnquotedCaseStrategy( IdentifierCaseStrategy.MIXED );
			builder.setQuotedCaseStrategy( IdentifierCaseStrategy.MIXED );
		}

		return super.buildIdentifierHelper( builder, dbMetaData );
	}

	@Override
	public String currentTime() {
		return "convert(time, getdate())";
IdentifierHelperBuilder.java

@@ -40,7 +40,7 @@ public class IdentifierHelperBuilder {
	private boolean globallyQuoteIdentifiers = false;
	private boolean skipGlobalQuotingForColumnDefinitions = false;
	private boolean autoQuoteKeywords = true;
	private IdentifierCaseStrategy unquotedCaseStrategy = IdentifierCaseStrategy.MIXED;
	private IdentifierCaseStrategy unquotedCaseStrategy = IdentifierCaseStrategy.UPPER;
	private IdentifierCaseStrategy quotedCaseStrategy = IdentifierCaseStrategy.MIXED;

	public static IdentifierHelperBuilder from(JdbcEnvironment jdbcEnvironment) {
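For context on the case-strategy lines above: the strategy fixes the canonical form an unquoted identifier takes before it is compared with names coming back from the database. The following standalone sketch is only an illustration of that effect under a deliberately simple, case-sensitive comparison; CaseStrategy, canonicalize and matches are hypothetical names, not Hibernate's API.

import java.util.Locale;

// Toy model of an unquoted-identifier case strategy (illustrative only).
enum CaseStrategy { UPPER, LOWER, MIXED }

final class IdentifierCaseDemo {

	// Canonical form an unquoted identifier takes before comparison.
	static String canonicalize(String unquotedName, CaseStrategy strategy) {
		switch ( strategy ) {
			case UPPER: return unquotedName.toUpperCase( Locale.ROOT );
			case LOWER: return unquotedName.toLowerCase( Locale.ROOT );
			default: return unquotedName; // MIXED keeps the name as written
		}
	}

	// Assume a case-sensitive match against the name the database reports,
	// roughly the situation when no driver metadata is available to soften it.
	static boolean matches(String mappedName, String reportedName, CaseStrategy strategy) {
		return canonicalize( mappedName, strategy ).equals( reportedName );
	}

	public static void main(String[] args) {
		// A lower-case name, as a database might report it:
		System.out.println( matches( "my_test_entity", "my_test_entity", CaseStrategy.UPPER ) ); // false
		System.out.println( matches( "my_test_entity", "my_test_entity", CaseStrategy.MIXED ) ); // true
	}
}

Under that toy comparison, an UPPER canonical form stops matching the lower-case name the database reports, which mirrors why the MariaDB, MySQL and SQL Server dialects above pin both strategies to MIXED when DatabaseMetaData is unavailable.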
StatisticsImpl.java

@@ -122,7 +122,7 @@ public class StatisticsImpl implements StatisticsImplementor, Service, Manageable
				Statistics.DEFAULT_QUERY_STATISTICS_MAX_SIZE,
				20
		);
		clear();
		resetStartTime();
		metamodel = sessionFactory.getMetamodel();
		cache = sessionFactory.getCache();
		cacheRegionPrefix = sessionFactoryOptions.getCacheRegionPrefix();

@@ -192,6 +192,10 @@ public class StatisticsImpl implements StatisticsImplementor, Service, Manageable
		queryPlanCacheHitCount.reset();
		queryPlanCacheMissCount.reset();

		resetStartTime();
	}

	private void resetStartTime() {
		startTime = System.currentTimeMillis();
	}

@@ -559,24 +563,21 @@ public class StatisticsImpl implements StatisticsImplementor, Service, Manageable
	}

	@Override
	public CacheRegionStatisticsImpl getQueryRegionStatistics(String regionName) {
		final CacheRegionStatisticsImpl existing = l2CacheStatsMap.get( regionName );
		if ( existing != null ) {
			return existing;
		}

		final QueryResultsCache regionAccess = cache
				.getQueryResultsCacheStrictly( regionName );
		if ( regionAccess == null ) {
			return null;
		}

		return l2CacheStatsMap.getOrCompute(
				regionName,
				s -> new CacheRegionStatisticsImpl( regionAccess.getRegion() )
		);
	public CacheRegionStatisticsImpl getQueryRegionStatistics(final String regionName) {
		return l2CacheStatsMap.getOrCompute( regionName, this::computeQueryRegionStatistics );
	}

	private CacheRegionStatisticsImpl computeQueryRegionStatistics(final String regionName) {
		final QueryResultsCache regionAccess = cache.getQueryResultsCacheStrictly( regionName );
		if ( regionAccess == null ) {
			return null; //this null value will be cached
		}
		else {
			return new CacheRegionStatisticsImpl( regionAccess.getRegion() );
		}
	}

	@Override
	public CacheRegionStatisticsImpl getCacheRegionStatistics(String regionName) {
		if ( ! secondLevelCacheEnabled ) {
StatsNamedContainer.java

@@ -25,7 +25,8 @@ import org.hibernate.internal.util.collections.BoundedConcurrentHashMap;
 */
public final class StatsNamedContainer<V> {

	private final ConcurrentMap<String,V> map;
	private final ConcurrentMap<String,Object> map;
	private final static Object NULL_TOKEN = new Object();

	/**
	 * Creates a bounded container - based on BoundedConcurrentHashMap

@@ -63,33 +64,39 @@ public final class StatsNamedContainer<V> {
	 * sure the function is invoked at most once: we don't need this guarantee, and prefer to reduce risk of blocking.
	 */
	public V getOrCompute(final String key, final Function<String, V> function) {
		final V v1 = map.get( key );
		final Object v1 = map.get( key );
		if ( v1 != null ) {
			return v1;
			if ( v1 == NULL_TOKEN ) {
				return null;
			}
			return (V) v1;
		}
		else {
			final V v2 = function.apply( key );
			//Occasionally a function might return null. We can't store a null in the CHM,
			// so a placeholder would be required to implement that, but we prefer to just keep this
			// situation as slightly sub-optimal so to not make the code more complex just to handle the exceptional case:
			// null values are assumed to be rare enough for this not being worth it.
			if ( v2 == null ) {
				map.put( key, NULL_TOKEN );
				return null;
			}
			else {
				final V v3 = map.putIfAbsent( key, v2 );
				final Object v3 = map.putIfAbsent( key, v2 );
				if ( v3 == null ) {
					return v2;
				}
				else {
					return v3;
					return (V) v3;
				}
			}
		}
	}

	public V get(final String key) {
		return map.get( key );
		final Object o = map.get( key );
		if ( o == NULL_TOKEN) {
			return null;
		}
		else {
			return (V) o;
		}
	}

}
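The getOrCompute change above works around ConcurrentHashMap's refusal to store null values by caching a private sentinel object in place of null. A minimal standalone sketch of that pattern, with hypothetical names (NullTolerantCache is not a Hibernate class):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;

// Caches computed values, including "absent" results, in a ConcurrentMap.
// ConcurrentHashMap forbids null values, so a sentinel object stands in for null.
final class NullTolerantCache<V> {

	private static final Object NULL_TOKEN = new Object();
	private final ConcurrentMap<String, Object> map = new ConcurrentHashMap<>();

	@SuppressWarnings("unchecked")
	public V getOrCompute(String key, Function<String, V> function) {
		final Object existing = map.get( key );
		if ( existing != null ) {
			return existing == NULL_TOKEN ? null : (V) existing;
		}
		final V computed = function.apply( key );
		if ( computed == null ) {
			map.put( key, NULL_TOKEN ); // remember that the computation produced nothing
			return null;
		}
		final Object raced = map.putIfAbsent( key, computed );
		return raced == null ? computed : ( raced == NULL_TOKEN ? null : (V) raced );
	}
}

As in the patch, the function may still run more than once under contention; putIfAbsent only guarantees that a single value ends up in the map.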
InformationExtractorJdbcDatabaseMetaDataImpl.java

@@ -29,6 +29,8 @@ import org.hibernate.cfg.AvailableSettings;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.config.spi.StandardConverters;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.engine.jdbc.env.spi.NameQualifierSupport;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;

@@ -59,12 +61,26 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements InformationExtractor

	private final ExtractionContext extractionContext;

	private final boolean useJdbcMetadataDefaultsSetting;

	private Identifier currentCatalog;
	private Identifier currentSchema;

	private String currentCatalogFilter;
	private String currentSchemaFilter;

	public InformationExtractorJdbcDatabaseMetaDataImpl(ExtractionContext extractionContext) {
		this.extractionContext = extractionContext;

		ConfigurationService configService = extractionContext.getServiceRegistry()
				.getService( ConfigurationService.class );

		useJdbcMetadataDefaultsSetting = configService.getSetting(
				"hibernate.temp.use_jdbc_metadata_defaults",
				StandardConverters.BOOLEAN,
				Boolean.TRUE
		);

		final String extraPhysycalTableTypesConfig = configService.getSetting(
				AvailableSettings.EXTRA_PHYSICAL_TABLE_TYPES,
				StandardConverters.STRING,

@@ -229,11 +245,14 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements InformationExtractor
		TableInformation tableInfo = null;

		// 1) look in current namespace
		if ( extractionContext.getJdbcEnvironment().getCurrentCatalog() != null
				|| extractionContext.getJdbcEnvironment().getCurrentSchema() != null ) {
		final JdbcEnvironment jdbcEnvironment = extractionContext.getJdbcEnvironment();
		final Identifier currentSchema = getCurrentSchema( jdbcEnvironment );
		final Identifier currentCatalog = getCurrentCatalog( jdbcEnvironment );
		if ( currentCatalog != null
				|| currentSchema != null ) {
			tableInfo = locateTableInNamespace(
					extractionContext.getJdbcEnvironment().getCurrentCatalog(),
					extractionContext.getJdbcEnvironment().getCurrentSchema(),
					currentCatalog,
					currentSchema,
					tableName
			);

@@ -288,23 +307,106 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements InformationExtractor
		}
	}

	private Identifier getCurrentSchema(JdbcEnvironment jdbcEnvironment) {
		if ( currentSchema != null ) {
			return currentSchema;
		}
		final Identifier schema = jdbcEnvironment.getCurrentSchema();
		if ( schema != null ) {
			currentSchema = schema;
		}
		if ( !useJdbcMetadataDefaultsSetting ) {
			try {
				currentSchema = extractionContext.getJdbcEnvironment()
						.getIdentifierHelper()
						.toIdentifier( extractionContext.getJdbcConnection().getSchema() );
			}
			catch (SQLException ignore) {
				log.sqlWarning( ignore.getErrorCode(), ignore.getSQLState() );
			}
		}
		return currentSchema;
	}

	private Identifier getCurrentCatalog(JdbcEnvironment jdbcEnvironment) {
		if ( currentCatalog != null ) {
			return currentCatalog;
		}
		final Identifier catalog = jdbcEnvironment.getCurrentCatalog();
		if ( catalog != null ) {
			currentCatalog = catalog;
		}
		if ( !useJdbcMetadataDefaultsSetting ) {
			try {
				currentCatalog = extractionContext.getJdbcEnvironment()
						.getIdentifierHelper()
						.toIdentifier( extractionContext.getJdbcConnection().getCatalog() );
			}
			catch (SQLException ignore) {
				log.sqlWarning( ignore.getErrorCode(), ignore.getSQLState() );
			}
		}
		return currentCatalog;
	}

	private String getCurrentCatalogFilter(JdbcEnvironment jdbcEnvironment) {
		if ( currentCatalogFilter != null ) {
			return currentCatalogFilter;
		}
		final Identifier currentCatalog = jdbcEnvironment.getCurrentCatalog();
		if ( currentCatalog != null ) {
			currentCatalogFilter = toMetaDataObjectName( currentCatalog );
		}
		if ( !useJdbcMetadataDefaultsSetting ) {
			try {
				currentCatalogFilter = extractionContext.getJdbcConnection().getCatalog();
			}
			catch (SQLException ignore) {
				log.sqlWarning( ignore.getErrorCode(), ignore.getSQLState() );
			}
		}
		return currentCatalogFilter;
	}

	private String getCurrentSchemaFilter(JdbcEnvironment jdbcEnvironment) {
		if ( currentSchemaFilter != null ) {
			return currentSchemaFilter;
		}
		final Identifier currentSchema = jdbcEnvironment.getCurrentSchema();
		if ( currentSchema != null ) {
			currentSchemaFilter = toMetaDataObjectName( currentSchema );
		}

		if ( !useJdbcMetadataDefaultsSetting ) {
			try {
				currentSchemaFilter = extractionContext.getJdbcConnection().getSchema();
			}
			catch (SQLException ignore) {
				log.sqlWarning( ignore.getErrorCode(), ignore.getSQLState() );
			}
		}
		return currentSchemaFilter;
	}

	public NameSpaceTablesInformation getTables(Identifier catalog, Identifier schema) {

		String catalogFilter = null;
		String schemaFilter = null;

		if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsCatalogs() ) {
		final JdbcEnvironment jdbcEnvironment = extractionContext.getJdbcEnvironment();
		final NameQualifierSupport nameQualifierSupport = jdbcEnvironment.getNameQualifierSupport();
		if ( nameQualifierSupport.supportsCatalogs() ) {
			if ( catalog == null ) {
				if ( extractionContext.getJdbcEnvironment().getCurrentCatalog() != null ) {
					// 1) look in current namespace
					catalogFilter = toMetaDataObjectName( extractionContext.getJdbcEnvironment().getCurrentCatalog() );
				}
				else if ( extractionContext.getDefaultCatalog() != null ) {
					// 2) look in default namespace
					catalogFilter = toMetaDataObjectName( extractionContext.getDefaultCatalog() );
				}
				else {
					catalogFilter = "";
				// look in the current namespace
				catalogFilter = getCurrentCatalogFilter(jdbcEnvironment);
				if ( catalogFilter == null ) {
					if ( extractionContext.getDefaultCatalog() != null ) {
						// 2) look in default namespace
						catalogFilter = toMetaDataObjectName( extractionContext.getDefaultCatalog() );
					}
					else {
						catalogFilter = "";
					}
				}
			}
			else {

@@ -312,18 +414,18 @@ public class InformationExtractorJdbcDatabaseMetaDataImpl implements InformationExtractor
			}
		}

		if ( extractionContext.getJdbcEnvironment().getNameQualifierSupport().supportsSchemas() ) {
		if ( nameQualifierSupport.supportsSchemas() ) {
			if ( schema == null ) {
				if ( extractionContext.getJdbcEnvironment().getCurrentSchema() != null ) {
					// 1) look in current namespace
					schemaFilter = toMetaDataObjectName( extractionContext.getJdbcEnvironment().getCurrentSchema() );
				}
				else if ( extractionContext.getDefaultSchema() != null ) {
					// 2) look in default namespace
					schemaFilter = toMetaDataObjectName( extractionContext.getDefaultSchema() );
				}
				else {
					schemaFilter = "";
				// 1) look in current namespace
				schemaFilter = getCurrentSchemaFilter( jdbcEnvironment );
				if ( schemaFilter == null ) {
					if ( extractionContext.getDefaultSchema() != null ) {
						// 2) look in default namespace
						schemaFilter = toMetaDataObjectName( extractionContext.getDefaultSchema() );
					}
					else {
						schemaFilter = "";
					}
				}
			}
			else {
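The new getCurrentSchema/getCurrentCatalog helpers memoize their result and, when hibernate.temp.use_jdbc_metadata_defaults is false, fall back to asking the JDBC Connection directly. A condensed sketch of that lookup order, with hypothetical field and method names (the schema variant is symmetric):

import java.sql.Connection;
import java.sql.SQLException;

// Sketch of the memoize-then-fallback lookup used for the current catalog.
// Names here are illustrative only, not Hibernate's internal API.
final class CurrentNamespaceLookup {

	private final Connection connection;
	private final boolean useJdbcMetadataDefaults;
	private String currentCatalog;

	CurrentNamespaceLookup(Connection connection, boolean useJdbcMetadataDefaults) {
		this.connection = connection;
		this.useJdbcMetadataDefaults = useJdbcMetadataDefaults;
	}

	String getCurrentCatalog(String environmentCatalog) {
		if ( currentCatalog != null ) {
			return currentCatalog; // already resolved once; reuse it
		}
		if ( environmentCatalog != null ) {
			currentCatalog = environmentCatalog; // 1) value the environment already knows
		}
		if ( !useJdbcMetadataDefaults ) {
			try {
				currentCatalog = connection.getCatalog(); // 2) ask the JDBC connection
			}
			catch (SQLException ignore) {
				// keep whatever we already have; callers treat null as "unknown"
			}
		}
		return currentCatalog;
	}
}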
StatsNamedContainerNullComputedValueTest.java

@@ -8,14 +8,21 @@ package org.hibernate.orm.test.stat.internal;

import org.hibernate.stat.internal.StatsNamedContainer;

import java.util.concurrent.atomic.AtomicInteger;

import org.hibernate.testing.TestForIssue;
import org.junit.Assert;
import org.junit.Test;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertNull;

@TestForIssue(jiraKey = "HHH-13645")
public class StatsNamedContainerNullComputedValueTest {

	private final static AtomicInteger invocationCounterNullProducer = new AtomicInteger();
	private final static AtomicInteger invocationCounterValueProducer = new AtomicInteger();

	@Test
	public void testNullComputedValue() {
		final StatsNamedContainer statsNamedContainer = new StatsNamedContainer<Integer>();

@@ -29,4 +36,48 @@ public class StatsNamedContainerNullComputedValueTest {
		);
	}

}
	@Test
	public void abletoStoreNullValues() {
		final StatsNamedContainer statsNamedContainer = new StatsNamedContainer<Integer>();
		Assert.assertEquals( 0, invocationCounterNullProducer.get() );
		assertNull( getCacheWithNullValue( statsNamedContainer ) );
		Assert.assertEquals( 1, invocationCounterNullProducer.get() );
		assertNull( getCacheWithNullValue( statsNamedContainer ) );
		Assert.assertEquals( 1, invocationCounterNullProducer.get() );
	}

	@Test
	public void abletoStoreActualValues() {
		final StatsNamedContainer statsNamedContainer = new StatsNamedContainer<Integer>();
		Assert.assertEquals( 0, invocationCounterValueProducer.get() );
		Assert.assertEquals( 5, getCacheWithActualValue( statsNamedContainer ) );
		Assert.assertEquals( 1, invocationCounterValueProducer.get() );
		Assert.assertEquals( 5, getCacheWithActualValue( statsNamedContainer ) );
		Assert.assertEquals( 1, invocationCounterValueProducer.get() );
	}

	private Object getCacheWithActualValue(StatsNamedContainer statsNamedContainer) {
		return statsNamedContainer.getOrCompute(
				"key",
				StatsNamedContainerNullComputedValueTest::produceValue
		);
	}

	private Object getCacheWithNullValue(StatsNamedContainer statsNamedContainer) {
		return statsNamedContainer.getOrCompute(
				"key",
				StatsNamedContainerNullComputedValueTest::produceNull
		);
	}

	private static Integer produceValue(Object o) {
		invocationCounterValueProducer.getAndIncrement();
		return Integer.valueOf( 5 );
	}

	private static Integer produceNull(Object v) {
		invocationCounterNullProducer.getAndIncrement();
		return null;
	}

}
SchemaUpdateWithUseJdbcMetadataDefaultsSettingToFalseAndQuotedNameTest.java

@@ -0,0 +1,147 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later
 * See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
 */
package org.hibernate.test.schemaupdate;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.EnumSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.dialect.Dialect;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.hbm2ddl.SchemaValidator;
import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.TargetType;

import org.hibernate.testing.TestForIssue;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;

@TestForIssue(jiraKey = "HHH-13788")
@RunWith(Parameterized.class)
public class SchemaUpdateWithUseJdbcMetadataDefaultsSettingToFalseAndQuotedNameTest {
	@Parameterized.Parameters
	public static String[] parameters() {
		return new String[] {
				JdbcMetadaAccessStrategy.GROUPED.toString(),
				JdbcMetadaAccessStrategy.INDIVIDUALLY.toString()
		};
	}

	@Parameterized.Parameter
	public String jdbcMetadataExtractorStrategy;

	private File updateOutputFile;
	private File createOutputFile;
	private StandardServiceRegistry ssr;
	private MetadataImplementor metadata;

	@Before
	public void setUp() throws IOException {
		createOutputFile = File.createTempFile( "create_script", ".sql" );
		createOutputFile.deleteOnExit();
		updateOutputFile = File.createTempFile( "update_script", ".sql" );
		updateOutputFile.deleteOnExit();
		ssr = new StandardServiceRegistryBuilder()
				.applySetting( "hibernate.temp.use_jdbc_metadata_defaults", "false" )
				.applySetting( AvailableSettings.SHOW_SQL, "true" )
				.applySetting(
						AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY,
						jdbcMetadataExtractorStrategy
				)
				.build();

		final MetadataSources metadataSources = new MetadataSources( ssr );
		metadataSources.addAnnotatedClass( AnotherTestEntity.class );

		metadata = (MetadataImplementor) metadataSources.buildMetadata();
		metadata.validate();
	}

	@After
	public void tearDown() {
		new SchemaExport().setHaltOnError( true )
				.setFormat( false )
				.drop( EnumSet.of( TargetType.DATABASE ), metadata );
		StandardServiceRegistryBuilder.destroy( ssr );
	}

	@Test
	public void testSchemaUpdateDoesNotTryToRecreateExistingTables()
			throws Exception {
		createSchema();

		new SchemaUpdate().setHaltOnError( true )
				.setOutputFile( updateOutputFile.getAbsolutePath() )
				.setFormat( false )
				.execute( EnumSet.of( TargetType.DATABASE, TargetType.SCRIPT ), metadata );

		checkNoUpdateStatementHasBeenGenerated();
	}

	private void checkNoUpdateStatementHasBeenGenerated() throws IOException {
		final String fileContent = new String( Files.readAllBytes( updateOutputFile.toPath() ) );
		assertThat(
				"The update output file should be empty because the db schema had already been generated and the domain model was not modified",
				fileContent,
				is( "" )
		);
	}

	private void createSchema() throws Exception {
		new SchemaUpdate().setHaltOnError( true )
				.setOutputFile( createOutputFile.getAbsolutePath() )
				.execute( EnumSet.of( TargetType.DATABASE, TargetType.SCRIPT ), metadata );
		new SchemaValidator().validate( metadata );
		checkSchemaHasBeenGenerated();
	}

	private void checkSchemaHasBeenGenerated() throws Exception {
		String fileContent = new String( Files.readAllBytes( createOutputFile.toPath() ) );
		final Dialect dialect = metadata.getDatabase().getDialect();
		Pattern fileContentPattern;
		if ( dialect.openQuote() == '[' ) {
			fileContentPattern = Pattern.compile( "create( (column|row))? table " + "\\[" + "another_test_entity" + "\\]" );
		}
		else {
			fileContentPattern = Pattern.compile( "create( (column|row))? table " + dialect.openQuote() + "another_test_entity" + dialect
					.closeQuote() );
		}
		Matcher fileContentMatcher = fileContentPattern.matcher( fileContent.toLowerCase() );
		assertThat(
				"The schema has not been correctly generated, Script file : " + fileContent.toLowerCase(),
				fileContentMatcher.find(),
				is( true )
		);
	}

	@Entity(name = "`Another_Test_Entity`")
	public static class AnotherTestEntity {
		@Id
		private Long id;

		@Column(name = "`another_NAME`")
		private String name;
	}
}
SchemaUpdateWithUseJdbcMetadataDefaultsSettingToFalseTest.java

@@ -0,0 +1,141 @@
/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later
 * See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
 */
package org.hibernate.test.schemaupdate;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.EnumSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.hbm2ddl.SchemaValidator;
import org.hibernate.tool.schema.JdbcMetadaAccessStrategy;
import org.hibernate.tool.schema.TargetType;

import org.hibernate.testing.TestForIssue;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;

/**
 * @author Andrea Boriero
 */
@TestForIssue(jiraKey = "HHH-13788")
@RunWith(Parameterized.class)
public class SchemaUpdateWithUseJdbcMetadataDefaultsSettingToFalseTest {

	@Parameterized.Parameters
	public static String[] parameters() {
		return new String[] {
				JdbcMetadaAccessStrategy.GROUPED.toString(),
				JdbcMetadaAccessStrategy.INDIVIDUALLY.toString()
		};
	}

	@Parameterized.Parameter
	public String jdbcMetadataExtractorStrategy;

	private File updateOutputFile;
	private File createOutputFile;
	private StandardServiceRegistry ssr;
	private MetadataImplementor metadata;

	@Before
	public void setUp() throws IOException {
		createOutputFile = File.createTempFile( "create_script", ".sql" );
		createOutputFile.deleteOnExit();
		updateOutputFile = File.createTempFile( "update_script", ".sql" );
		updateOutputFile.deleteOnExit();
		ssr = new StandardServiceRegistryBuilder()
				.applySetting( "hibernate.temp.use_jdbc_metadata_defaults", "false" )
				.applySetting( AvailableSettings.SHOW_SQL, "true" )
				.applySetting(
						AvailableSettings.HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY,
						jdbcMetadataExtractorStrategy
				)
				.build();

		final MetadataSources metadataSources = new MetadataSources( ssr );
		metadataSources.addAnnotatedClass( TestEntity.class );

		metadata = (MetadataImplementor) metadataSources.buildMetadata();
		metadata.validate();
	}

	@After
	public void tearDown() {
		new SchemaExport().setHaltOnError( true )
				.setFormat( false )
				.drop( EnumSet.of( TargetType.DATABASE ), metadata );
		StandardServiceRegistryBuilder.destroy( ssr );
	}

	@Test
	public void testSchemaUpdateDoesNotTryToRecreateExistingTables()
			throws Exception {
		createSchema();

		new SchemaUpdate().setHaltOnError( true )
				.setOutputFile( updateOutputFile.getAbsolutePath() )
				.setFormat( false )
				.execute( EnumSet.of( TargetType.DATABASE, TargetType.SCRIPT ), metadata );

		checkNoUpdateStatementHasBeenGenerated();
	}

	private void checkNoUpdateStatementHasBeenGenerated() throws IOException {
		final String fileContent = new String( Files.readAllBytes( updateOutputFile.toPath() ) );
		assertThat(
				"The update output file should be empty because the db schema had already been generated and the domain model was not modified",
				fileContent,
				is( "" )
		);
	}

	private void createSchema() throws Exception {
		new SchemaUpdate().setHaltOnError( true )
				.setOutputFile( createOutputFile.getAbsolutePath() )
				.execute( EnumSet.of( TargetType.DATABASE, TargetType.SCRIPT ), metadata );
		new SchemaValidator().validate( metadata );
		checkSchemaHasBeenGenerated();
	}

	private void checkSchemaHasBeenGenerated() throws Exception {
		String fileContent = new String( Files.readAllBytes( createOutputFile.toPath() ) );
		Pattern fileContentPattern = Pattern.compile( "create( (column|row))? table my_test_entity" );
		Matcher fileContentMatcher = fileContentPattern.matcher( fileContent.toLowerCase() );
		assertThat(
				"The schema has not been correctly generated, Script file : " + fileContent.toLowerCase(),
				fileContentMatcher.find(),
				is( true )
		);
	}

	@Entity(name = "My_Test_Entity")
	public static class TestEntity {
		@Id
		private Long id;

		private String name;
	}
}