HHH-15114 - Clean up deprecations

Steve Ebersole 2022-03-08 20:23:45 -06:00
parent c46ec5a14e
commit 415b28f116
50 changed files with 262 additions and 543 deletions

View File

@ -155,10 +155,6 @@ public class BootstrapTest {
// Read all mappings from a jar file.
// Assumes that any file named *.hbm.xml is a mapping document.
sources.addJar(new File("./entities.jar"));
// Read a mapping as an application resource using the convention that a class named foo.bar.MyEntity is
// mapped by a file named foo/bar/MyEntity.hbm.xml which can be resolved as a classpath resource.
sources.addClass(MyEntity.class);
//end::bootstrap-bootstrap-native-registry-MetadataSources-example[]
}
catch (Exception ignore) {

View File

@ -10,6 +10,7 @@ import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.CONSTRUCTOR;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.PACKAGE;
import static java.lang.annotation.ElementType.TYPE;
@ -22,7 +23,7 @@ import static java.lang.annotation.RetentionPolicy.CLASS;
*
* @author Steve Ebersole
*/
@Target({PACKAGE, TYPE, METHOD, CONSTRUCTOR})
@Target({PACKAGE, TYPE, METHOD, FIELD, CONSTRUCTOR})
@Retention(CLASS)
public @interface Internal {
}

View File

@ -38,50 +38,6 @@ public enum LockMode {
* pulled from a cache.
*/
READ( 5, "read" ),
/**
* An upgrade lock. Objects loaded in this lock mode are
* materialized using an SQL {@code select ... for update}.
*
* @deprecated instead use PESSIMISTIC_WRITE
*/
@Deprecated
UPGRADE( 10, "upgrade" ),
/**
* Attempt to obtain an upgrade lock, using an Oracle-style
* {@code select for update nowait}. The semantics of
* this lock mode, once obtained, are the same as
* {@link #UPGRADE}.
*/
UPGRADE_NOWAIT( 10, "upgrade-nowait" ),
/**
* Attempt to obtain an upgrade lock, using an Oracle-style
* {@code select for update skip locked}. The semantics of
* this lock mode, once obtained, are the same as
* {@link #UPGRADE}.
*/
UPGRADE_SKIPLOCKED( 10, "upgrade-skiplocked" ),
/**
* A {@code WRITE} lock is obtained when an object is updated
* or inserted. This lock mode is for internal use only and is
* not a valid mode for {@code load()} or {@code lock()} (both
* of which throw exceptions if {@code WRITE} is specified).
*/
WRITE( 10, "write" ),
/**
* Similar to {@link #UPGRADE} except that, for versioned entities,
* it results in a forced version increment.
*
* @deprecated instead use PESSIMISTIC_FORCE_INCREMENT
*/
@Deprecated
FORCE( 15, "force" ),
/*
* start of jakarta.persistence.LockModeType equivalent modes
*/
/**
* Optimistically assume that transaction will not experience contention for
@ -95,15 +51,39 @@ public enum LockMode {
*/
OPTIMISTIC_FORCE_INCREMENT( 7, "optimistic_force_increment" ),
/**
* A {@code WRITE} lock is obtained when an object is updated or inserted.
*
* This lock mode is for internal use only and is not a valid mode for
* {@code load()} or {@code lock()}, both of which throw exceptions if
* {@code WRITE} is specified.
*/
@Internal
WRITE( 10, "write" ),
/**
* Attempt to obtain an upgrade lock, using an Oracle-style
* {@code select for update nowait}. The semantics of
* this lock mode, once obtained, are the same as
* {@link #PESSIMISTIC_WRITE}.
*/
UPGRADE_NOWAIT( 10, "upgrade-nowait" ),
/**
* Attempt to obtain an upgrade lock, using an Oracle-style
* {@code select for update skip locked}. The semantics of
* this lock mode, once obtained, are the same as
* {@link #PESSIMISTIC_WRITE}.
*/
UPGRADE_SKIPLOCKED( 10, "upgrade-skiplocked" ),
/**
* Implemented as PESSIMISTIC_WRITE.
* TODO: introduce separate support for PESSIMISTIC_READ
*/
PESSIMISTIC_READ( 12, "pessimistic_read" ),
/**
* Transaction will obtain a database lock immediately.
* TODO: add PESSIMISTIC_WRITE_NOWAIT
*/
PESSIMISTIC_WRITE( 13, "pessimistic_write" ),

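For application code, the removed UPGRADE and FORCE constants map onto the JPA-aligned modes, which is exactly how the tests further down in this commit are updated. A minimal sketch, assuming a placeholder Item entity and itemId:

Item item = session.get( Item.class, itemId );
// was session.lock( item, LockMode.UPGRADE )
session.lock( item, LockMode.PESSIMISTIC_WRITE );
// was session.lock( item, LockMode.FORCE ); forces a version increment for versioned entities
session.lock( item, LockMode.PESSIMISTIC_FORCE_INCREMENT );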
View File

@ -35,7 +35,7 @@ public class LockOptions implements Serializable {
* Represents LockMode.UPGRADE (will wait forever for lock and scope of false meaning only entity is locked).
*/
@SuppressWarnings("deprecation")
public static final LockOptions UPGRADE = new LockOptions(LockMode.UPGRADE);
public static final LockOptions UPGRADE = new LockOptions(LockMode.PESSIMISTIC_WRITE);
/**
* Indicates that the database should not wait at all to acquire the pessimistic lock.

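The pre-built UPGRADE constant is now backed by PESSIMISTIC_WRITE. A no-wait variant can be assembled the same way the dialect tests later in this commit do; a short sketch:

LockOptions lockOptions = new LockOptions( LockMode.PESSIMISTIC_WRITE );
// fail immediately instead of waiting for the row lock
lockOptions.setTimeOut( LockOptions.NO_WAIT );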
View File

@ -144,10 +144,6 @@ public interface SessionBuilder<T extends SessionBuilder> {
* @return {@code this}, for method chaining
*
* @see jakarta.persistence.PersistenceContextType
*
* @deprecated Only integrations can specify autoClosing behavior of
* individual sessions.
*/
@Deprecated
T autoClose(boolean autoClose);
}

View File

@ -306,53 +306,4 @@ public interface SessionFactory extends EntityManagerFactory, Referenceable, Ser
*/
boolean containsFetchProfileDefinition(String name);
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Deprecations
/**
* Retrieve the {@link ClassMetadata} associated with the given entity class.
*
* @param entityClass The entity class
*
* @return The metadata associated with the given entity; may be null if no such
* entity was mapped.
*
* @throws HibernateException Generally null is returned instead of throwing.
*
* @deprecated Use the descriptors from {@link #getMetamodel()} instead
*/
@Deprecated
ClassMetadata getClassMetadata(@SuppressWarnings("rawtypes") Class entityClass);
/**
* Retrieve the {@link ClassMetadata} associated with the given entity class.
*
* @param entityName The entity class
*
* @return The metadata associated with the given entity; may be null if no such
* entity was mapped.
*
* @throws HibernateException Generally null is returned instead of throwing.
* @since 3.0
*
* @deprecated Use the descriptors from {@link #getMetamodel()} instead
*/
@Deprecated
ClassMetadata getClassMetadata(String entityName);
/**
* Get the {@link CollectionMetadata} associated with the named collection role.
*
* @param roleName The collection role (in form [owning-entity-name].[collection-property-name]).
*
* @return The metadata associated with the given collection; may be null if no such
* collection was mapped.
*
* @throws HibernateException Generally null is returned instead of throwing.
*
* @deprecated Use the descriptors from {@link #getMetamodel()} instead
*/
@Deprecated
CollectionMetadata getCollectionMetadata(String roleName);
}

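The removed metadata lookups have direct equivalents on the mapping metamodel, as the test updates later in this commit illustrate. A rough sketch, assuming a placeholder entity class and collection role:

EntityPersister entityDescriptor = sessionFactory
        .unwrap( SessionFactoryImplementor.class )
        .getMappingMetamodel()
        .getEntityDescriptor( MyEntity.class );
CollectionPersister collectionDescriptor = sessionFactory
        .unwrap( SessionFactoryImplementor.class )
        .getMappingMetamodel()
        .getCollectionDescriptor( MyEntity.class.getName() + ".children" );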
View File

@ -6,6 +6,11 @@
*/
package org.hibernate.annotations;
import jakarta.persistence.OneToMany;
import jakarta.persistence.OneToOne;
import org.hibernate.Internal;
/**
* Cascade types (can override default JPA cascades).
*/
@ -53,9 +58,10 @@ public enum CascadeType {
/**
* Hibernate originally handled orphan removal as a specialized cascade.
*
* @deprecated use @OneToOne(orphanRemoval=true) or @OneToMany(orphanRemoval=true)
* @apiNote This is valid only for internal usage. Use {@link OneToOne#orphanRemoval()}
* or {@link OneToMany#orphanRemoval()} instead
*/
@Deprecated
@Internal
DELETE_ORPHAN,
/**

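The JPA attribute covers the same behavior, so a mapping that previously relied on the Hibernate-specific cascade declares orphan removal directly on the association. A hedged sketch with placeholder Parent/Child entities:

@OneToMany( mappedBy = "parent", orphanRemoval = true )
private Set<Child> children = new HashSet<>();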
View File

@ -21,7 +21,8 @@ import static java.lang.annotation.RetentionPolicy.RUNTIME;
*/
@Target({FIELD, METHOD, TYPE})
@Retention(RUNTIME)
@Deprecated
@Deprecated( forRemoval = true )
@Remove( )
public @interface ForeignKey {
/**
* Name of the foreign key of a {@code OneToMany}, {@code ManyToOne}, or

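The conventional replacement is the identically named JPA annotation supplied through the join column; a sketch with placeholder names (this substitution is an assumption, not spelled out in the hunk above):

@ManyToOne
@JoinColumn( name = "customer_id",
        foreignKey = @jakarta.persistence.ForeignKey( name = "FK_order_customer" ) )
private Customer customer;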
View File

@ -20,6 +20,7 @@ import java.util.Map;
import java.util.Objects;
import org.hibernate.HibernateException;
import org.hibernate.Internal;
import org.hibernate.boot.archive.spi.InputStreamAccess;
import org.hibernate.boot.internal.MetadataBuilderImpl;
import org.hibernate.boot.jaxb.internal.XmlSources;
@ -164,9 +165,8 @@ public class MetadataSources implements Serializable {
* Get a builder for metadata where non-default options can be specified.
*
* @return The built metadata.
* @deprecated Use {@link #getMetadataBuilder()} instead
*/
@Deprecated
@Internal
public MetadataBuilder getMetadataBuilder(StandardServiceRegistry serviceRegistry) {
return getCustomBuilderOrDefault( new MetadataBuilderImpl(this, serviceRegistry ) );
}
@ -326,29 +326,6 @@ public class MetadataSources implements Serializable {
return this;
}
/**
* Read a mapping as an application resource using the convention
* that a class named {@code foo.bar.Foo} is mapped by a file named
* {@code foo/bar/Foo.hbm.xml} which can be resolved as a classpath
* resource.
*
* @param entityClass The mapped class
*
* @return this (for method chaining purposes)
*
* @deprecated hbm.xml is a legacy mapping format now considered deprecated.
*/
@Deprecated
public MetadataSources addClass(Class<?> entityClass) {
if ( entityClass == null ) {
throw new IllegalArgumentException( "The specified class cannot be null" );
}
if ( LOG.isDebugEnabled() ) {
LOG.debugf( "adding resource mappings from class convention : %s", entityClass.getName() );
}
return addResource( entityClass.getName().replace( '.', '/' ) + ".hbm.xml" );
}
/**
* Read mappings as an application resourceName (i.e. classpath lookup).
*
@ -559,23 +536,6 @@ public class MetadataSources implements Serializable {
return this;
}
/**
* Read mappings from a DOM {@link Document}
*
* @param document The DOM document
*
* @return this (for method chaining purposes)
*
* @deprecated since 5.0. Use one of the other methods for passing mapping source(s).
*/
@Deprecated
public MetadataSources addDocument(Document document) {
final XmlSource xmlSource = XmlSources.fromDocument( document );
final XmlMappingBinderAccess binderAccess = getXmlMappingBinderAccess();
getXmlBindingsForWrite().add( xmlSource.doBind( binderAccess.getMappingBinder() ) );
return this;
}
/**
* Read all {@code .hbm.xml} mappings from a jar file.
* <p/>

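With addClass and addDocument gone, the remaining registration methods cover the same ground: the classpath-convention lookup now lives in Configuration#addClass (inlined later in this commit), and a plain addResource call is the direct equivalent on MetadataSources. A sketch of the usual flow, where standardRegistry and the class/resource names are placeholders:

Metadata metadata = new MetadataSources( standardRegistry )
        .addAnnotatedClass( MyEntity.class )
        // what addClass( MyEntity.class ) used to resolve by naming convention
        .addResource( "org/example/MyEntity.hbm.xml" )
        .getMetadataBuilder()
        .build();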
View File

@ -46,22 +46,7 @@ public interface AuxiliaryDatabaseObject extends Exportable, Serializable {
*
* @return the SQL strings for creating the database object.
*/
default String[] sqlCreateStrings(SqlStringGenerationContext context) {
return sqlCreateStrings( context.getDialect() );
}
/**
* Gets the SQL strings for creating the database object.
*
* @param dialect The dialect for which to generate the SQL creation strings
*
* @return the SQL strings for creating the database object.
* @deprecated Implement {@link #sqlCreateStrings(SqlStringGenerationContext)} instead.
*/
@Deprecated
default String[] sqlCreateStrings(Dialect dialect) {
throw new IllegalStateException( this + " does not implement sqlCreateStrings(...)" );
}
String[] sqlCreateStrings(SqlStringGenerationContext context);
/**
* Gets the SQL strings for dropping the database object.
@ -70,22 +55,7 @@ public interface AuxiliaryDatabaseObject extends Exportable, Serializable {
*
* @return the SQL strings for dropping the database object.
*/
default String[] sqlDropStrings(SqlStringGenerationContext context) {
return sqlDropStrings( context.getDialect() );
}
/**
* Gets the SQL strings for dropping the database object.
*
* @param dialect The dialect for which to generate the SQL drop strings
*
* @return the SQL strings for dropping the database object.
* @deprecated Implement {@link #sqlDropStrings(SqlStringGenerationContext)} instead.
*/
@Deprecated
default String[] sqlDropStrings(Dialect dialect) {
throw new IllegalStateException( this + " does not implement sqlDropStrings(...)" );
}
String[] sqlDropStrings(SqlStringGenerationContext context);
/**
* Additional, optional interface for AuxiliaryDatabaseObject that want to allow

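Since the Dialect-based defaults are gone, implementors provide the SqlStringGenerationContext variants directly, much like the test objects updated at the end of this commit. A rough sketch of an implementation against the new contract; the export identifier and DDL strings are placeholders:

AuxiliaryDatabaseObject auditTable = new AuxiliaryDatabaseObject() {
    @Override
    public String getExportIdentifier() {
        return "audit-log-table";
    }
    @Override
    public boolean appliesToDialect(Dialect dialect) {
        return true;
    }
    @Override
    public boolean beforeTablesOnCreation() {
        return false;
    }
    @Override
    public String[] sqlCreateStrings(SqlStringGenerationContext context) {
        // the generation context replaces the old Dialect parameter
        return new String[] { "create table audit_log ( id bigint not null )" };
    }
    @Override
    public String[] sqlDropStrings(SqlStringGenerationContext context) {
        return new String[] { "drop table audit_log" };
    }
};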
View File

@ -9,7 +9,6 @@ package org.hibernate.boot.model.relational;
import java.util.Set;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.dialect.Dialect;
import org.hibernate.internal.util.StringHelper;
/**
@ -76,7 +75,7 @@ public class SimpleAuxiliaryDatabaseObject extends AbstractAuxiliaryDatabaseObje
}
@Override
public String[] sqlCreateStrings(Dialect dialect) {
public String[] sqlCreateStrings(SqlStringGenerationContext context) {
final String[] copy = new String[createStrings.length];
for ( int i = 0, max =createStrings.length; i<max; i++ ) {
copy[i] = injectCatalogAndSchema( createStrings[i] );
@ -85,7 +84,7 @@ public class SimpleAuxiliaryDatabaseObject extends AbstractAuxiliaryDatabaseObje
}
@Override
public String[] sqlDropStrings(Dialect dialect) {
public String[] sqlDropStrings(SqlStringGenerationContext context) {
final String[] copy = new String[dropStrings.length];
for ( int i = 0, max = dropStrings.length; i<max; i++ ) {
copy[i] = injectCatalogAndSchema( dropStrings[i] );

View File

@ -14,6 +14,7 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.hibernate.Internal;
import org.hibernate.boot.cfgxml.internal.ConfigLoader;
import org.hibernate.boot.cfgxml.spi.LoadedConfig;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
@ -123,28 +124,6 @@ public class StandardServiceRegistryBuilder {
this.initiators = standardInitiatorList();
}
/**
* Intended for use exclusively from Quarkus bootstrapping, or extensions of
* this class which need to override the standard ServiceInitiator list.
*
* Consider this an SPI.
*
* @deprecated Quarkus will switch to use
* {@link #StandardServiceRegistryBuilder(BootstrapServiceRegistry, Map, ConfigLoader, LoadedConfig, List)}
*/
@Deprecated
protected StandardServiceRegistryBuilder(
BootstrapServiceRegistry bootstrapServiceRegistry,
Map <String,Object> settings,
LoadedConfig loadedConfig,
List<StandardServiceInitiator<?>> initiators) {
this.bootstrapServiceRegistry = bootstrapServiceRegistry;
this.configLoader = new ConfigLoader( bootstrapServiceRegistry );
this.settings = settings;
this.aggregatedCfgXml = loadedConfig;
this.initiators = initiators;
}
/**
* Intended for use exclusively from Quarkus bootstrapping, or extensions of
* this class which need to override the standard ServiceInitiator list.
@ -407,13 +386,8 @@ public class StandardServiceRegistryBuilder {
/**
* Obtain the current aggregated settings.
*
* @deprecated Temporarily exposed since
* {@link org.hibernate.cfg.Configuration} is still around and much code
* still uses it. This allows code to configure the builder and access
* that to configure the {@code Configuration} object.
*/
@Deprecated
@Internal
public Map<String,Object> getSettings() {
return settings;
}

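getSettings() stays only as an internal bridge while Configuration is still in use; regular configuration continues to flow through applySetting before building the registry. A short sketch, assuming an H2 setup purely for illustration:

StandardServiceRegistry registry = new StandardServiceRegistryBuilder()
        .applySetting( AvailableSettings.DIALECT, "org.hibernate.dialect.H2Dialect" )
        .build();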
View File

@ -22,6 +22,7 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.hibernate.Internal;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
import org.hibernate.cfg.AvailableSettings;
@ -100,6 +101,7 @@ public class ClassLoaderServiceImpl implements ClassLoaderService {
*/
@Deprecated
@SuppressWarnings("unchecked")
@Internal
public static ClassLoaderServiceImpl fromConfigSettings(Map configValues) {
final List<ClassLoader> providedClassLoaders = new ArrayList<>();

View File

@ -10,6 +10,7 @@ import java.util.Collection;
import java.util.Map;
import org.hibernate.Incubating;
import org.hibernate.Internal;
import org.hibernate.annotations.common.reflection.ReflectionManager;
import org.hibernate.boot.CacheRegionDefinition;
import org.hibernate.boot.archive.scan.spi.ScanEnvironment;
@ -116,11 +117,11 @@ public interface BootstrapContext {
*
* @return The Hibernate Commons Annotations ReflectionManager to use.
*
* @deprecated Deprecated (with no replacement) to indicate that this will go away as
* we migrate away from Hibernate Commons Annotations to Jandex for annotation handling
* and XMl->annotation merging.
* @apiNote Supported for internal use only. This method will go away as
* we migrate away from Hibernate Commons Annotations to Jandex for annotation
* handling and XMl->annotation merging.
*/
@Deprecated
@Internal
ReflectionManager getReflectionManager();
/**

View File

@ -177,11 +177,6 @@ public class DisabledCaching implements CacheImplementor {
public void close() {
}
@Override @Deprecated
public String[] getSecondLevelCacheRegionNames() {
return ArrayHelper.EMPTY_STRING_ARRAY;
}
@Override
public Set<String> getCacheRegionNames() {
return null;
@ -222,9 +217,4 @@ public class DisabledCaching implements CacheImplementor {
public <T> T unwrap(Class<T> cls) {
return (T) this;
}
@Override
public Set<NaturalIdDataAccess> getNaturalIdAccessesInRegion(String regionName) {
return Collections.emptySet();
}
}

View File

@ -29,8 +29,8 @@ import org.hibernate.cache.spi.QueryResultsCache;
import org.hibernate.cache.spi.QueryResultsRegion;
import org.hibernate.cache.spi.Region;
import org.hibernate.cache.spi.RegionFactory;
import org.hibernate.cache.spi.TimestampsRegion;
import org.hibernate.cache.spi.TimestampsCache;
import org.hibernate.cache.spi.TimestampsRegion;
import org.hibernate.cache.spi.access.CollectionDataAccess;
import org.hibernate.cache.spi.access.EntityDataAccess;
import org.hibernate.cache.spi.access.NaturalIdDataAccess;
@ -38,7 +38,6 @@ import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.metamodel.model.domain.NavigableRole;
import org.hibernate.metamodel.spi.MappingMetamodelImplementor;
import org.hibernate.persister.collection.CollectionPersister;
@ -637,15 +636,4 @@ public class EnabledCaching implements CacheImplementor, DomainDataRegionBuildin
public CollectionDataAccess getCollectionRegionAccess(NavigableRole collectionRole) {
return collectionAccessMap.get( collectionRole );
}
@Override @Deprecated
public String[] getSecondLevelCacheRegionNames() {
return ArrayHelper.toStringArray( legacySecondLevelCacheNames );
}
@Override
public Set<NaturalIdDataAccess> getNaturalIdAccessesInRegion(String regionName) {
return legacyNaturalIdAccessesForRegion.get( regionName );
}
}

View File

@ -7,11 +7,12 @@
package org.hibernate.cache.spi;
import java.io.Serializable;
import java.util.Locale;
import java.util.Set;
import org.hibernate.Cache;
import org.hibernate.HibernateException;
import org.hibernate.Internal;
import org.hibernate.annotations.Remove;
import org.hibernate.cache.cfg.spi.DomainDataRegionConfig;
import org.hibernate.cache.spi.access.CollectionDataAccess;
import org.hibernate.cache.spi.access.EntityDataAccess;
@ -124,32 +125,19 @@ public interface CacheImplementor extends Service, Cache, Serializable {
*/
void close();
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Deprecations (5.3)
/**
* Get the *qualified* names of all regions caching entity and collection data.
*
* @return All cache region names
*
* @deprecated Use {@link CacheImplementor#getCacheRegionNames()} instead
*/
@Deprecated(since = "5.3")
String[] getSecondLevelCacheRegionNames();
/**
* Find the cache data access strategy for an entity. Will
* return {@code null} when the entity is not configured for caching.
*
* @param rootEntityName The NavigableRole representation of the root entity
*
* @apiNote It is only valid to call this method after {@link #prime} has
* @implSpec It is only valid to call this method after {@link #prime} has
* been performed
*
* @deprecated Use {@link EntityPersister#getCacheAccessStrategy()} instead
* @apiNote Use {@link EntityPersister#getCacheAccessStrategy()} instead
*/
@Deprecated
@Internal
@Remove
EntityDataAccess getEntityRegionAccess(NavigableRole rootEntityName);
/**
@ -159,67 +147,25 @@ public interface CacheImplementor extends Service, Cache, Serializable {
*
* @param rootEntityName The NavigableRole representation of the root entity
*
* @apiNote It is only valid to call this method after {@link #prime} has
* @implSpec It is only valid to call this method after {@link #prime} has
* been performed
*
* @deprecated Use {@link EntityPersister#getNaturalIdCacheAccessStrategy()} instead
* @apiNote Use {@link EntityPersister#getNaturalIdCacheAccessStrategy()} instead
*/
@Deprecated
@Internal
@Remove
NaturalIdDataAccess getNaturalIdCacheRegionAccessStrategy(NavigableRole rootEntityName);
/**
* Find the cache data access strategy for the given collection. Will
* return {@code null} when the collection is not configured for caching.
*
* @apiNote It is only valid to call this method after {@link #prime} has
* @implSpec It is only valid to call this method after {@link #prime} has
* been performed
*
* @deprecated Use {@link EntityPersister#getNaturalIdCacheAccessStrategy()} instead
* @apiNote Use {@link EntityPersister#getNaturalIdCacheAccessStrategy()} instead
*/
@Deprecated
@Internal
@Remove
CollectionDataAccess getCollectionRegionAccess(NavigableRole collectionRole);
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Some new (default) support methods for the above deprecations
// - themselves deprecated
/**
* @deprecated No replacement - added just to continue some backwards compatibility
* in supporting the newly deprecated methods expecting a qualified (prefix +) region name
*/
@Deprecated(since = "5.3")
default String unqualifyRegionName(String name) {
if ( getSessionFactory().getSessionFactoryOptions().getCacheRegionPrefix() == null ) {
return name;
}
if ( !name.startsWith( getSessionFactory().getSessionFactoryOptions().getCacheRegionPrefix() ) ) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"Legacy methods for accessing cache information expect a qualified (prefix) region name - " +
"but passed name [%s] was not qualified by the configured prefix [%s]",
name,
getSessionFactory().getSessionFactoryOptions().getCacheRegionPrefix()
)
);
}
return name.substring( getSessionFactory().getSessionFactoryOptions().getCacheRegionPrefix().length() + 1 );
}
/**
* @deprecated No replacement - added just for support of the newly deprecated methods expecting a qualified region name
*/
@Deprecated
default Region getRegionByLegacyName(String legacyName) {
return getRegion( unqualifyRegionName( legacyName ) );
}
/**
* @deprecated No replacement - added just for support of the newly deprecated methods expecting a qualified region name
*/
@Deprecated
Set<NaturalIdDataAccess> getNaturalIdAccessesInRegion(String legacyQualifiedRegionName);
}

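Callers of the removed String[] accessor move to the Set-based getCacheRegionNames(), which is how StatisticsImpl is reworked later in this commit. A sketch, assuming a SessionFactory reference is at hand:

CacheImplementor cache = sessionFactory.unwrap( SessionFactoryImplementor.class ).getCache();
String[] regionNames = cache.getCacheRegionNames().toArray( new String[0] );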
View File

@ -527,14 +527,21 @@ public class Configuration {
* which can be resolved as a {@linkplain ClassLoader#getResource(String)
* classpath resource}.
*
* @param persistentClass The mapped class
* @param entityClass The mapped class
* @return this (for method chaining purposes)
* @throws MappingException Indicates problems locating the resource or
* processing the contained mapping document.
*/
public Configuration addClass(Class persistentClass) throws MappingException {
metadataSources.addClass( persistentClass );
return this;
public Configuration addClass(Class entityClass) throws MappingException {
if ( entityClass == null ) {
throw new IllegalArgumentException( "The specified class cannot be null" );
}
if ( log.isDebugEnabled() ) {
log.debugf( "adding resource mappings from class convention : %s", entityClass.getName() );
}
return addResource( entityClass.getName().replace( '.', '/' ) + ".hbm.xml" );
}
/**

View File

@ -518,20 +518,22 @@ public abstract class AbstractHANADialect extends Dialect {
@SuppressWarnings({ "deprecation" })
private String getForUpdateString(String aliases, LockMode lockMode, int timeout) {
switch ( lockMode ) {
case UPGRADE:
return getForUpdateString( aliases );
case PESSIMISTIC_READ:
case PESSIMISTIC_READ: {
return getReadLockString( aliases, timeout );
case PESSIMISTIC_WRITE:
}
case PESSIMISTIC_WRITE: {
return getWriteLockString( aliases, timeout );
}
case UPGRADE_NOWAIT:
case FORCE:
case PESSIMISTIC_FORCE_INCREMENT:
case PESSIMISTIC_FORCE_INCREMENT: {
return getForUpdateNowaitString( aliases );
case UPGRADE_SKIPLOCKED:
}
case UPGRADE_SKIPLOCKED: {
return getForUpdateSkipLockedString( aliases );
default:
}
default: {
return "";
}
}
}
@ -1712,7 +1714,7 @@ public abstract class AbstractHANADialect extends Dialect {
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options) throws SQLException {
JdbcType descriptor = BlobJdbcType.BLOB_BINDING;
if ( byte[].class.isInstance( value ) ) {
if ( value instanceof byte[] ) {
// performance shortcut for binding BLOB data in byte[] format
descriptor = BlobJdbcType.PRIMITIVE_ARRAY_BINDING;
}

View File

@ -14,6 +14,7 @@ import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.TimeZone;
import jakarta.persistence.TemporalType;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
@ -30,10 +31,10 @@ import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
import org.hibernate.engine.jdbc.env.spi.NameQualifierSupport;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.query.sqm.NullOrdering;
import org.hibernate.query.sqm.TemporalUnit;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.SqlAstTranslatorFactory;
@ -41,22 +42,36 @@ import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.spi.StandardSqlAstTranslatorFactory;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.type.BasicType;
import org.hibernate.type.BasicTypeRegistry;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.jdbc.InstantAsTimestampWithTimeZoneJdbcType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.UUIDJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.Scale6IntervalSecondDdlType;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.internal.Scale6IntervalSecondDdlType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import jakarta.persistence.TemporalType;
import static org.hibernate.query.sqm.TemporalUnit.DAY;
import static org.hibernate.query.sqm.TemporalUnit.NATIVE;
import static org.hibernate.type.SqlTypes.*;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.BLOB;
import static org.hibernate.type.SqlTypes.CHAR;
import static org.hibernate.type.SqlTypes.CLOB;
import static org.hibernate.type.SqlTypes.GEOMETRY;
import static org.hibernate.type.SqlTypes.INET;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.LONG32NVARCHAR;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.LONG32VARCHAR;
import static org.hibernate.type.SqlTypes.NCHAR;
import static org.hibernate.type.SqlTypes.NCLOB;
import static org.hibernate.type.SqlTypes.NVARCHAR;
import static org.hibernate.type.SqlTypes.OTHER;
import static org.hibernate.type.SqlTypes.TIMESTAMP_UTC;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.TINYINT;
import static org.hibernate.type.SqlTypes.UUID;
import static org.hibernate.type.SqlTypes.VARBINARY;
import static org.hibernate.type.SqlTypes.VARCHAR;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsDate;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTime;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMicros;
@ -211,10 +226,7 @@ public class CockroachDialect extends Dialect {
public void initializeFunctionRegistry(QueryEngine queryEngine) {
super.initializeFunctionRegistry(queryEngine);
final BasicTypeRegistry basicTypeRegistry = queryEngine.getTypeConfiguration().getBasicTypeRegistry();
final BasicType<String> stringType = basicTypeRegistry.resolve( StandardBasicTypes.STRING );
CommonFunctionFactory functionFactory = new CommonFunctionFactory(queryEngine);
final CommonFunctionFactory functionFactory = new CommonFunctionFactory( queryEngine );
functionFactory.ascii();
functionFactory.char_chr();
functionFactory.overlay();
@ -589,33 +601,36 @@ public class CockroachDialect extends Dialect {
lockMode = lockOptions.getLockMode();
}
switch ( lockMode ) {
//noinspection deprecation
case UPGRADE:
return getForUpdateString(aliases);
case PESSIMISTIC_READ:
case PESSIMISTIC_READ: {
return getReadLockString( aliases, lockOptions.getTimeOut() );
case PESSIMISTIC_WRITE:
}
case PESSIMISTIC_WRITE: {
return getWriteLockString( aliases, lockOptions.getTimeOut() );
}
case UPGRADE_NOWAIT:
//noinspection deprecation
case FORCE:
case PESSIMISTIC_FORCE_INCREMENT:
case PESSIMISTIC_FORCE_INCREMENT: {
return getForUpdateNowaitString(aliases);
case UPGRADE_SKIPLOCKED:
}
case UPGRADE_SKIPLOCKED: {
return getForUpdateSkipLockedString(aliases);
default:
}
default: {
return "";
}
}
}
private String withTimeout(String lockString, int timeout) {
switch (timeout) {
case LockOptions.NO_WAIT:
case LockOptions.NO_WAIT: {
return supportsNoWait() ? lockString + " nowait" : lockString;
case LockOptions.SKIP_LOCKED:
}
case LockOptions.SKIP_LOCKED: {
return supportsSkipLocked() ? lockString + " skip locked" : lockString;
default:
}
default: {
return lockString;
}
}
}

View File

@ -1556,23 +1556,24 @@ public abstract class Dialect implements ConversionContext {
return getForUpdateString( lockOptions.getLockMode(), lockOptions.getTimeOut() );
}
@SuppressWarnings("deprecation")
private String getForUpdateString(LockMode lockMode, int timeout){
switch ( lockMode ) {
case UPGRADE:
return getForUpdateString();
case PESSIMISTIC_READ:
case PESSIMISTIC_READ: {
return getReadLockString( timeout );
case PESSIMISTIC_WRITE:
}
case PESSIMISTIC_WRITE: {
return getWriteLockString( timeout );
}
case UPGRADE_NOWAIT:
case FORCE:
case PESSIMISTIC_FORCE_INCREMENT:
case PESSIMISTIC_FORCE_INCREMENT: {
return getForUpdateNowaitString();
case UPGRADE_SKIPLOCKED:
}
case UPGRADE_SKIPLOCKED: {
return getForUpdateSkipLockedString();
default:
}
default: {
return "";
}
}
}
@ -3563,15 +3564,15 @@ public abstract class Dialect implements ConversionContext {
case PESSIMISTIC_READ:
return getReadRowLockStrategy();
case WRITE:
case FORCE:
case PESSIMISTIC_FORCE_INCREMENT:
case PESSIMISTIC_WRITE:
case UPGRADE:
case UPGRADE_SKIPLOCKED:
case UPGRADE_NOWAIT:
case UPGRADE_NOWAIT: {
return getWriteRowLockStrategy();
default:
}
default: {
return RowLockStrategy.NONE;
}
}
}

View File

@ -18,6 +18,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import jakarta.persistence.TemporalType;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
@ -47,11 +48,11 @@ import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.procedure.internal.PostgresCallableStatementSupport;
import org.hibernate.procedure.spi.CallableStatementSupport;
import org.hibernate.query.SemanticException;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.FetchClauseType;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.query.SemanticException;
import org.hibernate.query.sqm.TemporalUnit;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.sqm.mutation.internal.cte.CteInsertStrategy;
import org.hibernate.query.sqm.mutation.internal.cte.CteMutationStrategy;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableInsertStrategy;
@ -72,21 +73,37 @@ import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.jdbc.ObjectNullAsBinaryTypeJdbcType;
import org.hibernate.type.descriptor.jdbc.UUIDJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.Scale6IntervalSecondDdlType;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.internal.Scale6IntervalSecondDdlType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
import jakarta.persistence.TemporalType;
import static org.hibernate.exception.spi.TemplatedViolatedConstraintNameExtractor.extractUsingTemplate;
import static org.hibernate.query.sqm.TemporalUnit.DAY;
import static org.hibernate.query.sqm.TemporalUnit.EPOCH;
import static org.hibernate.query.sqm.TemporalUnit.MONTH;
import static org.hibernate.query.sqm.TemporalUnit.QUARTER;
import static org.hibernate.query.sqm.TemporalUnit.YEAR;
import static org.hibernate.type.SqlTypes.*;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.BLOB;
import static org.hibernate.type.SqlTypes.CHAR;
import static org.hibernate.type.SqlTypes.CLOB;
import static org.hibernate.type.SqlTypes.GEOMETRY;
import static org.hibernate.type.SqlTypes.INET;
import static org.hibernate.type.SqlTypes.JSON;
import static org.hibernate.type.SqlTypes.LONG32NVARCHAR;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.LONG32VARCHAR;
import static org.hibernate.type.SqlTypes.NCHAR;
import static org.hibernate.type.SqlTypes.NCLOB;
import static org.hibernate.type.SqlTypes.NVARCHAR;
import static org.hibernate.type.SqlTypes.OTHER;
import static org.hibernate.type.SqlTypes.TIMESTAMP_UTC;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.TINYINT;
import static org.hibernate.type.SqlTypes.UUID;
import static org.hibernate.type.SqlTypes.VARBINARY;
import static org.hibernate.type.SqlTypes.VARCHAR;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsDate;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTime;
import static org.hibernate.type.descriptor.DateTimeUtils.appendAsTimestampWithMicros;
@ -558,22 +575,22 @@ public class PostgreSQLDialect extends Dialect {
lockMode = lockOptions.getLockMode();
}
switch ( lockMode ) {
//noinspection deprecation
case UPGRADE:
return getForUpdateString(aliases);
case PESSIMISTIC_READ:
case PESSIMISTIC_READ: {
return getReadLockString( aliases, lockOptions.getTimeOut() );
case PESSIMISTIC_WRITE:
}
case PESSIMISTIC_WRITE: {
return getWriteLockString( aliases, lockOptions.getTimeOut() );
}
case UPGRADE_NOWAIT:
//noinspection deprecation
case FORCE:
case PESSIMISTIC_FORCE_INCREMENT:
case PESSIMISTIC_FORCE_INCREMENT: {
return getForUpdateNowaitString(aliases);
case UPGRADE_SKIPLOCKED:
}
case UPGRADE_SKIPLOCKED: {
return getForUpdateSkipLockedString(aliases);
default:
}
default: {
return "";
}
}
}

View File

@ -432,8 +432,6 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
final String skipLockStr = lockOptions.getTimeOut() == LockOptions.SKIP_LOCKED ? ",readpast" : "";
switch ( lockMode ) {
//noinspection deprecation
case UPGRADE:
case PESSIMISTIC_WRITE:
case WRITE:
return tableName + " with (" + writeLockStr + ",rowlock" + noWaitStr + skipLockStr + ")";
@ -449,8 +447,6 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
}
else {
switch ( lockOptions.getLockMode() ) {
//noinspection deprecation
case UPGRADE:
case UPGRADE_NOWAIT:
case PESSIMISTIC_WRITE:
case WRITE:

View File

@ -133,10 +133,8 @@ public class SQLServerSqlAstTranslator<T extends JdbcOperation> extends Abstract
if ( getDialect().getVersion().isSameOrAfter( 9 ) ) {
final int effectiveLockTimeout = getEffectiveLockTimeout( lockMode );
switch ( lockMode ) {
//noinspection deprecation
case UPGRADE:
case PESSIMISTIC_WRITE:
case WRITE:
case WRITE: {
switch ( effectiveLockTimeout ) {
case LockOptions.SKIP_LOCKED:
appendSql( " with (updlock,rowlock,readpast)" );
@ -149,20 +147,22 @@ public class SQLServerSqlAstTranslator<T extends JdbcOperation> extends Abstract
break;
}
break;
case PESSIMISTIC_READ:
}
case PESSIMISTIC_READ: {
switch ( effectiveLockTimeout ) {
case LockOptions.SKIP_LOCKED:
appendSql( " with (updlock,rowlock,readpast)" );
break;
case LockOptions.NO_WAIT:
appendSql( " with (holdlock,rowlock,nowait)");
appendSql( " with (holdlock,rowlock,nowait)" );
break;
default:
appendSql( " with (holdlock,rowlock)");
appendSql( " with (holdlock,rowlock)" );
break;
}
break;
case UPGRADE_SKIPLOCKED:
}
case UPGRADE_SKIPLOCKED: {
if ( effectiveLockTimeout == LockOptions.NO_WAIT ) {
appendSql( " with (updlock,rowlock,readpast,nowait)" );
}
@ -170,26 +170,29 @@ public class SQLServerSqlAstTranslator<T extends JdbcOperation> extends Abstract
appendSql( " with (updlock,rowlock,readpast)" );
}
break;
case UPGRADE_NOWAIT:
}
case UPGRADE_NOWAIT: {
appendSql( " with (updlock,holdlock,rowlock,nowait)" );
break;
}
}
}
else {
switch ( lockMode ) {
//noinspection deprecation
case UPGRADE:
case UPGRADE_NOWAIT:
case PESSIMISTIC_WRITE:
case WRITE:
case WRITE: {
appendSql( " with (updlock,rowlock)" );
break;
case PESSIMISTIC_READ:
appendSql(" with (holdlock,rowlock)" );
}
case PESSIMISTIC_READ: {
appendSql( " with (holdlock,rowlock)" );
break;
case UPGRADE_SKIPLOCKED:
}
case UPGRADE_SKIPLOCKED: {
appendSql( " with (updlock,rowlock,readpast)" );
break;
}
}
}
}

View File

@ -54,7 +54,7 @@ public class UpdateLockingStrategy implements LockingStrategy {
public UpdateLockingStrategy(Lockable lockable, LockMode lockMode) {
this.lockable = lockable;
this.lockMode = lockMode;
if ( lockMode.lessThan( LockMode.UPGRADE ) ) {
if ( lockMode.lessThan( LockMode.WRITE ) ) {
throw new HibernateException( "[" + lockMode + "] not valid for update statement" );
}
if ( !lockable.isVersioned() ) {

View File

@ -48,16 +48,16 @@ public class Oracle12LimitHandler extends AbstractLimitHandler {
if ( lockOptions != null ) {
final LockMode lockMode = lockOptions.getLockMode();
switch ( lockMode ) {
case UPGRADE:
case PESSIMISTIC_READ:
case PESSIMISTIC_WRITE:
case UPGRADE_NOWAIT:
case FORCE:
case PESSIMISTIC_FORCE_INCREMENT:
case UPGRADE_SKIPLOCKED:
case UPGRADE_SKIPLOCKED: {
return processSql( sql, getForUpdateIndex( sql ), hasFirstRow );
default:
}
default: {
return processSqlOffsetFetch( sql, hasFirstRow );
}
}
}
return processSqlOffsetFetch( sql, hasFirstRow );

View File

@ -380,9 +380,7 @@ public abstract class AbstractEntityEntry implements Serializable, EntityEntry {
public void forceLocked(Object entity, Object nextVersion) {
version = nextVersion;
loadedState[ persister.getVersionProperty() ] = version;
// TODO: use LockMode.PESSIMISTIC_FORCE_INCREMENT
//noinspection deprecation
setLockMode( LockMode.FORCE );
setLockMode( LockMode.PESSIMISTIC_FORCE_INCREMENT );
persister.setValue( entity, getPersister().getVersionProperty(), nextVersion );
}

View File

@ -13,6 +13,7 @@ import java.util.function.BiConsumer;
import java.util.function.Supplier;
import org.hibernate.HibernateException;
import org.hibernate.Internal;
import org.hibernate.LockMode;
import org.hibernate.query.Query;
import org.hibernate.collection.spi.PersistentCollection;
@ -477,11 +478,9 @@ public interface PersistenceContext {
// HashSet getNullifiableEntityKeys();
/**
* Get the mapping from key value to entity instance
* @deprecated this will be removed: it provides too wide access, making it hard to optimise the internals
* for specific access needs. Consider using #iterateEntities instead.
* Doubly internal
*/
@Deprecated
@Internal
Map<EntityKey,Object> getEntitiesByKey();
/**
@ -503,10 +502,9 @@ public interface PersistenceContext {
int getNumberOfManagedEntities();
/**
* Get the mapping from collection instance to collection entry
* @deprecated use {@link #removeCollectionEntry(PersistentCollection)} or {@link #getCollectionEntriesSize()}, {@link #forEachCollectionEntry(BiConsumer,boolean)}.
* Doubly internal
*/
@Deprecated
@Internal
Map<PersistentCollection<?>,CollectionEntry> getCollectionEntries();
/**

View File

@ -104,21 +104,6 @@ public class SessionFactoryDelegatingImpl implements SessionFactoryImplementor,
return delegate.openStatelessSession( connection );
}
@Override @Deprecated
public ClassMetadata getClassMetadata(@SuppressWarnings("rawtypes") Class entityClass) {
return delegate.getClassMetadata( entityClass );
}
@Override @Deprecated
public ClassMetadata getClassMetadata(String entityName) {
return delegate.getClassMetadata( entityName );
}
@Override @Deprecated
public CollectionMetadata getCollectionMetadata(String roleName) {
return delegate.getCollectionMetadata( roleName );
}
@Override
public StatisticsImplementor getStatistics() {
return delegate.getStatistics();

View File

@ -726,18 +726,10 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {
return runtimeMetamodels.getMappingMetamodel().getEntityDescriptor( className ).getIdentifierPropertyName();
}
public ClassMetadata getClassMetadata(@SuppressWarnings("rawtypes") Class persistentClass) throws HibernateException {
return getClassMetadata( persistentClass.getName() );
}
public CollectionMetadata getCollectionMetadata(String roleName) throws HibernateException {
return (CollectionMetadata) runtimeMetamodels.getMappingMetamodel().getCollectionDescriptor( roleName );
}
public ClassMetadata getClassMetadata(String entityName) throws HibernateException {
return (ClassMetadata) runtimeMetamodels.getMappingMetamodel().getEntityDescriptor( entityName );
}
public Type getReferencedPropertyType(String className, String propertyName) throws MappingException {
return runtimeMetamodels.getMappingMetamodel().getEntityDescriptor( className ).getPropertyType( propertyName );
}

View File

@ -42,13 +42,11 @@ public final class LockModeConverter {
return LockModeType.PESSIMISTIC_READ;
}
else if ( lockMode == LockMode.PESSIMISTIC_WRITE
|| lockMode == LockMode.UPGRADE
|| lockMode == LockMode.UPGRADE_NOWAIT
|| lockMode == LockMode.UPGRADE_SKIPLOCKED) {
return LockModeType.PESSIMISTIC_WRITE;
}
else if ( lockMode == LockMode.PESSIMISTIC_FORCE_INCREMENT
|| lockMode == LockMode.FORCE ) {
else if ( lockMode == LockMode.PESSIMISTIC_FORCE_INCREMENT ) {
return LockModeType.PESSIMISTIC_FORCE_INCREMENT;
}
throw new AssertionFailure( "unhandled lock mode " + lockMode );

View File

@ -57,7 +57,7 @@ public class LoaderHelper {
lock = cache.lockItem( session, ck, entry.getVersion() );
}
if ( persister.isVersioned() && requestedLockMode == LockMode.FORCE ) {
if ( persister.isVersioned() && requestedLockMode == LockMode.PESSIMISTIC_FORCE_INCREMENT ) {
// todo : should we check the current isolation mode explicitly?
Object nextVersion = persister.forceVersionIncrement(
entry.getId(), entry.getVersion(), session

View File

@ -105,7 +105,7 @@ public class SingleIdEntityLoaderStandardImpl<T> extends SingleIdEntityLoaderSup
final CascadingFetchProfile enabledCascadingFetchProfile = loadQueryInfluencers.getEnabledCascadingFetchProfile();
if ( enabledCascadingFetchProfile != null ) {
if ( LockMode.UPGRADE.greaterThan( lockOptions.getLockMode() ) ) {
if ( LockMode.WRITE.greaterThan( lockOptions.getLockMode() ) ) {
if ( selectByInternalCascadeProfile == null ) {
selectByInternalCascadeProfile = new EnumMap<>( CascadingFetchProfile.class );
}

View File

@ -16,7 +16,6 @@ import org.hibernate.type.Type;
/**
* Exposes entity class metadata to the application
*
* @see org.hibernate.SessionFactory#getClassMetadata(Class)
* @author Gavin King
*
* @deprecated Use Hibernate's mapping model {@link org.hibernate.metamodel.MappingMetamodel}

View File

@ -1108,9 +1108,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
int timeoutMillis = forUpdateClause.getTimeoutMillis();
LockKind lockKind = LockKind.NONE;
switch ( forUpdateClause.getLockMode() ) {
//noinspection deprecation
case UPGRADE:
timeoutMillis = LockOptions.WAIT_FOREVER;
case PESSIMISTIC_WRITE:
lockKind = LockKind.UPDATE;
break;
@ -1118,8 +1115,6 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
lockKind = LockKind.SHARE;
break;
case UPGRADE_NOWAIT:
//noinspection deprecation
case FORCE:
case PESSIMISTIC_FORCE_INCREMENT:
timeoutMillis = LockOptions.NO_WAIT;
lockKind = LockKind.UPDATE;
@ -1215,21 +1210,18 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
}
int timeoutMillis = getLockOptions().getTimeOut();
switch ( lockMode ) {
//noinspection deprecation
case UPGRADE:
timeoutMillis = LockOptions.WAIT_FOREVER;
break;
case UPGRADE_NOWAIT:
//noinspection deprecation
case FORCE:
case PESSIMISTIC_FORCE_INCREMENT:
case PESSIMISTIC_FORCE_INCREMENT: {
timeoutMillis = LockOptions.NO_WAIT;
break;
case UPGRADE_SKIPLOCKED:
}
case UPGRADE_SKIPLOCKED: {
timeoutMillis = LockOptions.SKIP_LOCKED;
break;
default:
}
default: {
break;
}
}
return timeoutMillis;
}

View File

@ -535,7 +535,7 @@ public class StatisticsImpl implements StatisticsImplementor, Service {
@Override
public String[] getSecondLevelCacheRegionNames() {
return cache.getSecondLevelCacheRegionNames();
return cache.getCacheRegionNames().toArray( new String[0] );
}
@Override

View File

@ -11,6 +11,7 @@ import java.util.Arrays;
import java.util.List;
import org.hibernate.Session;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.query.sqm.SortOrder;
import org.hibernate.metamodel.mapping.PluralAttributeMapping;
import org.hibernate.metamodel.mapping.ordering.OrderByFragmentImpl;
@ -135,8 +136,11 @@ public class ElementCollectionSortingTest {
private void checkSQLOrderBy(Session session, String entityName, String propertyName, SortOrder order) {
String roleName = entityName + "." + propertyName;
String alias = "alias1";
BasicCollectionPersister collectionPersister = (BasicCollectionPersister) session.getSessionFactory()
.getCollectionMetadata( roleName );
BasicCollectionPersister collectionPersister = (BasicCollectionPersister) session
.unwrap( SessionImplementor.class )
.getFactory()
.getMappingMetamodel()
.getCollectionDescriptor( roleName );
assertTrue( collectionPersister.hasOrdering() );
PluralAttributeMapping attributeMapping = collectionPersister.getAttributeMapping();
assertThat( attributeMapping.getFetchableName(), is( propertyName ) );

View File

@ -15,6 +15,7 @@ import org.hibernate.Transaction;
import org.hibernate.dialect.CockroachDialect;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.persister.collection.BasicCollectionPersister;
import org.hibernate.testing.SkipForDialect;
@ -114,7 +115,10 @@ public class Ejb3XmlTest extends BaseCoreFunctionalTestCase {
// For the element-collection, check that the orm.xml entries are honored.
// This includes: map-key-column/column/collection-table/join-column
BasicCollectionPersister confRoomMeta = (BasicCollectionPersister) sf.getCollectionMetadata( Company.class.getName() + ".conferenceRoomExtensions" );
BasicCollectionPersister confRoomMeta = (BasicCollectionPersister) sf
.unwrap( SessionFactoryImplementor.class )
.getMappingMetamodel()
.getCollectionDescriptor( Company.class.getName() + ".conferenceRoomExtensions" );
assertEquals( "company_id", confRoomMeta.getKeyColumnNames()[0] );
assertEquals( "phone_extension", confRoomMeta.getElementColumnNames()[0] );
assertEquals( "room_number", confRoomMeta.getIndexColumnNames()[0] );

View File

@ -52,7 +52,7 @@ public class ByteCodeEnhancedImmutableReferenceCacheTest extends BaseCoreFunctio
@Test
public void testUseOfDirectReferencesInCache() throws Exception {
EntityPersister persister = (EntityPersister) sessionFactory().getClassMetadata( MyEnhancedReferenceData.class );
EntityPersister persister = sessionFactory().getMappingMetamodel().getEntityDescriptor( MyEnhancedReferenceData.class );
assertFalse( persister.isMutable() );
assertTrue( persister.buildCacheEntry( null, null, null, null ).isReferenceEntry() );
assertFalse( persister.hasProxy() );

View File

@ -1,51 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
*/
package org.hibernate.orm.test.cache;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase;
import org.junit.Test;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
/**
* @author Steve Ebersole
*/
public class QualifiedRegionNameHandlingTest extends BaseNonConfigCoreFunctionalTestCase {
private static final String PREFIX = "app1";
private static final String LOCAL_NAME = "a.b.c";
@Override
protected void configureStandardServiceRegistryBuilder(StandardServiceRegistryBuilder ssrb) {
super.configureStandardServiceRegistryBuilder( ssrb );
ssrb.applySetting( AvailableSettings.USE_SECOND_LEVEL_CACHE, "true" );
ssrb.applySetting( AvailableSettings.USE_QUERY_CACHE, "true" );
ssrb.applySetting( AvailableSettings.CACHE_REGION_PREFIX, PREFIX );
}
@Test
public void testValidCall() {
MatcherAssert.assertThat(
sessionFactory().getCache().unqualifyRegionName( PREFIX + '.' + LOCAL_NAME ),
CoreMatchers.is( LOCAL_NAME )
);
}
@Test
public void testUnqualifiedNameUsed() {
try {
sessionFactory().getCache().unqualifyRegionName( LOCAL_NAME );
}
catch (IllegalArgumentException expected) {
}
}
}

View File

@ -44,7 +44,7 @@ public class ReferenceCacheTest extends BaseCoreFunctionalTestCase {
@Test
public void testUseOfDirectReferencesInCache() throws Exception {
EntityPersister persister = (EntityPersister) sessionFactory().getClassMetadata( MyReferenceData.class );
EntityPersister persister = sessionFactory().getMappingMetamodel().getEntityDescriptor( MyReferenceData.class );
assertFalse( persister.isMutable() );
assertTrue( persister.buildCacheEntry( null, null, null, null ).isReferenceEntry() );
assertFalse( persister.hasProxy() );

View File

@ -594,7 +594,7 @@ public class SQLServer2005DialectTestCase extends BaseUnitTestCase {
public void testAppendLockHintUpgrade() {
final String expectedLockHint = "tab1 with (updlock,holdlock,rowlock)";
LockOptions lockOptions = new LockOptions( LockMode.UPGRADE );
LockOptions lockOptions = new LockOptions( LockMode.PESSIMISTIC_WRITE );
String lockHint = dialect.appendLockHint( lockOptions, "tab1" );
assertEquals( expectedLockHint, lockHint );
@ -605,7 +605,7 @@ public class SQLServer2005DialectTestCase extends BaseUnitTestCase {
public void testAppendLockHintUpgradeNoTimeout() {
final String expectedLockHint = "tab1 with (updlock,holdlock,rowlock,nowait)";
LockOptions lockOptions = new LockOptions( LockMode.UPGRADE );
LockOptions lockOptions = new LockOptions( LockMode.PESSIMISTIC_WRITE );
lockOptions.setTimeOut( LockOptions.NO_WAIT );
String lockHint = dialect.appendLockHint( lockOptions, "tab1" );
@ -617,7 +617,7 @@ public class SQLServer2005DialectTestCase extends BaseUnitTestCase {
public void testAppendLockHintPessimisticWrite() {
final String expectedLockHint = "tab1 with (updlock,holdlock,rowlock)";
LockOptions lockOptions = new LockOptions( LockMode.UPGRADE );
LockOptions lockOptions = new LockOptions( LockMode.PESSIMISTIC_WRITE );
String lockHint = dialect.appendLockHint( lockOptions, "tab1" );
assertEquals( expectedLockHint, lockHint );
@ -628,7 +628,7 @@ public class SQLServer2005DialectTestCase extends BaseUnitTestCase {
public void testAppendLockHintPessimisticWriteNoTimeOut() {
final String expectedLockHint = "tab1 with (updlock,holdlock,rowlock,nowait)";
LockOptions lockOptions = new LockOptions( LockMode.UPGRADE );
LockOptions lockOptions = new LockOptions( LockMode.PESSIMISTIC_WRITE );
lockOptions.setTimeOut( LockOptions.NO_WAIT );
String lockHint = dialect.appendLockHint( lockOptions, "tab1" );

View File

@ -47,8 +47,8 @@ public abstract class AbstractLockHintTest extends BaseUnitTestCase {
}
protected LockOptions lockOptions(String aliasToLock) {
LockOptions lockOptions = new LockOptions(LockMode.UPGRADE);
lockOptions.setAliasSpecificLockMode( aliasToLock, LockMode.UPGRADE );
LockOptions lockOptions = new LockOptions(LockMode.PESSIMISTIC_WRITE);
lockOptions.setAliasSpecificLockMode( aliasToLock, LockMode.PESSIMISTIC_WRITE );
return lockOptions;
}

View File

@ -148,8 +148,8 @@ public class JoinedSubclassTest {
scope.inTransaction(
session -> {
session.lock( p, LockMode.UPGRADE );
session.lock( q, LockMode.UPGRADE );
session.lock( p, LockMode.PESSIMISTIC_WRITE );
session.lock( q, LockMode.PESSIMISTIC_WRITE );
session.delete( p );
session.delete( q );
}

View File

@ -105,7 +105,7 @@ public class JPALockTest extends AbstractJPATest {
s1 = sessionFactory().openSession();
s1.beginTransaction();
item = s1.get( Item.class, itemId );
s1.lock( item, LockMode.UPGRADE );
s1.lock( item, LockMode.PESSIMISTIC_WRITE );
item.setName( "updated" );
s1.flush();
@ -174,9 +174,6 @@ public class JPALockTest extends AbstractJPATest {
* must always prevent the phenomena P1 and P2. For non-versioned objects, whether or
* not LockModeType.WRITE has any additional behaviour is vendor-specific. Applications that call
* lock(entity, LockModeType.WRITE) on non-versioned objects will not be portable.
* <p/>
* Due to the requirement that LockModeType.WRITE needs to force a version increment,
* a new Hibernate LockMode was added to support this behavior: {@link LockMode#FORCE}.
*/
@Test
public void testLockModeTypeWrite() {
@ -206,14 +203,14 @@ public class JPALockTest extends AbstractJPATest {
s1 = sessionFactory().openSession();
s1.beginTransaction();
item = s1.get( Item.class, itemId );
s1.lock( item, LockMode.FORCE );
s1.lock( item, LockMode.PESSIMISTIC_FORCE_INCREMENT );
assertEquals( initialVersion + 1, item.getVersion(), "no forced version increment" );
myEntity = s1.get( MyEntity.class, entity.getId() );
s1.lock( myEntity, LockMode.FORCE );
assertTrue( true, "LockMode.FORCE on a un-versioned entity should degrade nicely to UPGRADE" );
s1.lock( myEntity, LockMode.PESSIMISTIC_FORCE_INCREMENT );
assertTrue( true, "LockMode.PESSIMISTIC_FORCE_INCREMENT on a un-versioned entity should degrade nicely to UPGRADE" );
s1.lock( item, LockMode.FORCE );
s1.lock( item, LockMode.PESSIMISTIC_FORCE_INCREMENT );
assertEquals( initialVersion + 1, item.getVersion(), "subsequent LockMode.FORCE did not no-op" );
s2 = sessionFactory().openSession();

View File

@ -145,7 +145,7 @@ public class RepeatableReadTest extends AbstractJPATest {
// attempt to acquire an UPGRADE lock; this should fail
s1.lock( item, LockMode.UPGRADE );
s1.lock( item, LockMode.PESSIMISTIC_WRITE );
fail( "expected UPGRADE lock failure" );
}
catch (StaleObjectStateException expected) {
@ -262,7 +262,7 @@ public class RepeatableReadTest extends AbstractJPATest {
// then acquire an UPGRADE lock; this should fail
try {
s1.lock( part, LockMode.UPGRADE );
s1.lock( part, LockMode.PESSIMISTIC_WRITE );
}
catch (Throwable t) {
// SQLServer, for example, immediately throws an exception here...

View File

@ -72,7 +72,7 @@ public abstract class CustomSQLTestSupport extends BaseCoreFunctionalTestCase {
emp = ( Employment ) jboss.getEmployments().iterator().next();
gavin = emp.getEmployee();
assertEquals( "GAVIN" , gavin.getName() );
assertEquals( LockMode.UPGRADE , s.getCurrentLockMode( gavin ));
assertEquals( LockMode.PESSIMISTIC_WRITE , s.getCurrentLockMode( gavin ));
emp.setEndDate( new Date() );
Employment emp3 = new Employment( gavin, jboss, "US" );
s.save( emp3 );

View File

@ -6,7 +6,6 @@
*/
package org.hibernate.orm.test.sql.storedproc;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.EntityResult;
import jakarta.persistence.FieldResult;
@ -19,7 +18,7 @@ import jakarta.persistence.StoredProcedureParameter;
import org.hibernate.boot.MetadataBuilder;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
import org.hibernate.cfg.Configuration;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.H2Dialect;
@ -37,7 +36,7 @@ public class H2ProcTesting {
@Override
public boolean appliesToDialect(Dialect dialect) {
return H2Dialect.class.isInstance( dialect );
return dialect instanceof H2Dialect;
}
@Override
@ -46,7 +45,7 @@ public class H2ProcTesting {
}
@Override
public String[] sqlCreateStrings(Dialect dialect) {
public String[] sqlCreateStrings(SqlStringGenerationContext context) {
return new String[] {
"CREATE ALIAS findOneUser AS $$\n" +
"import org.h2.tools.SimpleResultSet;\n" +
@ -64,7 +63,7 @@ public class H2ProcTesting {
}
@Override
public String[] sqlDropStrings(Dialect dialect) {
public String[] sqlDropStrings(SqlStringGenerationContext context) {
return new String[] {
"DROP ALIAS findUser IF EXISTS"
};
@ -81,7 +80,7 @@ public class H2ProcTesting {
@Override
public boolean appliesToDialect(Dialect dialect) {
return H2Dialect.class.isInstance( dialect );
return dialect instanceof H2Dialect;
}
@Override
@ -90,7 +89,7 @@ public class H2ProcTesting {
}
@Override
public String[] sqlCreateStrings(Dialect dialect) {
public String[] sqlCreateStrings(SqlStringGenerationContext context) {
return new String[] {
"CREATE ALIAS findUsers AS $$\n" +
"import org.h2.tools.SimpleResultSet;\n" +
@ -110,7 +109,7 @@ public class H2ProcTesting {
}
@Override
public String[] sqlDropStrings(Dialect dialect) {
public String[] sqlDropStrings(SqlStringGenerationContext context) {
return new String[] {"DROP ALIAS findUser IF EXISTS"};
}
}
@ -125,7 +124,7 @@ public class H2ProcTesting {
@Override
public boolean appliesToDialect(Dialect dialect) {
return H2Dialect.class.isInstance( dialect );
return dialect instanceof H2Dialect;
}
@Override
@ -134,7 +133,7 @@ public class H2ProcTesting {
}
@Override
public String[] sqlCreateStrings(Dialect dialect) {
public String[] sqlCreateStrings(SqlStringGenerationContext context) {
return new String[] {
"CREATE ALIAS findUserRange AS $$\n" +
"import org.h2.tools.SimpleResultSet;\n" +
@ -154,7 +153,7 @@ public class H2ProcTesting {
}
@Override
public String[] sqlDropStrings(Dialect dialect) {
public String[] sqlDropStrings(SqlStringGenerationContext context) {
return new String[] {"DROP ALIAS findUserRange IF EXISTS"};
}
}

View File

@ -17,6 +17,7 @@ import jakarta.persistence.TemporalType;
import org.hibernate.boot.MetadataBuilder;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.PostgreSQL81Dialect;
import org.hibernate.dialect.PostgreSQL82Dialect;
@ -67,7 +68,7 @@ public class PostgresRefCursorSupportTest extends BaseSessionFactoryFunctionalTe
}
@Override
public String[] sqlCreateStrings(Dialect dialect) {
public String[] sqlCreateStrings(SqlStringGenerationContext context) {
return new String[] {
"create function all_items() return refcursor as \n" +
" 'declare someCursor refcursor;\n" +
@ -79,7 +80,7 @@ public class PostgresRefCursorSupportTest extends BaseSessionFactoryFunctionalTe
}
@Override
public String[] sqlDropStrings(Dialect dialect) {
public String[] sqlDropStrings(SqlStringGenerationContext context) {
return new String[] {
"drop function all_items()"
};

View File

@ -18,6 +18,7 @@ import jakarta.persistence.TemporalType;
import org.hibernate.boot.MetadataBuilder;
import org.hibernate.boot.model.relational.AuxiliaryDatabaseObject;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.procedure.ProcedureCall;
@ -88,7 +89,7 @@ public class StoredProcedureResultSetMappingTest extends BaseSessionFactoryFunct
}
@Override
public String[] sqlCreateStrings(Dialect dialect) {
public String[] sqlCreateStrings(SqlStringGenerationContext context) {
return new String[] {
"CREATE ALIAS allEmployeeNames AS $$\n" +
"import org.h2.tools.SimpleResultSet;\n" +
@ -109,7 +110,7 @@ public class StoredProcedureResultSetMappingTest extends BaseSessionFactoryFunct
}
@Override
public String[] sqlDropStrings(Dialect dialect) {
public String[] sqlDropStrings(SqlStringGenerationContext context) {
return new String[] {"DROP ALIAS allEmployeeNames IF EXISTS"};
}