Merge remote-tracking branch 'upstream/master' into wip/6.0

Andrea Boriero 2020-10-26 08:14:01 +00:00
commit 9cf4edfbee
21 changed files with 90 additions and 203 deletions

View File

@@ -11,7 +11,7 @@ hibernate-spatial:: Hibernate's Spatial/GIS data-type support
hibernate-osgi:: Hibernate support for running in OSGi containers.
hibernate-agroal:: Integrates the http://agroal.github.io/[Agroal] connection pooling library into Hibernate
hibernate-c3p0:: Integrates the http://www.mchange.com/projects/c3p0/[C3P0] connection pooling library into Hibernate
hibernate-hikaricp:: Integrates the http://brettwooldridge.github.io/HikariCP/[HikariCP] connection pooling library into Hibernate
hibernate-hikaricp:: Integrates the https://github.com/brettwooldridge/HikariCP/[HikariCP] connection pooling library into Hibernate
hibernate-vibur:: Integrates the http://www.vibur.org/[Vibur DBCP] connection pooling library into Hibernate
hibernate-proxool:: Integrates the http://proxool.sourceforge.net/[Proxool] connection pooling library into Hibernate
hibernate-jcache:: Integrates the https://jcp.org/en/jsr/detail?id=107$$[JCache] caching specification into Hibernate,

View File

@@ -255,7 +255,7 @@ Both `READ_WRITE` and `TRANSACTIONAL` use write-through caching, while `NONSTRIC
For this reason, `NONSTRICT_READ_WRITE` is not very suitable if entities are changed frequently.
When using clustering, the second-level cache entries are spread across multiple nodes.
When using http://blog.infinispan.org/2015/10/hibernate-second-level-cache.html[Infinispan distributed cache], only `READ_WRITE` and `NONSTRICT_READ_WRITE` are available for read-write caches.
When using https://infinispan.org/blog/2015/10/01/hibernate-second-level-cache/[Infinispan distributed cache], only `READ_WRITE` and `NONSTRICT_READ_WRITE` are available for read-write caches.
Bear in mind that `NONSTRICT_READ_WRITE` offers a weaker consistency guarantee since stale updates are possible.
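For illustration, a minimal sketch of selecting a concurrency strategy per entity; the `Event` entity and the `event` region name are hypothetical:

[source, java]
----
import javax.persistence.Cacheable;
import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;

// Hypothetical entity used only to show how a concurrency strategy is chosen.
// NONSTRICT_READ_WRITE avoids locking but tolerates brief windows of staleness,
// so it suits entities that change infrequently.
@Entity
@Cacheable
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "event")
public class Event {

	@Id
	private Long id;

	private String name;

	// getters and setters omitted for brevity
}
----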
[NOTE]

View File

@@ -684,7 +684,7 @@ Sets the associated collection cache concurrency strategy for the designated reg
=== Infinispan properties
For more details about how to customize the Infinispan second-level cache provider, check out the
http://infinispan.org/docs/stable/titles/integrating/integrating.html#configuration_properties[Infinispan User Guide].
https://infinispan.org/docs/stable/titles/integrating/integrating.html#configuration_properties[Infinispan User Guide].
[[configurations-transactions]]
=== Transactions properties

View File

@@ -707,4 +707,4 @@ Infinispan is a distributed in-memory key/value data store, available as a cache
It supports advanced functionality such as transactions, events, querying, distributed processing, off-heap and geographical failover.
For more details, check out the
http://infinispan.org/docs/stable/titles/integrating/integrating.html#integrating_jpa_hibernate[Infinispan User Guide].
https://infinispan.org/docs/stable/titles/integrating/integrating.html#integrating_jpa_hibernate[Infinispan User Guide].

View File

@@ -1136,7 +1136,7 @@ Programmatically::
TimeZone.setDefault( TimeZone.getTimeZone( "UTC" ) );
----
However, as explained in http://in.relation.to/2016/09/12/jdbc-time-zone-configuration-property/[this article], this is not always practical, especially for front-end nodes.
However, as explained in https://in.relation.to/2016/09/12/jdbc-time-zone-configuration-property/[this article], this is not always practical, especially for front-end nodes.
For this reason, Hibernate offers the `hibernate.jdbc.time_zone` configuration property which can be configured:
Declaratively, at the `SessionFactory` level::
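As a minimal sketch, assuming the property is added to the settings map the application already passes when building the `SessionFactory`:

[source, java]
----
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;

import org.hibernate.cfg.AvailableSettings;

// Sketch only: AvailableSettings.JDBC_TIME_ZONE resolves to "hibernate.jdbc.time_zone".
// The value may be given as a TimeZone, a ZoneId, or the zone id as a String.
Map<String, Object> settings = new HashMap<>();
settings.put( AvailableSettings.JDBC_TIME_ZONE, TimeZone.getTimeZone( "UTC" ) );
----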

View File

@@ -110,7 +110,7 @@ See http://proxool.sourceforge.net/configure.html[proxool configuration].
To use the HikariCP integration, the application must include the `hibernate-hikari` module jar (as well as its dependencies) on the classpath.
====
Hibernate also provides support for applications to use http://brettwooldridge.github.io/HikariCP/[Hikari] connection pool.
Hibernate also provides support for applications to use https://github.com/brettwooldridge/HikariCP/[HikariCP] connection pool.
Set all of your Hikari settings in Hibernate prefixed by `hibernate.hikari.` and this `ConnectionProvider` will pick them up and pass them along to Hikari.
Additionally, this `ConnectionProvider` will pick up the following Hibernate-specific properties and map them to the corresponding Hikari ones (any `hibernate.hikari.` prefixed ones have precedence):
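A minimal sketch of wiring this up programmatically; the pool values are arbitrary examples, and the explicit `hibernate.connection.provider_class` entry is shown only for clarity:

[source, java]
----
import java.util.HashMap;
import java.util.Map;

// Sketch only: settings prefixed with "hibernate.hikari." are handed to HikariCP
// (with the prefix stripped), using HikariCP's own property names.
Map<String, Object> settings = new HashMap<>();
settings.put( "hibernate.connection.provider_class",
		"org.hibernate.hikaricp.internal.HikariCPConnectionProvider" );
settings.put( "hibernate.hikari.maximumPoolSize", "10" );
settings.put( "hibernate.hikari.idleTimeout", "30000" );
----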

View File

@@ -18,13 +18,10 @@ import org.hibernate.boot.archive.scan.spi.ScanOptions;
import org.hibernate.boot.archive.scan.spi.ScanParameters;
import org.hibernate.boot.archive.scan.spi.ScanResult;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
public class ScanResultCollector {
private static final Logger log = Logger.getLogger( ScanResultCollector.class );
private final ScanEnvironment environment;
private final ScanOptions options;

View File

@@ -199,7 +199,6 @@ public class ImplicitNamingStrategyJpaCompliantImpl implements ImplicitNamingStr
@Override
public Identifier determineForeignKeyName(ImplicitForeignKeyNameSource source) {
Identifier userProvidedIdentifier = source.getUserProvidedIdentifier();
source.getBuildingContext().getBuildingOptions().getSchemaCharset();
return userProvidedIdentifier != null ? userProvidedIdentifier : toIdentifier(
NamingHelper.withCharset( source.getBuildingContext().getBuildingOptions().getSchemaCharset() ).generateHashedFkName(
"FK",

View File

@@ -32,7 +32,6 @@ import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.IndexedCollection;
import org.hibernate.mapping.Join;
import org.hibernate.mapping.KeyValue;
import org.hibernate.mapping.PersistentClass;
@@ -409,35 +408,6 @@ public class CollectionPropertyHolder extends AbstractPropertyHolder {
.findAutoApplyConverterForCollectionElement( collectionXProperty, getContext() );
}
private Class determineElementClass(XClass elementXClass) {
if ( elementXClass != null ) {
try {
return getContext().getBootstrapContext().getReflectionManager().toClass( elementXClass );
}
catch (Exception e) {
log.debugf(
"Unable to resolve XClass [%s] to Class for collection elements [%s]",
elementXClass.getName(),
collection.getRole()
);
}
}
if ( collection.getElement() != null ) {
if ( collection.getElement().getType() != null ) {
return collection.getElement().getType().getReturnedClass();
}
}
// currently this is called from paths where the element type really should be known,
// so log the fact that we could not resolve the collection element info
log.debugf(
"Unable to resolve element information for collection [%s]",
collection.getRole()
);
return null;
}
public ConverterDescriptor mapKeyAttributeConverterDescriptor(XProperty mapXProperty, XClass keyXClass) {
AttributeConversionInfo info = locateAttributeConversionInfo( "key" );
if ( info != null ) {
@@ -466,33 +436,4 @@ public class CollectionPropertyHolder extends AbstractPropertyHolder {
.findAutoApplyConverterForMapKey( mapXProperty, getContext() );
}
private Class determineKeyClass(XClass keyXClass) {
if ( keyXClass != null ) {
try {
return getContext().getBootstrapContext().getReflectionManager().toClass( keyXClass );
}
catch (Exception e) {
log.debugf(
"Unable to resolve XClass [%s] to Class for collection key [%s]",
keyXClass.getName(),
collection.getRole()
);
}
}
final IndexedCollection indexedCollection = (IndexedCollection) collection;
if ( indexedCollection.getIndex() != null ) {
if ( indexedCollection.getIndex().getType() != null ) {
return indexedCollection.getIndex().getType().getReturnedClass();
}
}
// currently this is called from paths where the element type really should be known,
// so log the fact that we could not resolve the collection element info
log.debugf(
"Unable to resolve key information for collection [%s]",
collection.getRole()
);
return null;
}
}

View File

@@ -102,8 +102,8 @@ public class GroupsPerOperation {
DDL("ddl", HIBERNATE_GROUP_PREFIX + "ddl");
private String exposedName;
private String groupPropertyName;
private final String exposedName;
private final String groupPropertyName;
Operation(String exposedName, String groupProperty) {
this.exposedName = exposedName;

View File

@@ -86,18 +86,7 @@ public class StandardRefCursorSupport implements RefCursorSupport {
public ResultSet getResultSet(CallableStatement statement, int position) {
if ( jdbcServices.getExtractedMetaDataSupport().supportsRefCursors() ) {
try {
return (ResultSet) getResultSetByPositionMethod().invoke( statement, position, ResultSet.class );
}
catch (InvocationTargetException e) {
if ( e.getTargetException() instanceof SQLException ) {
throw jdbcServices.getSqlExceptionHelper().convert(
(SQLException) e.getTargetException(),
"Error extracting REF_CURSOR parameter [" + position + "]"
);
}
else {
throw new HibernateException( "Unexpected error extracting REF_CURSOR parameter [" + position + "]", e.getTargetException() );
}
return statement.getObject( position, ResultSet.class );
}
catch (Exception e) {
throw new HibernateException( "Unexpected error extracting REF_CURSOR parameter [" + position + "]", e );
@@ -120,18 +109,7 @@ public class StandardRefCursorSupport implements RefCursorSupport {
public ResultSet getResultSet(CallableStatement statement, String name) {
if ( jdbcServices.getExtractedMetaDataSupport().supportsRefCursors() ) {
try {
return (ResultSet) getResultSetByNameMethod().invoke( statement, name, ResultSet.class );
}
catch (InvocationTargetException e) {
if ( e.getTargetException() instanceof SQLException ) {
throw jdbcServices.getSqlExceptionHelper().convert(
(SQLException) e.getTargetException(),
"Error extracting REF_CURSOR parameter [" + name + "]"
);
}
else {
throw new HibernateException( "Unexpected error extracting REF_CURSOR parameter [" + name + "]", e.getTargetException() );
}
return statement.getObject( name, ResultSet.class );
}
catch (Exception e) {
throw new HibernateException( "Unexpected error extracting REF_CURSOR parameter [" + name + "]", e );
@@ -158,58 +136,17 @@ public class StandardRefCursorSupport implements RefCursorSupport {
* @return {@code true} if the metadata indicates that the driver defines REF_CURSOR support
*/
public static boolean supportsRefCursors(DatabaseMetaData meta) {
// Standard JDBC REF_CURSOR support was not added until Java 8, so we need to use reflection to attempt to
// access these fields/methods...
try {
return (Boolean) meta.getClass().getMethod( "supportsRefCursors" ).invoke( meta );
return meta.supportsRefCursors();
}
catch (NoSuchMethodException e) {
log.trace( "JDBC DatabaseMetaData class does not define supportsRefCursors method..." );
catch (SQLException throwable) {
log.debug( "Unexpected error trying to gauge level of JDBC REF_CURSOR support : " + throwable.getMessage() );
return false;
}
catch (Exception e) {
log.debug( "Unexpected error trying to gauge level of JDBC REF_CURSOR support : " + e.getMessage() );
}
return false;
}
private int refCursorTypeCode() {
return Types.REF_CURSOR;
}
private static Method getResultSetByPositionMethod;
private Method getResultSetByPositionMethod() {
if ( getResultSetByPositionMethod == null ) {
try {
getResultSetByPositionMethod = CallableStatement.class.getMethod( "getObject", int.class, Class.class );
}
catch (NoSuchMethodException e) {
throw new HibernateException( "CallableStatement class does not define getObject(int,Class) method" );
}
catch (Exception e) {
throw new HibernateException( "Unexpected error trying to access CallableStatement#getObject(int,Class)" );
}
}
return getResultSetByPositionMethod;
}
private static Method getResultSetByNameMethod;
private Method getResultSetByNameMethod() {
if ( getResultSetByNameMethod == null ) {
try {
getResultSetByNameMethod = CallableStatement.class.getMethod( "getObject", String.class, Class.class );
}
catch (NoSuchMethodException e) {
throw new HibernateException( "CallableStatement class does not define getObject(String,Class) method" );
}
catch (Exception e) {
throw new HibernateException( "Unexpected error trying to access CallableStatement#getObject(String,Class)" );
}
}
return getResultSetByNameMethod;
}
}
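For context, a REF_CURSOR parameter is typically consumed through the JPA stored-procedure API; a minimal sketch, assuming an existing `EntityManager` and a hypothetical procedure named `sp_list_events`:

[source, java]
----
import java.util.List;

import javax.persistence.EntityManager;
import javax.persistence.ParameterMode;
import javax.persistence.StoredProcedureQuery;

// Sketch only: register the first parameter as a REF_CURSOR and read the rows it returns.
// This path relies on the driver reporting REF_CURSOR support (see supportsRefCursors above).
StoredProcedureQuery query = entityManager.createStoredProcedureQuery( "sp_list_events" );
query.registerStoredProcedureParameter( 1, void.class, ParameterMode.REF_CURSOR );
List<Object[]> rows = query.getResultList();
----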

View File

@@ -241,11 +241,15 @@ public class EnhancementTask extends Task {
private void walkDir(File dir, FileFilter classesFilter, FileFilter dirFilter) {
File[] dirs = dir.listFiles( dirFilter );
for ( File dir1 : dirs ) {
walkDir( dir1, classesFilter, dirFilter );
if ( dirs != null ) {
for ( File dir1 : dirs ) {
walkDir( dir1, classesFilter, dirFilter );
}
}
File[] files = dir.listFiles( classesFilter );
Collections.addAll( sourceSet, files );
if ( files != null ) {
Collections.addAll( sourceSet, files );
}
}
private void writeOutEnhancedClass(byte[] enhancedBytecode, File file) throws BuildException {

View File

@@ -18,12 +18,13 @@ import org.hibernate.type.Type;
* @author Steve Ebersole
*/
public class IdentifierProperty extends AbstractAttribute implements IdentifierAttribute {
private boolean virtual;
private boolean embedded;
private IdentifierValue unsavedValue;
private IdentifierGenerator identifierGenerator;
private boolean identifierAssignedByInsert;
private boolean hasIdentifierMapper;
private final boolean virtual;
private final boolean embedded;
private final IdentifierValue unsavedValue;
private final IdentifierGenerator identifierGenerator;
private final boolean identifierAssignedByInsert;
private final boolean hasIdentifierMapper;
/**
* Construct a non-virtual identifier property.

View File

@@ -369,7 +369,6 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
public Object getIdentifier(Object entity, EntityMode entityMode, SharedSessionContractImplementor session) {
final Object id = mappedIdentifierType.instantiate( entityMode );
final Object[] propertyValues = virtualIdComponent.getPropertyValues( entity, entityMode );
final String[] names = virtualIdComponent.getPropertyNames();
final Type[] subTypes = virtualIdComponent.getSubtypes();
final Type[] copierSubTypes = mappedIdentifierType.getSubtypes();
final int length = subTypes.length;

View File

@@ -47,10 +47,11 @@ public class UpdateBuilder {
sb.append( "update " ).append( entityName ).append( " " ).append( alias );
sb.append( " set " );
int i = 1;
for ( String property : updates.keySet() ) {
for ( java.util.Map.Entry<String, Object> entry : updates.entrySet() ) {
final String property = entry.getKey();
final String paramName = generateParameterName();
sb.append( alias ).append( "." ).append( property ).append( " = " ).append( ":" ).append( paramName );
updateParamValues.put( paramName, updates.get( property ) );
updateParamValues.put( paramName, entry.getValue() );
if ( i < updates.size() ) {
sb.append( ", " );
}

View File

@@ -87,8 +87,8 @@ public class OsgiServiceUtil implements Stoppable {
@Override
public void stop() {
for (String key : serviceTrackers.keySet()) {
serviceTrackers.get( key ).close();
for ( java.util.Map.Entry<String, org.osgi.util.tracker.ServiceTracker> entry : serviceTrackers.entrySet() ) {
entry.getValue().close();
}
serviceTrackers.clear();
}

View File

@@ -17,7 +17,7 @@ import org.locationtech.jts.geom.Polygon;
*/
public class EnvelopeAdapter {
private static GeometryFactory geomFactory = new GeometryFactory();
private static volatile GeometryFactory geomFactory = new GeometryFactory();
private EnvelopeAdapter() {
}

View File

@@ -510,7 +510,7 @@ public class BaseNonConfigCoreFunctionalTestCase extends BaseUnitTestCase {
session = null;
}
public class RollbackWork implements Work {
public static class RollbackWork implements Work {
@Override
public void execute(Connection connection) throws SQLException {

View File

@@ -216,7 +216,7 @@ public final class Context {
}
public boolean isFullyXmlConfigured() {
return Boolean.TRUE == fullyXmlConfigured;
return fullyXmlConfigured != null && fullyXmlConfigured.booleanValue();
}
public void mappingDocumentFullyXmlConfigured(boolean fullyXmlConfigured) {

View File

@@ -6,8 +6,6 @@
*/
package org.hibernate.jpamodelgen.util;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -34,46 +32,52 @@ public final class Constants {
public static final String CONVERT = "javax.persistence.Convert";
public static final String HIBERNATE_TYPE = "org.hibernate.annotations.Type";
public static final Map<String, String> COLLECTIONS = new HashMap<String, String>();
public static final Map<String, String> COLLECTIONS = allCollectionTypes();
static {
COLLECTIONS.put( java.util.Collection.class.getName(), "javax.persistence.metamodel.CollectionAttribute" );
COLLECTIONS.put( java.util.Set.class.getName(), "javax.persistence.metamodel.SetAttribute" );
COLLECTIONS.put( java.util.List.class.getName(), "javax.persistence.metamodel.ListAttribute" );
COLLECTIONS.put( java.util.Map.class.getName(), "javax.persistence.metamodel.MapAttribute" );
private static java.util.Map<String, String> allCollectionTypes() {
Map<String, String> map = new java.util.HashMap<>();
map.put( java.util.Collection.class.getName(), "javax.persistence.metamodel.CollectionAttribute" );
map.put( java.util.Set.class.getName(), "javax.persistence.metamodel.SetAttribute" );
map.put( java.util.List.class.getName(), "javax.persistence.metamodel.ListAttribute" );
map.put( java.util.Map.class.getName(), "javax.persistence.metamodel.MapAttribute" );
// Hibernate also supports the SortedSet and SortedMap interfaces
COLLECTIONS.put( java.util.SortedSet.class.getName(), "javax.persistence.metamodel.SetAttribute" );
COLLECTIONS.put( java.util.SortedMap.class.getName(), "javax.persistence.metamodel.MapAttribute" );
map.put( java.util.SortedSet.class.getName(), "javax.persistence.metamodel.SetAttribute" );
map.put( java.util.SortedMap.class.getName(), "javax.persistence.metamodel.MapAttribute" );
return java.util.Collections.unmodifiableMap( map );
}
public static final List<String> BASIC_TYPES = new ArrayList<String>();
public static final List<String> BASIC_TYPES = allBasicTypes();
static {
BASIC_TYPES.add( java.lang.String.class.getName() );
BASIC_TYPES.add( java.lang.Boolean.class.getName() );
BASIC_TYPES.add( java.lang.Byte.class.getName() );
BASIC_TYPES.add( java.lang.Character.class.getName() );
BASIC_TYPES.add( java.lang.Short.class.getName() );
BASIC_TYPES.add( java.lang.Integer.class.getName() );
BASIC_TYPES.add( java.lang.Long.class.getName() );
BASIC_TYPES.add( java.lang.Float.class.getName() );
BASIC_TYPES.add( java.lang.Double.class.getName() );
BASIC_TYPES.add( java.math.BigInteger.class.getName() );
BASIC_TYPES.add( java.math.BigDecimal.class.getName() );
BASIC_TYPES.add( java.util.Date.class.getName() );
BASIC_TYPES.add( java.util.Calendar.class.getName() );
BASIC_TYPES.add( java.sql.Date.class.getName() );
BASIC_TYPES.add( java.sql.Time.class.getName() );
BASIC_TYPES.add( java.sql.Timestamp.class.getName() );
BASIC_TYPES.add( java.sql.Blob.class.getName() );
private static java.util.List<String> allBasicTypes() {
java.util.ArrayList<String> strings = new java.util.ArrayList<>();
strings.add( java.lang.String.class.getName() );
strings.add( java.lang.Boolean.class.getName() );
strings.add( java.lang.Byte.class.getName() );
strings.add( java.lang.Character.class.getName() );
strings.add( java.lang.Short.class.getName() );
strings.add( java.lang.Integer.class.getName() );
strings.add( java.lang.Long.class.getName() );
strings.add( java.lang.Float.class.getName() );
strings.add( java.lang.Double.class.getName() );
strings.add( java.math.BigInteger.class.getName() );
strings.add( java.math.BigDecimal.class.getName() );
strings.add( java.util.Date.class.getName() );
strings.add( java.util.Calendar.class.getName() );
strings.add( java.sql.Date.class.getName() );
strings.add( java.sql.Time.class.getName() );
strings.add( java.sql.Timestamp.class.getName() );
strings.add( java.sql.Blob.class.getName() );
return java.util.Collections.unmodifiableList( strings );
}
public static final List<String> BASIC_ARRAY_TYPES = new ArrayList<String>();
public static final List<String> BASIC_ARRAY_TYPES = allBasicArrayTypes();
static {
BASIC_ARRAY_TYPES.add( java.lang.Character.class.getName() );
BASIC_ARRAY_TYPES.add( java.lang.Byte.class.getName() );
private static java.util.List<String> allBasicArrayTypes() {
java.util.ArrayList<String> strings = new java.util.ArrayList<>();
strings.add( java.lang.Character.class.getName() );
strings.add( java.lang.Byte.class.getName() );
return java.util.Collections.unmodifiableList( strings );
}
public static final String PATH_SEPARATOR = "/";

View File

@@ -218,21 +218,25 @@ public class JpaDescriptorParser {
}
private FileTimeStampChecker loadTimeStampCache() {
FileTimeStampChecker serializedTimeStampCheck = new FileTimeStampChecker();
File file = null;
try {
file = getSerializationTmpFile();
if ( file.exists() ) {
ObjectInputStream in = new ObjectInputStream( new FileInputStream( file ) );
serializedTimeStampCheck = (FileTimeStampChecker) in.readObject();
in.close();
final File file = getSerializationTmpFile();
if ( file.exists() ) {
try {
try ( java.io.FileInputStream fileInputStream = new java.io.FileInputStream( file ) ) {
try ( java.io.ObjectInputStream in = new java.io.ObjectInputStream( fileInputStream ) ) {
return (org.hibernate.jpamodelgen.util.FileTimeStampChecker) in.readObject();
}
}
}
catch (java.io.IOException e) {
//handled in the outer scope
}
catch (ClassNotFoundException e) {
//handled in the outer scope
}
}
catch (Exception e) {
// ignore - if the de-serialization failed we just have to keep parsing the xml
context.logMessage( Diagnostic.Kind.OTHER, "Error de-serializing " + file );
}
return serializedTimeStampCheck;
// ignore - if the de-serialization failed we just have to keep parsing the xml
context.logMessage( Diagnostic.Kind.OTHER, "Error de-serializing " + file );
return new FileTimeStampChecker();
}
private void parseEntities(Collection<Entity> entities, String defaultPackageName) {