HHH-15100 Limitation of metamodel imports cache causes severe performance drops in large projects
parent 3ece671697
commit e5e2931b1c
JpaMetamodelImpl.java

@@ -70,7 +70,6 @@ import org.hibernate.type.spi.TypeConfiguration;
  */
 public class JpaMetamodelImpl implements JpaMetamodelImplementor, Serializable {
 	private static final EntityManagerMessageLogger log = HEMLogging.messageLogger( JpaMetamodel.class );
-	private static final ImportInfo<?> INVALID_IMPORT = new ImportInfo<>( null, null );
 
 	private static class ImportInfo<T> {
 		final String importedName;
@@ -98,6 +97,7 @@ public class JpaMetamodelImpl implements JpaMetamodelImplementor, Serializable {
 	private final Map<Class<?>, String> entityProxyInterfaceMap = new HashMap<>();
 
 	private final Map<String, ImportInfo<?>> nameToImportMap = new ConcurrentHashMap<>();
+	private final Map<String,Object> knownInvalidnameToImportMap = new ConcurrentHashMap<>();
 
 
 	public JpaMetamodelImpl(TypeConfiguration typeConfiguration, JpaCompliance jpaCompliance) {
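The new knownInvalidnameToImportMap is a negative cache: it records names that already failed to resolve so the classloader is not asked about them again. As the comments in resolveImport below note, ConcurrentHashMap rejects null values, so a failed lookup cannot be stored as name -> null; instead the name is simply mapped to itself as a marker. A quick standalone demonstration of that constraint (class name and keys are made up for illustration):

import java.util.concurrent.ConcurrentHashMap;

public class NullValueDemo {
	public static void main(String[] args) {
		ConcurrentHashMap<String, Object> cache = new ConcurrentHashMap<>();
		// a marker entry works: map the key to itself
		cache.put( "com.acme.Missing", "com.acme.Missing" );
		try {
			// a null value does not: ConcurrentHashMap.put throws NullPointerException
			cache.put( "com.acme.AlsoMissing", null );
		}
		catch (NullPointerException expected) {
			System.out.println( "null values are not allowed: " + expected );
		}
	}
}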
@@ -281,29 +281,42 @@ public class JpaMetamodelImpl implements JpaMetamodelImplementor, Serializable {
 
 	private <T> ImportInfo<T> resolveImport(final String name) {
 		final ImportInfo<?> importInfo = nameToImportMap.get( name );
+		//optimal path first
 		if ( importInfo != null ) {
 			//noinspection unchecked
-			return importInfo == INVALID_IMPORT ? null : (ImportInfo<T>) importInfo;
+			return (ImportInfo<T>) importInfo;
 		}
 		else {
-			// see if the name is a fully-qualified class name
-			final Class<T> loadedClass = resolveRequestedClass( name );
-			if ( loadedClass == null ) {
-				// it is NOT a fully-qualified class name - add a marker entry so we do not keep trying later
-				// note that ConcurrentHashMap does not support null value so a marker entry is needed
-				// [HHH-14948] But only add it if the cache size isn't getting too large, as in some use cases
-				// the queries are dynamically generated and this cache could lead to memory leaks when left unbounded.
-				if ( nameToImportMap.size() < 1_000 ) {
-					nameToImportMap.put( name, INVALID_IMPORT );
-				}
+			//then check the negative cache, to avoid bothering the classloader unnecessarily
+			if ( knownInvalidnameToImportMap.containsKey( name ) ) {
 				return null;
 			}
 			else {
-				// it is a fully-qualified class name - add it to the cache
-				// so we do not keep trying later
-				final ImportInfo<T> info = new ImportInfo<>( name, loadedClass );
-				nameToImportMap.put( name, info );
-				return info;
+				// see if the name is a fully-qualified class name
+				final Class<T> loadedClass = resolveRequestedClass( name );
+				if ( loadedClass == null ) {
+					// it is NOT a fully-qualified class name - add a marker entry so we do not keep trying later
+					// note that ConcurrentHashMap does not support null value so a marker entry is needed
+					// [HHH-14948] But only add it if the cache size isn't getting too large, as in some use cases
+					// the queries are dynamically generated and this cache could lead to memory leaks when left unbounded.
+					if ( knownInvalidnameToImportMap.size() < 1_000 ) {
+						//TODO this collection might benefit from a LRU eviction algorithm,
+						//we currently have no evidence for this need but this could be explored further.
+						//To consider that we don't have a hard dependency on a cache implementation providing LRU semantics.
+						//Alternatively - even better - would be to precompute all possible valid options and
+						//store them in nameToImportMap on bootstrap: if that can be filled with all (comprehensive)
+						//valid values, then there is no need for ever bothering the classloader.
+						knownInvalidnameToImportMap.put( name, name );
+					}
+					return null;
+				}
+				else {
+					// it is a fully-qualified class name - add it to the cache
+					// so to not needing to load from the classloader again
+					final ImportInfo<T> info = new ImportInfo<>( name, loadedClass );
+					nameToImportMap.put( name, info );
+					return info;
+				}
 			}
 		}
 	}
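For readers who want the new control flow without the diff noise, here is a minimal, hypothetical sketch of the same two-map strategy: an unbounded cache for names that resolve plus a bounded negative cache for names that do not. The class and method names are made up, and Class.forName stands in for resolveRequestedClass, which in Hibernate also honours JPQL compliance settings.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class ImportResolverSketch {
	private static final int NEGATIVE_CACHE_LIMIT = 1_000;

	private final Map<String, Class<?>> resolved = new ConcurrentHashMap<>();
	private final Map<String, String> knownInvalid = new ConcurrentHashMap<>();

	public Class<?> resolve(String name) {
		// optimal path: the name was resolved before
		Class<?> cached = resolved.get( name );
		if ( cached != null ) {
			return cached;
		}
		// negative cache: the name is already known to be unresolvable
		if ( knownInvalid.containsKey( name ) ) {
			return null;
		}
		// slow path: ask the classloader
		Class<?> loaded = tryLoad( name );
		if ( loaded == null ) {
			// remember the failure, but keep the negative cache bounded
			if ( knownInvalid.size() < NEGATIVE_CACHE_LIMIT ) {
				knownInvalid.put( name, name );
			}
			return null;
		}
		resolved.put( name, loaded );
		return loaded;
	}

	private Class<?> tryLoad(String name) {
		try {
			return Class.forName( name );
		}
		catch (ClassNotFoundException | LinkageError e) {
			return null;
		}
	}

	public static void main(String[] args) {
		ImportResolverSketch resolver = new ImportResolverSketch();
		System.out.println( resolver.resolve( "java.lang.String" ) ); // hits the classloader once
		System.out.println( resolver.resolve( "java.lang.String" ) ); // served from the positive cache
		System.out.println( resolver.resolve( "no.such.Class" ) );    // null, recorded in the negative cache
		System.out.println( resolver.resolve( "no.such.Class" ) );    // null, classloader not consulted again
	}
}

The important property is that each unknown name costs at most one classloader round trip until the negative cache reaches its limit, while successfully resolved names are never evicted, which is what removes the old 1_000-entry ceiling on valid imports.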
MetamodelBoundedCacheTest.java

@@ -38,15 +38,22 @@ public class MetamodelBoundedCacheTest extends BaseNonConfigCoreFunctionalTestCa
 			jpaMetamodel.qualifyImportableName( "nonexistend" + i );
 		}
 
-		Field field = JpaMetamodelImpl.class.getDeclaredField( "nameToImportMap" );
-		field.setAccessible( true );
+		Map<?, ?> validImports = extractMapFromMetamodel( jpaMetamodel, "nameToImportMap" );
+		Map<?, ?> invalidImports = extractMapFromMetamodel( jpaMetamodel, "knownInvalidnameToImportMap" );
 
-		//noinspection unchecked
-		Map<String, String> imports = (Map<String, String>) field.get( jpaMetamodel );
+		assertEquals( 2, validImports.size() );
 
 		// VERY hard-coded, but considering the possibility of a regression of a memory-related issue,
 		// it should be worth it
-		assertEquals( 1000, imports.size() );
+		assertEquals( 1000, invalidImports.size() );
 	}
 
+
+	private Map<?, ?> extractMapFromMetamodel(final JpaMetamodel jpaMetamodel, final String fieldName) throws NoSuchFieldException, IllegalAccessException {
+		Field field = JpaMetamodelImpl.class.getDeclaredField( fieldName );
+		field.setAccessible( true );
+		//noinspection unchecked
+		return (Map<?,?>) field.get( jpaMetamodel );
+	}
+
 	@Override
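The TODO comment in resolveImport mentions that the negative cache might benefit from LRU eviction rather than simply refusing new entries once the 1_000 limit is reached. Purely as an illustration of that idea, and not something this commit does, a plain-JDK sketch could look like the following; the class and method names are hypothetical.

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

public class LruNegativeCacheSketch {
	private static final int LIMIT = 1_000;

	// access-ordered LinkedHashMap evicts the eldest entry once the limit is exceeded;
	// synchronizedMap gives basic thread safety at the cost of coarse locking.
	private final Map<String, String> knownInvalid = Collections.synchronizedMap(
			new LinkedHashMap<String, String>( 64, 0.75f, true ) {
				@Override
				protected boolean removeEldestEntry(Map.Entry<String, String> eldest) {
					return size() > LIMIT;
				}
			}
	);

	public void rememberInvalid(String name) {
		knownInvalid.put( name, name );
	}

	public boolean isKnownInvalid(String name) {
		return knownInvalid.containsKey( name );
	}

	public static void main(String[] args) {
		LruNegativeCacheSketch cache = new LruNegativeCacheSketch();
		for ( int i = 0; i < 1_500; i++ ) {
			cache.rememberInvalid( "com.acme.Missing" + i );
		}
		// the earliest entries were evicted; the most recent ones are still present
		System.out.println( cache.isKnownInvalid( "com.acme.Missing0" ) );    // false
		System.out.println( cache.isKnownInvalid( "com.acme.Missing1499" ) ); // true
	}
}

An access-ordered map keeps recently queried names and drops the stalest ones, but the precomputation alternative mentioned in the same comment (filling nameToImportMap with all valid imports at bootstrap) would avoid the classloader entirely.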