HHH-8893 - Develop Hibernate mapping XSD extending the JPA mapping (orm) XSD;

HHH-7556 - Clean up packages;
HHH-7164 - Finish out SessionFactoryBuilder api
Steve Ebersole 2014-01-31 09:37:21 -06:00
parent e0d118d8c6
commit 70e965d750
241 changed files with 5045 additions and 17230 deletions

View File

@@ -23,7 +23,7 @@
*/
package org.hibernate;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.spi.Origin;
import org.hibernate.internal.util.xml.XmlDocument;
/**

View File

@@ -23,6 +23,8 @@
*/
package org.hibernate.boot.registry;
import java.io.File;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashSet;
@@ -106,9 +108,6 @@ public BootstrapServiceRegistry getBootstrapServiceRegistry() {
* @param resourceName The name by which to perform a resource look up for the properties file.
*
* @return this, for method chaining
*
* @see #configure()
* @see #configure(String)
*/
@SuppressWarnings( {"unchecked"})
public StandardServiceRegistryBuilder loadProperties(String resourceName) {
@@ -116,6 +115,22 @@ public StandardServiceRegistryBuilder loadProperties(String resourceName) {
return this;
}
/**
* Read settings from a {@link java.util.Properties} file.
*
* Differs from {@link #configure()} and {@link #configure(String)} in that here we expect to read a
* {@link java.util.Properties} file while for {@link #configure} we read the XML variant.
*
* @param propertyFile The property file reference
*
* @return this, for method chaining
*/
@SuppressWarnings( {"unchecked"})
public StandardServiceRegistryBuilder loadProperties(File propertyFile) {
settings.putAll( configLoader.loadProperties( propertyFile ) );
return this;
}
/**
* Read setting information from an XML file using the standard resource location.
*
@@ -148,6 +163,44 @@ public StandardServiceRegistryBuilder configure(String resourceName) {
return this;
}
/**
* Read setting information from an XML file identified by a {@link File} reference.
*
* @param file The config file reference
*
* @return this, for method chaining
*
* @see #loadProperties(String)
*/
@SuppressWarnings( {"unchecked"})
public StandardServiceRegistryBuilder configure(File file) {
final JaxbHibernateConfiguration configurationElement = configLoader.loadConfigFile( file );
for ( JaxbHibernateConfiguration.JaxbSessionFactory.JaxbProperty xmlProperty : configurationElement.getSessionFactory().getProperty() ) {
settings.put( xmlProperty.getName(), xmlProperty.getValue() );
}
return this;
}
/**
* Read setting information from an XML file identified by a {@link URL}.
*
* @param configFileUrl The config file url reference
*
* @return this, for method chaining
*
* @see #loadProperties(String)
*/
@SuppressWarnings( {"unchecked"})
public StandardServiceRegistryBuilder configure(URL configFileUrl) {
final JaxbHibernateConfiguration configurationElement = configLoader.loadConfig( configFileUrl );
for ( JaxbHibernateConfiguration.JaxbSessionFactory.JaxbProperty xmlProperty : configurationElement.getSessionFactory().getProperty() ) {
settings.put( xmlProperty.getName(), xmlProperty.getValue() );
}
return this;
}
/**
* Apply a setting value.
*

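For orientation, here is a minimal bootstrap sketch exercising the new File-based overloads added above; the file names and the extra applySetting call are illustrative rather than part of this commit.

import java.io.File;

import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;

public class RegistryBootstrapSketch {
    public static void main(String[] args) {
        // Build a StandardServiceRegistry from a properties file and an XML config file,
        // using the loadProperties(File) and configure(File) overloads introduced here.
        final StandardServiceRegistry registry = new StandardServiceRegistryBuilder()
                .loadProperties( new File( "hibernate.properties" ) )
                .configure( new File( "hibernate.cfg.xml" ) )
                .applySetting( "hibernate.show_sql", "true" )
                .build();
        try {
            // the registry would normally be handed to a SessionFactory builder here
        }
        finally {
            // release resources held by the registry
            StandardServiceRegistryBuilder.destroy( registry );
        }
    }
}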
View File

@@ -1,7 +1,7 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* Copyright (c) 2014, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
@@ -21,28 +21,21 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal;
import javax.persistence.metamodel.MappedSuperclassType;
package org.hibernate.boot.spi;
/**
* @author Emmanuel Bernard
*/
public class MappedSuperclassTypeImpl<X> extends AbstractIdentifiableType<X> implements MappedSuperclassType<X> {
public MappedSuperclassTypeImpl(
Class<X> javaType,
AbstractIdentifiableType<? super X> superType,
boolean hasIdentifierProperty,
boolean versioned) {
super( javaType, superType, hasIdentifierProperty, versioned );
}
* @author Steve Ebersole
*/
public class JaccPermissionDefinition {
public final String contextId;
public final String role;
public final String clazz;
public final String actions;
public PersistenceType getPersistenceType() {
return PersistenceType.MAPPED_SUPERCLASS;
}
@Override
protected boolean requiresSupertypeForNonDeclaredIdentifier() {
return false;
public JaccPermissionDefinition(String contextId, String role, String clazz, String actions) {
this.contextId = contextId;
this.role = role;
this.clazz = clazz;
this.actions = actions;
}
}
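The new JaccPermissionDefinition above is a plain immutable holder for JACC grant data; a hypothetical construction (all values invented) would look like:

import org.hibernate.boot.spi.JaccPermissionDefinition;

public class JaccPermissionSketch {
    public static void main(String[] args) {
        // contextId, role, class and actions are placeholder values
        final JaccPermissionDefinition permission = new JaccPermissionDefinition(
                "my-persistence-unit",
                "admin",
                "com.example.Order",
                "insert,update,delete"
        );
        // fields are public and final, so they are read directly
        System.out.println( permission.role + " -> " + permission.clazz + " [" + permission.actions + "]" );
    }
}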

View File

@@ -66,10 +66,6 @@ public AnnotationConfiguration addPackage(String packageName) throws MappingExce
return (AnnotationConfiguration) super.addPackage( packageName );
}
public ExtendedMappings createExtendedMappings() {
return new ExtendedMappingsImpl();
}
@Override
public AnnotationConfiguration addFile(String xmlFile) throws MappingException {
super.addFile( xmlFile );
@@ -160,12 +156,6 @@ public AnnotationConfiguration addProperties(Properties extraProperties) {
return this;
}
@Override
public AnnotationConfiguration mergeProperties(Properties properties) {
super.mergeProperties( properties );
return this;
}
@Override
public AnnotationConfiguration setProperty(String propertyName, String value) {
super.setProperty( propertyName, value );
@@ -196,50 +186,15 @@ public AnnotationConfiguration configure(File configFile) throws HibernateExcept
return this;
}
@Override
protected AnnotationConfiguration doConfigure(InputStream stream, String resourceName) throws HibernateException {
super.doConfigure( stream, resourceName );
return this;
}
@Override
public AnnotationConfiguration configure(org.w3c.dom.Document document) throws HibernateException {
super.configure( document );
return this;
}
@Override
protected AnnotationConfiguration doConfigure(Document doc) throws HibernateException {
super.doConfigure( doc );
return this;
}
@Override
public AnnotationConfiguration setCacheConcurrencyStrategy(String clazz, String concurrencyStrategy) {
super.setCacheConcurrencyStrategy( clazz, concurrencyStrategy );
return this;
}
@Override
public AnnotationConfiguration setCacheConcurrencyStrategy(String clazz, String concurrencyStrategy, String region) {
super.setCacheConcurrencyStrategy( clazz, concurrencyStrategy, region );
return this;
}
@Override
public AnnotationConfiguration setCollectionCacheConcurrencyStrategy(String collectionRole, String concurrencyStrategy)
throws MappingException {
super.setCollectionCacheConcurrencyStrategy( collectionRole, concurrencyStrategy );
return this;
}
@Override
public AnnotationConfiguration setNamingStrategy(NamingStrategy namingStrategy) {
super.setNamingStrategy( namingStrategy );
return this;
}
@Deprecated
protected class ExtendedMappingsImpl extends MappingsImpl {
}
}

View File

@@ -23,10 +23,11 @@
*/
package org.hibernate.cfg.beanvalidation;
import java.util.Map;
import java.util.Set;
import org.hibernate.cfg.Configuration;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.service.spi.SessionFactoryServiceRegistry;
/**
@@ -48,11 +49,18 @@ public interface ActivationContext {
public Set<ValidationMode> getValidationModes();
/**
* Access the Configuration
* Access the Metadata object (processed mapping information)
*
* @return The Hibernate Configuration object
* @return The Hibernate Metadata object
*/
public Configuration getConfiguration();
public MetadataImplementor getMetadata();
/**
* Access to all settings
*
* @return The settings
*/
public Map getSettings();
/**
* Access the SessionFactory being built to trigger this BV activation

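A rough sketch of a consumer of the revised contract, reading the requested validation mode from the settings map now exposed by getSettings(); the standard JPA key javax.persistence.validation.mode is assumed here.

import java.util.Map;

import org.hibernate.cfg.beanvalidation.ActivationContext;

public class ActivationContextConsumerSketch {
    // Illustrative helper only: pulls the validation mode out of the settings map.
    static String requestedValidationMode(ActivationContext context) {
        final Map settings = context.getSettings();
        final Object mode = settings.get( "javax.persistence.validation.mode" );
        // AUTO is the JPA default when no mode is configured
        return mode == null ? "AUTO" : mode.toString();
    }
}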
View File

@@ -24,6 +24,7 @@
package org.hibernate.cfg.beanvalidation;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
@@ -79,15 +80,14 @@ public BeanValidationEventListener() {
* @param factory The {@code ValidatorFactory} to use to create {@code Validator} instance(s)
* @param properties Configured properties
*/
public BeanValidationEventListener(ValidatorFactory factory, Properties properties) {
public BeanValidationEventListener(ValidatorFactory factory, Map properties) {
init( factory, properties );
}
public void initialize(Configuration cfg) {
public void initialize(Map settings) {
if ( !initialized ) {
ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
Properties props = cfg.getProperties();
init( factory, props );
init( factory, settings );
}
}
@@ -115,7 +115,7 @@ public boolean onPreDelete(PreDeleteEvent event) {
return false;
}
private void init(ValidatorFactory factory, Properties properties) {
private void init(ValidatorFactory factory, Map properties) {
this.factory = factory;
groupsPerOperation = new GroupsPerOperation( properties );
initialized = true;

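Because the constructor now takes a plain Map instead of Properties, a construction sketch might look as follows; the pre-persist group key is the standard JPA one and appears here purely for illustration.

import java.util.HashMap;
import java.util.Map;

import javax.validation.Validation;
import javax.validation.ValidatorFactory;

import org.hibernate.cfg.beanvalidation.BeanValidationEventListener;

public class ListenerConstructionSketch {
    public static void main(String[] args) {
        final ValidatorFactory validatorFactory = Validation.buildDefaultValidatorFactory();

        // Settings map replacing the previous Properties-based constructor argument.
        final Map<String, Object> settings = new HashMap<String, Object>();
        settings.put( "javax.persistence.validation.group.pre-persist", "javax.validation.groups.Default" );

        final BeanValidationEventListener listener = new BeanValidationEventListener( validatorFactory, settings );
        // the listener would then be appended to the PRE_INSERT/PRE_UPDATE/PRE_DELETE event groups
    }
}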
View File

@@ -25,11 +25,13 @@
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.Set;
import org.hibernate.HibernateException;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.cfg.Configuration;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.integrator.spi.Integrator;
import org.hibernate.internal.CoreMessageLogger;
@@ -103,8 +105,51 @@ public void integrate(
final Configuration configuration,
final SessionFactoryImplementor sessionFactory,
final SessionFactoryServiceRegistry serviceRegistry) {
throw new HibernateException( "UGH!" );
}
private boolean isBeanValidationApiAvailable(ClassLoaderService classLoaderService) {
try {
classLoaderService.classForName( BV_CHECK_CLASS );
return true;
}
catch (Exception e) {
return false;
}
}
/**
* Used to validate the case when the Bean Validation API is not available.
*
* @param modes The requested validation modes.
*/
private void validateMissingBeanValidationApi(Set<ValidationMode> modes) {
if ( modes.contains( ValidationMode.CALLBACK ) ) {
throw new IntegrationException( "Bean Validation API was not available, but 'callback' validation was requested" );
}
if ( modes.contains( ValidationMode.DDL ) ) {
throw new IntegrationException( "Bean Validation API was not available, but 'ddl' validation was requested" );
}
}
private Class loadTypeSafeActivatorClass(ClassLoaderService classLoaderService) {
try {
return classLoaderService.classForName( ACTIVATOR_CLASS_NAME );
}
catch (Exception e) {
throw new HibernateException( "Unable to load TypeSafeActivator class", e );
}
}
@Override
public void integrate(
final MetadataImplementor metadata,
final SessionFactoryImplementor sessionFactory,
final SessionFactoryServiceRegistry serviceRegistry ) {
final ConfigurationService configurationService = serviceRegistry.getService( ConfigurationService.class );
// IMPL NOTE : see the comments on ActivationContext.getValidationModes() as to why this is multi-valued...
final Set<ValidationMode> modes = ValidationMode.getModes( configuration.getProperties().get( MODE_PROPERTY ) );
final Set<ValidationMode> modes = ValidationMode.getModes( configurationService.getSettings().get( MODE_PROPERTY ) );
if ( modes.size() > 1 ) {
LOG.multipleValidationModes( ValidationMode.loggable( modes ) );
}
@@ -129,8 +174,13 @@ public Set<ValidationMode> getValidationModes() {
}
@Override
public Configuration getConfiguration() {
return configuration;
public MetadataImplementor getMetadata() {
return metadata;
}
@Override
public Map getSettings() {
return configurationService.getSettings();
}
@Override
@@ -168,48 +218,6 @@ public SessionFactoryServiceRegistry getServiceRegistry() {
}
}
private boolean isBeanValidationApiAvailable(ClassLoaderService classLoaderService) {
try {
classLoaderService.classForName( BV_CHECK_CLASS );
return true;
}
catch (Exception e) {
return false;
}
}
/**
* Used to validate the case when the Bean Validation API is not available.
*
* @param modes The requested validation modes.
*/
private void validateMissingBeanValidationApi(Set<ValidationMode> modes) {
if ( modes.contains( ValidationMode.CALLBACK ) ) {
throw new IntegrationException( "Bean Validation API was not available, but 'callback' validation was requested" );
}
if ( modes.contains( ValidationMode.DDL ) ) {
throw new IntegrationException( "Bean Validation API was not available, but 'ddl' validation was requested" );
}
}
private Class loadTypeSafeActivatorClass(ClassLoaderService classLoaderService) {
try {
return classLoaderService.classForName( ACTIVATOR_CLASS_NAME );
}
catch (Exception e) {
throw new HibernateException( "Unable to load TypeSafeActivator class", e );
}
}
@Override
public void integrate(
MetadataImplementor metadata,
SessionFactoryImplementor sessionFactory,
SessionFactoryServiceRegistry serviceRegistry ) {
}
@Override
public void disintegrate(SessionFactoryImplementor sessionFactory, SessionFactoryServiceRegistry serviceRegistry) {
// nothing to do here afaik

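For context, a hypothetical third-party Integrator written against the MetadataImplementor-based integrate variant shown above; the class name and behavior are invented, and both integrate overloads are implemented because the Integrator contract still declares both at this point.

import org.hibernate.cfg.Configuration;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.integrator.spi.Integrator;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.service.spi.SessionFactoryServiceRegistry;

public class AuditLoggingIntegrator implements Integrator {
    @Override
    public void integrate(
            MetadataImplementor metadata,
            SessionFactoryImplementor sessionFactory,
            SessionFactoryServiceRegistry serviceRegistry) {
        // Hypothetical: inspect the processed mapping information and register event listeners.
        // Discovery would typically happen via META-INF/services/org.hibernate.integrator.spi.Integrator.
    }

    @Override
    public void integrate(
            Configuration configuration,
            SessionFactoryImplementor sessionFactory,
            SessionFactoryServiceRegistry serviceRegistry) {
        // Legacy Configuration-based variant; not used by this sketch.
    }

    @Override
    public void disintegrate(SessionFactoryImplementor sessionFactory, SessionFactoryServiceRegistry serviceRegistry) {
        // nothing to clean up in this sketch
    }
}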
View File

@@ -27,7 +27,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.validation.groups.Default;
import org.hibernate.HibernateException;
@@ -45,14 +44,14 @@ public class GroupsPerOperation {
private Map<Operation, Class<?>[]> groupsPerOperation = new HashMap<Operation, Class<?>[]>(4);
public GroupsPerOperation(Properties properties) {
public GroupsPerOperation(Map properties) {
setGroupsForOperation( Operation.INSERT, properties );
setGroupsForOperation( Operation.UPDATE, properties );
setGroupsForOperation( Operation.DELETE, properties );
setGroupsForOperation( Operation.DDL, properties );
}
private void setGroupsForOperation(Operation operation, Properties properties) {
private void setGroupsForOperation(Operation operation, Map properties) {
Object property = properties.get( operation.getGroupPropertyName() );
Class<?>[] groups;

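A small usage sketch for the Map-based constructor; with no group settings supplied, each operation falls back to its built-in default groups.

import java.util.Collections;

import org.hibernate.cfg.beanvalidation.GroupsPerOperation;

public class GroupsPerOperationSketch {
    public static void main(String[] args) {
        // An empty settings map means no per-operation group overrides.
        final GroupsPerOperation groups = new GroupsPerOperation( Collections.emptyMap() );
        final Class<?>[] ddlGroups = groups.get( GroupsPerOperation.Operation.DDL );
        System.out.println( "validation groups applied for DDL: " + ddlGroups.length );
    }
}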
View File

@@ -24,12 +24,10 @@
package org.hibernate.cfg.beanvalidation;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import javax.validation.Validation;
@@ -56,10 +54,13 @@
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
import org.hibernate.mapping.Selectable;
import org.hibernate.mapping.SingleTableSubclass;
import org.hibernate.metamodel.spi.binding.AttributeBinding;
import org.hibernate.metamodel.spi.binding.EntityBinding;
import org.hibernate.metamodel.spi.domain.Attribute;
import org.hibernate.metamodel.spi.domain.SingularAttribute;
import org.jboss.logging.Logger;
@@ -91,7 +92,7 @@ public static void validateSuppliedFactory(Object object) {
@SuppressWarnings("UnusedDeclaration")
public static void activate(ActivationContext activationContext) {
final Properties properties = activationContext.getConfiguration().getProperties();
final Map properties = activationContext.getSettings();
final ValidatorFactory factory;
try {
factory = getValidatorFactory( properties );
@@ -122,13 +123,13 @@ public static void applyCallbackListeners(ValidatorFactory validatorFactory, Act
// de-activate not-null tracking at the core level when Bean Validation is present unless the user explicitly
// asks for it
if ( activationContext.getConfiguration().getProperty( Environment.CHECK_NULLABILITY ) == null ) {
if ( activationContext.getSettings().get( Environment.CHECK_NULLABILITY ) == null ) {
activationContext.getSessionFactory().getSettings().setCheckNullability( false );
}
final BeanValidationEventListener listener = new BeanValidationEventListener(
validatorFactory,
activationContext.getConfiguration().getProperties()
activationContext.getSettings()
);
final EventListenerRegistry listenerRegistry = activationContext.getServiceRegistry()
@@ -140,12 +141,12 @@ public static void applyCallbackListeners(ValidatorFactory validatorFactory, Act
listenerRegistry.appendListeners( EventType.PRE_UPDATE, listener );
listenerRegistry.appendListeners( EventType.PRE_DELETE, listener );
listener.initialize( activationContext.getConfiguration() );
listener.initialize( activationContext.getSettings() );
}
@SuppressWarnings({"unchecked", "UnusedParameters"})
private static void applyRelationalConstraints(ValidatorFactory factory, ActivationContext activationContext) {
final Properties properties = activationContext.getConfiguration().getProperties();
final Map properties = activationContext.getSettings();
if ( ! ConfigurationHelper.getBoolean( BeanValidationIntegrator.APPLY_CONSTRAINTS, properties, true ) ){
LOG.debug( "Skipping application of relational constraints from legacy Hibernate Validator" );
return;
@@ -156,25 +157,15 @@ private static void applyRelationalConstraints(ValidatorFactory factory, Activat
return;
}
applyRelationalConstraints(
factory,
activationContext.getConfiguration().createMappings().getClasses().values(),
properties,
activationContext.getServiceRegistry().getService( JdbcServices.class ).getDialect()
);
}
final Dialect dialect = activationContext.getServiceRegistry().getService( JdbcServices.class ).getDialect();
@SuppressWarnings( {"UnusedDeclaration"})
public static void applyRelationalConstraints(
ValidatorFactory factory,
Collection<PersistentClass> persistentClasses,
Properties properties,
Dialect dialect) {
Class<?>[] groupsArray = new GroupsPerOperation( properties ).get( GroupsPerOperation.Operation.DDL );
Set<Class<?>> groups = new HashSet<Class<?>>( Arrays.asList( groupsArray ) );
for ( PersistentClass persistentClass : persistentClasses ) {
final String className = persistentClass.getClassName();
// Hardy : I started working on this as part of a big metamodel clean up I am doing, but this got to be too
// much of a rabbit hole. - Steve
for ( EntityBinding entityBinding : activationContext.getMetadata().getEntityBindings() ) {
final String className = entityBinding.getEntity().getClassName();
if ( className == null || className.length() == 0 ) {
continue;
@@ -188,7 +179,7 @@ public static void applyRelationalConstraints(
}
try {
applyDDL( "", persistentClass, clazz, factory, groups, true, dialect );
applyDDL( "", entityBinding, clazz, factory, groups, true, dialect );
}
catch (Exception e) {
LOG.unableToApplyConstraints( className, e );
@@ -198,7 +189,7 @@ public static void applyRelationalConstraints(
private static void applyDDL(
String prefix,
PersistentClass persistentClass,
EntityBinding entityBinding,
Class<?> clazz,
ValidatorFactory factory,
Set<Class<?>> groups,
@@ -208,36 +199,48 @@ private static void applyDDL(
//no bean level constraints can be applied, go to the properties
for ( PropertyDescriptor propertyDesc : descriptor.getConstrainedProperties() ) {
Property property = findPropertyByName( persistentClass, prefix + propertyDesc.getPropertyName() );
AttributeBinding attributeBinding = findPropertyByName(
entityBinding,
prefix + propertyDesc.getPropertyName()
);
boolean hasNotNull;
if ( property != null ) {
if ( attributeBinding != null ) {
hasNotNull = applyConstraints(
propertyDesc.getConstraintDescriptors(), property, propertyDesc, groups, activateNotNull, dialect
propertyDesc.getConstraintDescriptors(),
attributeBinding,
propertyDesc,
groups,
activateNotNull,
dialect
);
if ( property.isComposite() && propertyDesc.isCascaded() ) {
Class<?> componentClass = ( (Component) property.getValue() ).getComponentClass();
/*
* we can apply not null if the upper component let's us activate not null
* and if the property is not null.
* Otherwise, all sub columns should be left nullable
*/
final boolean canSetNotNullOnColumns = activateNotNull && hasNotNull;
applyDDL(
prefix + propertyDesc.getPropertyName() + ".",
persistentClass, componentClass, factory, groups,
canSetNotNullOnColumns,
dialect
);
if ( propertyDesc.isCascaded() ) {
// if it is a composite, visit its attributes
final Attribute attribute = attributeBinding.getAttribute();
if ( attribute.isSingular() ) {
final SingularAttribute singularAttribute = (SingularAttribute) attribute;
if ( singularAttribute.getSingularAttributeType().isAggregate() ) {
final Class componentClass = singularAttribute.getSingularAttributeType().getClassReference();
final boolean canSetNotNullOnColumns = activateNotNull && hasNotNull;
applyDDL(
prefix + propertyDesc.getPropertyName() + ".",
entityBinding,
componentClass,
factory,
groups,
canSetNotNullOnColumns,
dialect
);
}
}
}
//FIXME add collection of components
}
}
}
private static boolean applyConstraints(
Set<ConstraintDescriptor<?>> constraintDescriptors,
Property property,
AttributeBinding property,
PropertyDescriptor propertyDesc,
Set<Class<?>> groups,
boolean canApplyNotNull,
@@ -248,20 +251,24 @@ private static boolean applyConstraints(
continue;
}
if ( canApplyNotNull ) {
hasNotNull = hasNotNull || applyNotNull( property, descriptor );
}
// todo : Hardy - here are the rabbit holes...
// first...
// if ( canApplyNotNull ) {
// hasNotNull = hasNotNull || applyNotNull( property, descriptor );
// }
// apply bean validation specific constraints
applyDigits( property, descriptor );
applySize( property, descriptor, propertyDesc );
applyMin( property, descriptor, dialect );
applyMax( property, descriptor, dialect );
// second...
// applyDigits( property, descriptor );
// applySize( property, descriptor, propertyDesc );
// applyMin( property, descriptor, dialect );
// applyMax( property, descriptor, dialect );
// apply hibernate validator specific constraints - we cannot import any HV specific classes though!
// no need to check explicitly for @Range. @Range is a composed constraint using @Min and @Max which
// will be taken care later
applyLength( property, descriptor, propertyDesc );
// third
// applyLength( property, descriptor, propertyDesc );
// pass an empty set as composing constraints inherit the main constraint and thus are matching already
hasNotNull = hasNotNull || applyConstraints(
@@ -374,71 +381,79 @@ private static void applyLength(Property property, ConstraintDescriptor<?> descr
}
}
/**
* @param associatedClass
* @param propertyName
* @return the property by path in a recursive way, including IdentifierProperty in the loop if propertyName is
* <code>null</code>. If propertyName is <code>null</code> or empty, the IdentifierProperty is returned
*/
private static Property findPropertyByName(PersistentClass associatedClass, String propertyName) {
Property property = null;
Property idProperty = associatedClass.getIdentifierProperty();
String idName = idProperty != null ? idProperty.getName() : null;
private static AttributeBinding findPropertyByName(EntityBinding entityBinding, String propertyName) {
// Returns the property by path in a recursive way, including IdentifierProperty in the loop
// if propertyName is null. If propertyName is null or empty, the IdentifierProperty is returned
final AttributeBinding idAttributeBinding = entityBinding.getHierarchyDetails().getEntityIdentifier().getAttributeBinding();
final String idAttributeName = idAttributeBinding == null ? null : idAttributeBinding.getAttribute().getName();
AttributeBinding property = null;
try {
if ( propertyName == null
|| propertyName.length() == 0
|| propertyName.equals( idName ) ) {
|| propertyName.equals( idAttributeName ) ) {
//default to id
property = idProperty;
property = idAttributeBinding;
}
else {
if ( propertyName.indexOf( idName + "." ) == 0 ) {
property = idProperty;
propertyName = propertyName.substring( idName.length() + 1 );
if ( propertyName.indexOf( idAttributeName + "." ) == 0 ) {
property = idAttributeBinding;
propertyName = propertyName.substring( idAttributeName.length() + 1 );
}
StringTokenizer st = new StringTokenizer( propertyName, ".", false );
while ( st.hasMoreElements() ) {
String element = (String) st.nextElement();
if ( property == null ) {
property = associatedClass.getProperty( element );
property = entityBinding.locateAttributeBinding( element );
}
else {
if ( !property.isComposite() ) {
if ( !isComposite( property ) ) {
return null;
}
property = ( (Component) property.getValue() ).getProperty( element );
// todo : Hardy - not sure exactly what to do here...
//property = ( (Component) property.getValue() ).getProperty( element );
}
}
}
}
catch ( MappingException e ) {
try {
//if we do not find it try to check the identifier mapper
if ( associatedClass.getIdentifierMapper() == null ) {
return null;
}
StringTokenizer st = new StringTokenizer( propertyName, ".", false );
while ( st.hasMoreElements() ) {
String element = (String) st.nextElement();
if ( property == null ) {
property = associatedClass.getIdentifierMapper().getProperty( element );
}
else {
if ( !property.isComposite() ) {
return null;
}
property = ( (Component) property.getValue() ).getProperty( element );
}
}
}
catch ( MappingException ee ) {
return null;
}
// todo : Hardy - nor here...
// try {
// //if we do not find it try to check the identifier mapper
// if ( associatedClass.getIdentifierMapper() == null ) {
// return null;
// }
// StringTokenizer st = new StringTokenizer( propertyName, ".", false );
// while ( st.hasMoreElements() ) {
// String element = (String) st.nextElement();
// if ( property == null ) {
// property = associatedClass.getIdentifierMapper().getProperty( element );
// }
// else {
// if ( !property.isComposite() ) {
// return null;
// }
// property = ( (Component) property.getValue() ).getProperty( element );
// }
// }
// }
// catch ( MappingException ee ) {
// return null;
// }
}
return property;
}
private static ValidatorFactory getValidatorFactory(Map<Object, Object> properties) {
private static boolean isComposite(AttributeBinding property) {
if ( property.getAttribute().isSingular() ) {
final SingularAttribute singularAttribute = (SingularAttribute) property.getAttribute();
return singularAttribute.getSingularAttributeType().isAggregate();
}
return false;
}
private static ValidatorFactory getValidatorFactory(Map properties) {
ValidatorFactory factory = null;
if ( properties != null ) {
Object unsafeProperty = properties.get( FACTORY_PROPERTY );

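The hunk above ends in getValidatorFactory(Map); as a hedged sketch, a caller can hand a pre-built ValidatorFactory through the settings map, assuming FACTORY_PROPERTY is the standard JPA javax.persistence.validation.factory key.

import java.util.HashMap;
import java.util.Map;

import javax.validation.Validation;
import javax.validation.ValidatorFactory;

public class ValidatorFactorySettingSketch {
    public static void main(String[] args) {
        // An externally managed factory, e.g. one shared with a web framework.
        final ValidatorFactory externallyManagedFactory = Validation.buildDefaultValidatorFactory();

        // Assumption: this is the key getValidatorFactory(Map) looks up as FACTORY_PROPERTY.
        final Map<String, Object> settings = new HashMap<String, Object>();
        settings.put( "javax.persistence.validation.factory", externallyManagedFactory );

        // A map shaped like this is what activationContext.getSettings() supplies above.
        System.out.println( settings.containsKey( "javax.persistence.validation.factory" ) );
    }
}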
View File

@@ -1010,7 +1010,6 @@ protected String getDropSequenceString(String sequenceName) throws MappingExcept
* Get the select command used retrieve the names of all sequences.
*
* @return The select command; or null if sequences are not supported.
* @see org.hibernate.tool.hbm2ddl.SchemaUpdate
*/
public String getQuerySequencesString() {
return null;

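For context on this javadoc tweak: dialects that do support listing sequences override the method; the sketch below extends Dialect directly with an invented catalog query, whereas a real implementation would usually extend an existing concrete dialect.

import org.hibernate.dialect.Dialect;

public class SequenceListingDialectSketch extends Dialect {
    @Override
    public String getQuerySequencesString() {
        // Hypothetical catalog view; the actual statement is database-specific.
        return "select sequence_name from information_schema.sequences";
    }
}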
View File

@@ -32,6 +32,8 @@
import org.hibernate.id.enhanced.AccessCallback;
import org.hibernate.id.enhanced.LegacyHiLoAlgorithmOptimizer;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.metamodel.spi.relational.Database;
import org.hibernate.metamodel.spi.relational.Schema;
import org.hibernate.type.Type;
/**

View File

@@ -40,10 +40,8 @@
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.naming.Reference;
import javax.naming.StringRefAddr;
import javax.persistence.metamodel.Metamodel;
import org.hibernate.AssertionFailure;
import org.hibernate.Cache;
@@ -55,7 +53,6 @@
import org.hibernate.Interceptor;
import org.hibernate.MappingException;
import org.hibernate.MultiTenancyStrategy;
import org.hibernate.ObjectNotFoundException;
import org.hibernate.Session;
import org.hibernate.SessionBuilder;
import org.hibernate.SessionEventListener;
@@ -64,10 +61,8 @@
import org.hibernate.StatelessSession;
import org.hibernate.StatelessSessionBuilder;
import org.hibernate.TypeHelper;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cache.internal.CacheDataDescriptionImpl;
import org.hibernate.cache.spi.CollectionRegion;
import org.hibernate.cache.spi.EntityRegion;
@@ -110,7 +105,6 @@
import org.hibernate.engine.spi.ActionQueue;
import org.hibernate.engine.spi.CacheImplementor;
import org.hibernate.engine.spi.FilterDefinition;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.engine.spi.NamedQueryDefinition;
import org.hibernate.engine.spi.NamedSQLQueryDefinition;
import org.hibernate.engine.spi.SessionBuilderImplementor;
@@ -126,16 +120,11 @@
import org.hibernate.id.factory.IdentifierGeneratorFactory;
import org.hibernate.integrator.spi.Integrator;
import org.hibernate.integrator.spi.IntegratorService;
import org.hibernate.jpa.metamodel.internal.JpaMetaModelPopulationSetting;
import org.hibernate.jpa.metamodel.internal.builder.MetamodelBuilder;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.RootClass;
import org.hibernate.metadata.ClassMetadata;
import org.hibernate.metadata.CollectionMetadata;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.metamodel.spi.binding.EntityBinding;
import org.hibernate.metamodel.spi.binding.PluralAttributeBinding;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.Loadable;
@@ -143,7 +132,6 @@
import org.hibernate.persister.spi.PersisterFactory;
import org.hibernate.procedure.ProcedureCallMemento;
import org.hibernate.proxy.EntityNotFoundDelegate;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.service.spi.SessionFactoryServiceRegistry;
import org.hibernate.service.spi.SessionFactoryServiceRegistryFactory;
@@ -151,12 +139,11 @@
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.hbm2ddl.SchemaUpdate;
import org.hibernate.tool.hbm2ddl.SchemaValidator;
import org.hibernate.tuple.entity.EntityTuplizer;
import org.hibernate.type.AssociationType;
import org.hibernate.type.Type;
import org.hibernate.type.TypeResolver;
import org.jboss.logging.Logger;
@@ -219,443 +206,442 @@ public final class SessionFactoryImpl
private final transient TypeHelper typeHelper;
private final transient TransactionEnvironment transactionEnvironment;
private final transient SessionFactoryOptions sessionFactoryOptions;
private final transient Metamodel jpaMetamodel;
@SuppressWarnings( {"unchecked", "ThrowableResultOfMethodCallIgnored"})
public SessionFactoryImpl(
final Configuration cfg,
Mapping mapping,
final ServiceRegistry serviceRegistry,
final Settings settings,
final SessionFactoryObserver userObserver) throws HibernateException {
LOG.debug( "Building session factory" );
sessionFactoryOptions = new SessionFactoryOptions() {
private final Interceptor interceptor;
private final CustomEntityDirtinessStrategy customEntityDirtinessStrategy;
private final CurrentTenantIdentifierResolver currentTenantIdentifierResolver;
private final EntityNotFoundDelegate entityNotFoundDelegate;
{
interceptor = cfg.getInterceptor();
customEntityDirtinessStrategy = serviceRegistry.getService( StrategySelector.class ).resolveDefaultableStrategy(
CustomEntityDirtinessStrategy.class,
cfg.getProperties().get( AvailableSettings.CUSTOM_ENTITY_DIRTINESS_STRATEGY ),
DefaultCustomEntityDirtinessStrategy.INSTANCE
);
if ( cfg.getCurrentTenantIdentifierResolver() != null ) {
currentTenantIdentifierResolver = cfg.getCurrentTenantIdentifierResolver();
}
else {
currentTenantIdentifierResolver =serviceRegistry.getService( StrategySelector.class ).resolveStrategy(
CurrentTenantIdentifierResolver.class,
cfg.getProperties().get( AvailableSettings.MULTI_TENANT_IDENTIFIER_RESOLVER )
);
}
if ( cfg.getEntityNotFoundDelegate() != null ) {
entityNotFoundDelegate = cfg.getEntityNotFoundDelegate();
}
else {
entityNotFoundDelegate = new EntityNotFoundDelegate() {
public void handleEntityNotFound(String entityName, Serializable id) {
throw new ObjectNotFoundException( id, entityName );
}
};
}
}
@Override
public StandardServiceRegistry getServiceRegistry() {
return (StandardServiceRegistry) serviceRegistry;
}
@Override
public Interceptor getInterceptor() {
return interceptor;
}
@Override
public CustomEntityDirtinessStrategy getCustomEntityDirtinessStrategy() {
return customEntityDirtinessStrategy;
}
@Override
public CurrentTenantIdentifierResolver getCurrentTenantIdentifierResolver() {
return currentTenantIdentifierResolver;
}
@Override
public SessionFactoryObserver[] getSessionFactoryObservers() {
return userObserver == null
? new SessionFactoryObserver[0]
: new SessionFactoryObserver[] { userObserver };
}
@Override
public EntityNameResolver[] getEntityNameResolvers() {
return new EntityNameResolver[0];
}
@Override
public Settings getSettings() {
return settings;
}
@Override
public EntityNotFoundDelegate getEntityNotFoundDelegate() {
if ( entityNotFoundDelegate == null ) {
}
return entityNotFoundDelegate;
}
};
this.settings = settings;
this.properties = new Properties();
this.properties.putAll( cfg.getProperties() );
this.serviceRegistry = serviceRegistry.getService( SessionFactoryServiceRegistryFactory.class ).buildServiceRegistry(
this,
cfg
);
this.jdbcServices = this.serviceRegistry.getService( JdbcServices.class );
this.dialect = this.jdbcServices.getDialect();
this.cacheAccess = this.serviceRegistry.getService( CacheImplementor.class );
this.sqlFunctionRegistry = new SQLFunctionRegistry( getDialect(), cfg.getSqlFunctions() );
for ( SessionFactoryObserver observer : sessionFactoryOptions.getSessionFactoryObservers() ) {
this.observer.addObserver( observer );
}
this.typeResolver = cfg.getTypeResolver().scope( this );
this.typeHelper = new TypeLocatorImpl( typeResolver );
this.filters = new HashMap<String, FilterDefinition>();
this.filters.putAll( cfg.getFilterDefinitions() );
LOG.debugf( "Session factory constructed with filter configurations : %s", filters );
LOG.debugf( "Instantiating session factory with properties: %s", properties );
this.queryPlanCache = new QueryPlanCache( this );
// todo : everything above here consider implementing as standard SF service. specifically: stats, caches, types, function-reg
class IntegratorObserver implements SessionFactoryObserver {
private ArrayList<Integrator> integrators = new ArrayList<Integrator>();
@Override
public void sessionFactoryCreated(SessionFactory factory) {
}
@Override
public void sessionFactoryClosed(SessionFactory factory) {
for ( Integrator integrator : integrators ) {
integrator.disintegrate( SessionFactoryImpl.this, SessionFactoryImpl.this.serviceRegistry );
}
integrators.clear();
}
}
final IntegratorObserver integratorObserver = new IntegratorObserver();
this.observer.addObserver( integratorObserver );
for ( Integrator integrator : serviceRegistry.getService( IntegratorService.class ).getIntegrators() ) {
integrator.integrate( cfg, this, this.serviceRegistry );
integratorObserver.integrators.add( integrator );
}
//Generators:
identifierGenerators = new HashMap();
Iterator classes = cfg.getClassMappings();
while ( classes.hasNext() ) {
PersistentClass model = (PersistentClass) classes.next();
if ( !model.isInherited() ) {
IdentifierGenerator generator = model.getIdentifier().createIdentifierGenerator(
cfg.getIdentifierGeneratorFactory(),
getDialect(),
settings.getDefaultCatalogName(),
settings.getDefaultSchemaName(),
(RootClass) model
);
identifierGenerators.put( model.getEntityName(), generator );
}
}
imports = new HashMap<String,String>( cfg.getImports() );
///////////////////////////////////////////////////////////////////////
// Prepare persisters and link them up with their cache
// region/access-strategy
final RegionFactory regionFactory = cacheAccess.getRegionFactory();
final String cacheRegionPrefix = settings.getCacheRegionPrefix() == null ? "" : settings.getCacheRegionPrefix() + ".";
final PersisterFactory persisterFactory = serviceRegistry.getService( PersisterFactory.class );
// todo : consider removing this silliness and just have EntityPersister directly implement ClassMetadata
// EntityPersister.getClassMetadata() for the internal impls simply "return this";
// collapsing those would allow us to remove this "extra" Map
//
// todo : similar for CollectionPersister/CollectionMetadata
entityPersisters = new HashMap();
Map entityAccessStrategies = new HashMap();
Map<String,ClassMetadata> classMeta = new HashMap<String,ClassMetadata>();
classes = cfg.getClassMappings();
while ( classes.hasNext() ) {
final PersistentClass model = (PersistentClass) classes.next();
model.prepareTemporaryTables( mapping, getDialect() );
final String cacheRegionName = cacheRegionPrefix + model.getRootClass().getCacheRegionName();
// cache region is defined by the root-class in the hierarchy...
EntityRegionAccessStrategy accessStrategy = ( EntityRegionAccessStrategy ) entityAccessStrategies.get( cacheRegionName );
if ( accessStrategy == null && settings.isSecondLevelCacheEnabled() ) {
final AccessType accessType = AccessType.fromExternalName( model.getCacheConcurrencyStrategy() );
if ( accessType != null ) {
LOG.tracef( "Building shared cache region for entity data [%s]", model.getEntityName() );
EntityRegion entityRegion = regionFactory.buildEntityRegion( cacheRegionName, properties, CacheDataDescriptionImpl.decode( model ) );
accessStrategy = entityRegion.buildAccessStrategy( accessType );
entityAccessStrategies.put( cacheRegionName, accessStrategy );
cacheAccess.addCacheRegion( cacheRegionName, entityRegion );
}
}
NaturalIdRegionAccessStrategy naturalIdAccessStrategy = null;
if ( model.hasNaturalId() && model.getNaturalIdCacheRegionName() != null ) {
final String naturalIdCacheRegionName = cacheRegionPrefix + model.getNaturalIdCacheRegionName();
naturalIdAccessStrategy = ( NaturalIdRegionAccessStrategy ) entityAccessStrategies.get( naturalIdCacheRegionName );
if ( naturalIdAccessStrategy == null && settings.isSecondLevelCacheEnabled() ) {
final CacheDataDescriptionImpl cacheDataDescription = CacheDataDescriptionImpl.decode( model );
NaturalIdRegion naturalIdRegion = null;
try {
naturalIdRegion = regionFactory.buildNaturalIdRegion( naturalIdCacheRegionName, properties,
cacheDataDescription );
}
catch ( UnsupportedOperationException e ) {
LOG.warnf(
"Shared cache region factory [%s] does not support natural id caching; " +
"shared NaturalId caching will be disabled for not be enabled for %s",
regionFactory.getClass().getName(),
model.getEntityName()
);
}
if (naturalIdRegion != null) {
naturalIdAccessStrategy = naturalIdRegion.buildAccessStrategy( regionFactory.getDefaultAccessType() );
entityAccessStrategies.put( naturalIdCacheRegionName, naturalIdAccessStrategy );
cacheAccess.addCacheRegion( naturalIdCacheRegionName, naturalIdRegion );
}
}
}
EntityPersister cp = persisterFactory.createEntityPersister(
model,
accessStrategy,
naturalIdAccessStrategy,
this,
mapping
);
entityPersisters.put( model.getEntityName(), cp );
classMeta.put( model.getEntityName(), cp.getClassMetadata() );
}
this.classMetadata = Collections.unmodifiableMap(classMeta);
Map<String,Set<String>> tmpEntityToCollectionRoleMap = new HashMap<String,Set<String>>();
collectionPersisters = new HashMap<String,CollectionPersister>();
Map<String,CollectionMetadata> tmpCollectionMetadata = new HashMap<String,CollectionMetadata>();
Iterator collections = cfg.getCollectionMappings();
while ( collections.hasNext() ) {
Collection model = (Collection) collections.next();
final String cacheRegionName = cacheRegionPrefix + model.getCacheRegionName();
final AccessType accessType = AccessType.fromExternalName( model.getCacheConcurrencyStrategy() );
CollectionRegionAccessStrategy accessStrategy = null;
if ( accessType != null && settings.isSecondLevelCacheEnabled() ) {
LOG.tracev( "Building shared cache region for collection data [{0}]", model.getRole() );
CollectionRegion collectionRegion = regionFactory.buildCollectionRegion( cacheRegionName, properties, CacheDataDescriptionImpl
.decode( model ) );
accessStrategy = collectionRegion.buildAccessStrategy( accessType );
entityAccessStrategies.put( cacheRegionName, accessStrategy );
cacheAccess.addCacheRegion( cacheRegionName, collectionRegion );
}
CollectionPersister persister = persisterFactory.createCollectionPersister(
cfg,
model,
accessStrategy,
this
) ;
collectionPersisters.put( model.getRole(), persister );
tmpCollectionMetadata.put( model.getRole(), persister.getCollectionMetadata() );
Type indexType = persister.getIndexType();
if ( indexType != null && indexType.isAssociationType() && !indexType.isAnyType() ) {
String entityName = ( ( AssociationType ) indexType ).getAssociatedEntityName( this );
Set roles = tmpEntityToCollectionRoleMap.get( entityName );
if ( roles == null ) {
roles = new HashSet();
tmpEntityToCollectionRoleMap.put( entityName, roles );
}
roles.add( persister.getRole() );
}
Type elementType = persister.getElementType();
if ( elementType.isAssociationType() && !elementType.isAnyType() ) {
String entityName = ( ( AssociationType ) elementType ).getAssociatedEntityName( this );
Set roles = tmpEntityToCollectionRoleMap.get( entityName );
if ( roles == null ) {
roles = new HashSet();
tmpEntityToCollectionRoleMap.put( entityName, roles );
}
roles.add( persister.getRole() );
}
}
collectionMetadata = Collections.unmodifiableMap( tmpCollectionMetadata );
Iterator itr = tmpEntityToCollectionRoleMap.entrySet().iterator();
while ( itr.hasNext() ) {
final Map.Entry entry = ( Map.Entry ) itr.next();
entry.setValue( Collections.unmodifiableSet( ( Set ) entry.getValue() ) );
}
collectionRolesByEntityParticipant = Collections.unmodifiableMap( tmpEntityToCollectionRoleMap );
//Named Queries:
this.namedQueryRepository = new NamedQueryRepository(
cfg.getNamedQueries().values(),
cfg.getNamedSQLQueries().values(),
cfg.getSqlResultSetMappings().values(),
toProcedureCallMementos( cfg.getNamedProcedureCallMap(), cfg.getSqlResultSetMappings() )
);
// after *all* persisters and named queries are registered
for ( EntityPersister persister : entityPersisters.values() ) {
persister.generateEntityDefinition();
}
for ( EntityPersister persister : entityPersisters.values() ) {
persister.postInstantiate();
registerEntityNameResolvers( persister );
}
if ( sessionFactoryOptions.getEntityNameResolvers() != null ) {
for ( EntityNameResolver resolver : sessionFactoryOptions.getEntityNameResolvers() ) {
registerEntityNameResolver( resolver );
}
}
for ( CollectionPersister persister : collectionPersisters.values() ) {
persister.postInstantiate();
}
//JNDI + Serialization:
name = settings.getSessionFactoryName();
try {
uuid = (String) UUID_GENERATOR.generate(null, null);
}
catch (Exception e) {
throw new AssertionFailure("Could not generate UUID");
}
SessionFactoryRegistry.INSTANCE.addSessionFactory(
uuid,
name,
settings.isSessionFactoryNameAlsoJndiName(),
this,
serviceRegistry.getService( JndiService.class )
);
LOG.debug( "Instantiated session factory" );
settings.getMultiTableBulkIdStrategy().prepare(
jdbcServices,
buildLocalConnectionAccess(),
cfg.createMappings(),
cfg.buildMapping(),
properties
);
if ( settings.isAutoCreateSchema() ) {
new SchemaExport( serviceRegistry, cfg )
.setImportSqlCommandExtractor( serviceRegistry.getService( ImportSqlCommandExtractor.class ) )
.create( false, true );
}
if ( settings.isAutoUpdateSchema() ) {
new SchemaUpdate( serviceRegistry, cfg ).execute( false, true );
}
if ( settings.isAutoValidateSchema() ) {
new SchemaValidator( serviceRegistry, cfg ).validate();
}
if ( settings.isAutoDropSchema() ) {
schemaExport = new SchemaExport( serviceRegistry, cfg )
.setImportSqlCommandExtractor( serviceRegistry.getService( ImportSqlCommandExtractor.class ) );
}
currentSessionContext = buildCurrentSessionContext();
//checking for named queries
if ( settings.isNamedQueryStartupCheckingEnabled() ) {
final Map<String,HibernateException> errors = checkNamedQueries();
if ( ! errors.isEmpty() ) {
StringBuilder failingQueries = new StringBuilder( "Errors in named queries: " );
String sep = "";
for ( Map.Entry<String,HibernateException> entry : errors.entrySet() ) {
LOG.namedQueryError( entry.getKey(), entry.getValue() );
failingQueries.append( sep ).append( entry.getKey() );
sep = ", ";
}
throw new HibernateException( failingQueries.toString() );
}
}
// this needs to happen after persisters are all ready to go...
this.fetchProfiles = new HashMap();
itr = cfg.iterateFetchProfiles();
while ( itr.hasNext() ) {
final org.hibernate.mapping.FetchProfile mappingProfile =
( org.hibernate.mapping.FetchProfile ) itr.next();
final FetchProfile fetchProfile = new FetchProfile( mappingProfile.getName() );
for ( org.hibernate.mapping.FetchProfile.Fetch mappingFetch : mappingProfile.getFetches() ) {
// resolve the persister owning the fetch
final String entityName = getImportedClassName( mappingFetch.getEntity() );
final EntityPersister owner = entityName == null
? null
: entityPersisters.get( entityName );
if ( owner == null ) {
throw new HibernateException(
"Unable to resolve entity reference [" + mappingFetch.getEntity()
+ "] in fetch profile [" + fetchProfile.getName() + "]"
);
}
// validate the specified association fetch
Type associationType = owner.getPropertyType( mappingFetch.getAssociation() );
if ( associationType == null || !associationType.isAssociationType() ) {
throw new HibernateException( "Fetch profile [" + fetchProfile.getName() + "] specified an invalid association" );
}
// resolve the style
final Fetch.Style fetchStyle = Fetch.Style.parse( mappingFetch.getStyle() );
// then construct the fetch instance...
fetchProfile.addFetch( new Association( owner, mappingFetch.getAssociation() ), fetchStyle );
((Loadable) owner).registerAffectingFetchProfile( fetchProfile.getName() );
}
fetchProfiles.put( fetchProfile.getName(), fetchProfile );
}
this.transactionEnvironment = new TransactionEnvironmentImpl( this );
this.observer.sessionFactoryCreated( this );
final JpaMetaModelPopulationSetting jpaMetaModelPopulationSetting = determineJpaMetaModelPopulationSetting( cfg );
if ( jpaMetaModelPopulationSetting != JpaMetaModelPopulationSetting.DISABLED ) {
this.jpaMetamodel = org.hibernate.jpa.metamodel.internal.legacy.MetamodelImpl.buildMetamodel(
cfg.getClassMappings(),
this,
jpaMetaModelPopulationSetting == JpaMetaModelPopulationSetting.IGNORE_UNSUPPORTED
);
}
else {
jpaMetamodel = null;
}
}
// @SuppressWarnings( {"unchecked", "ThrowableResultOfMethodCallIgnored"})
// public SessionFactoryImpl(
// final Configuration cfg,
// Mapping mapping,
// final ServiceRegistry serviceRegistry,
// final Settings settings,
// final SessionFactoryObserver userObserver) throws HibernateException {
// LOG.debug( "Building session factory" );
//
// sessionFactoryOptions = new SessionFactoryOptions() {
// private final Interceptor interceptor;
// private final CustomEntityDirtinessStrategy customEntityDirtinessStrategy;
// private final CurrentTenantIdentifierResolver currentTenantIdentifierResolver;
// private final EntityNotFoundDelegate entityNotFoundDelegate;
//
// {
// interceptor = cfg.getInterceptor();
//
// customEntityDirtinessStrategy = serviceRegistry.getService( StrategySelector.class ).resolveDefaultableStrategy(
// CustomEntityDirtinessStrategy.class,
// cfg.getProperties().get( AvailableSettings.CUSTOM_ENTITY_DIRTINESS_STRATEGY ),
// DefaultCustomEntityDirtinessStrategy.INSTANCE
// );
//
// if ( cfg.getCurrentTenantIdentifierResolver() != null ) {
// currentTenantIdentifierResolver = cfg.getCurrentTenantIdentifierResolver();
// }
// else {
// currentTenantIdentifierResolver =serviceRegistry.getService( StrategySelector.class ).resolveStrategy(
// CurrentTenantIdentifierResolver.class,
// cfg.getProperties().get( AvailableSettings.MULTI_TENANT_IDENTIFIER_RESOLVER )
// );
// }
//
// if ( cfg.getEntityNotFoundDelegate() != null ) {
// entityNotFoundDelegate = cfg.getEntityNotFoundDelegate();
// }
// else {
// entityNotFoundDelegate = new EntityNotFoundDelegate() {
// public void handleEntityNotFound(String entityName, Serializable id) {
// throw new ObjectNotFoundException( id, entityName );
// }
// };
// }
// }
//
// @Override
// public StandardServiceRegistry getServiceRegistry() {
// return (StandardServiceRegistry) serviceRegistry;
// }
//
// @Override
// public Interceptor getInterceptor() {
// return interceptor;
// }
//
// @Override
// public CustomEntityDirtinessStrategy getCustomEntityDirtinessStrategy() {
// return customEntityDirtinessStrategy;
// }
//
// @Override
// public CurrentTenantIdentifierResolver getCurrentTenantIdentifierResolver() {
// return currentTenantIdentifierResolver;
// }
//
// @Override
// public SessionFactoryObserver[] getSessionFactoryObservers() {
// return userObserver == null
// ? new SessionFactoryObserver[0]
// : new SessionFactoryObserver[] { userObserver };
// }
//
// @Override
// public EntityNameResolver[] getEntityNameResolvers() {
// return new EntityNameResolver[0];
// }
//
// @Override
// public Settings getSettings() {
// return settings;
// }
//
// @Override
// public EntityNotFoundDelegate getEntityNotFoundDelegate() {
// if ( entityNotFoundDelegate == null ) {
// }
// return entityNotFoundDelegate;
// }
// };
//
// this.settings = settings;
//
// this.properties = new Properties();
// this.properties.putAll( cfg.getProperties() );
//
// this.serviceRegistry = serviceRegistry.getService( SessionFactoryServiceRegistryFactory.class ).buildServiceRegistry(
// this,
// cfg
// );
// this.jdbcServices = this.serviceRegistry.getService( JdbcServices.class );
// this.dialect = this.jdbcServices.getDialect();
// this.cacheAccess = this.serviceRegistry.getService( CacheImplementor.class );
// this.sqlFunctionRegistry = new SQLFunctionRegistry( getDialect(), cfg.getSqlFunctions() );
// for ( SessionFactoryObserver observer : sessionFactoryOptions.getSessionFactoryObservers() ) {
// this.observer.addObserver( observer );
// }
//
// this.typeResolver = cfg.getTypeResolver().scope( this );
// this.typeHelper = new TypeLocatorImpl( typeResolver );
//
// this.filters = new HashMap<String, FilterDefinition>();
// this.filters.putAll( cfg.getFilterDefinitions() );
//
// LOG.debugf( "Session factory constructed with filter configurations : %s", filters );
// LOG.debugf( "Instantiating session factory with properties: %s", properties );
//
//
// this.queryPlanCache = new QueryPlanCache( this );
//
// // todo : everything above here consider implementing as standard SF service. specifically: stats, caches, types, function-reg
//
// class IntegratorObserver implements SessionFactoryObserver {
// private ArrayList<Integrator> integrators = new ArrayList<Integrator>();
//
// @Override
// public void sessionFactoryCreated(SessionFactory factory) {
// }
//
// @Override
// public void sessionFactoryClosed(SessionFactory factory) {
// for ( Integrator integrator : integrators ) {
// integrator.disintegrate( SessionFactoryImpl.this, SessionFactoryImpl.this.serviceRegistry );
// }
// integrators.clear();
// }
// }
//
// final IntegratorObserver integratorObserver = new IntegratorObserver();
// this.observer.addObserver( integratorObserver );
// for ( Integrator integrator : serviceRegistry.getService( IntegratorService.class ).getIntegrators() ) {
// integrator.integrate( cfg, this, this.serviceRegistry );
// integratorObserver.integrators.add( integrator );
// }
//
// //Generators:
//
// identifierGenerators = new HashMap();
// Iterator classes = cfg.getClassMappings();
// while ( classes.hasNext() ) {
// PersistentClass model = (PersistentClass) classes.next();
// if ( !model.isInherited() ) {
// IdentifierGenerator generator = model.getIdentifier().createIdentifierGenerator(
// cfg.getIdentifierGeneratorFactory(),
// getDialect(),
// settings.getDefaultCatalogName(),
// settings.getDefaultSchemaName(),
// (RootClass) model
// );
// identifierGenerators.put( model.getEntityName(), generator );
// }
// }
//
// imports = new HashMap<String,String>( cfg.getImports() );
//
// ///////////////////////////////////////////////////////////////////////
// // Prepare persisters and link them up with their cache
// // region/access-strategy
//
// final RegionFactory regionFactory = cacheAccess.getRegionFactory();
// final String cacheRegionPrefix = settings.getCacheRegionPrefix() == null ? "" : settings.getCacheRegionPrefix() + ".";
// final PersisterFactory persisterFactory = serviceRegistry.getService( PersisterFactory.class );
//
// // todo : consider removing this silliness and just have EntityPersister directly implement ClassMetadata
// // EntityPersister.getClassMetadata() for the internal impls simply "return this";
// // collapsing those would allow us to remove this "extra" Map
// //
// // todo : similar for CollectionPersister/CollectionMetadata
//
// entityPersisters = new HashMap();
// Map entityAccessStrategies = new HashMap();
// Map<String,ClassMetadata> classMeta = new HashMap<String,ClassMetadata>();
// classes = cfg.getClassMappings();
// while ( classes.hasNext() ) {
// final PersistentClass model = (PersistentClass) classes.next();
// model.prepareTemporaryTables( mapping, getDialect() );
// final String cacheRegionName = cacheRegionPrefix + model.getRootClass().getCacheRegionName();
// // cache region is defined by the root-class in the hierarchy...
// EntityRegionAccessStrategy accessStrategy = ( EntityRegionAccessStrategy ) entityAccessStrategies.get( cacheRegionName );
// if ( accessStrategy == null && settings.isSecondLevelCacheEnabled() ) {
// final AccessType accessType = AccessType.fromExternalName( model.getCacheConcurrencyStrategy() );
// if ( accessType != null ) {
// LOG.tracef( "Building shared cache region for entity data [%s]", model.getEntityName() );
// EntityRegion entityRegion = regionFactory.buildEntityRegion( cacheRegionName, properties, CacheDataDescriptionImpl.decode( model ) );
// accessStrategy = entityRegion.buildAccessStrategy( accessType );
// entityAccessStrategies.put( cacheRegionName, accessStrategy );
// cacheAccess.addCacheRegion( cacheRegionName, entityRegion );
// }
// }
//
// NaturalIdRegionAccessStrategy naturalIdAccessStrategy = null;
// if ( model.hasNaturalId() && model.getNaturalIdCacheRegionName() != null ) {
// final String naturalIdCacheRegionName = cacheRegionPrefix + model.getNaturalIdCacheRegionName();
// naturalIdAccessStrategy = ( NaturalIdRegionAccessStrategy ) entityAccessStrategies.get( naturalIdCacheRegionName );
//
// if ( naturalIdAccessStrategy == null && settings.isSecondLevelCacheEnabled() ) {
// final CacheDataDescriptionImpl cacheDataDescription = CacheDataDescriptionImpl.decode( model );
//
// NaturalIdRegion naturalIdRegion = null;
// try {
// naturalIdRegion = regionFactory.buildNaturalIdRegion( naturalIdCacheRegionName, properties,
// cacheDataDescription );
// }
// catch ( UnsupportedOperationException e ) {
// LOG.warnf(
// "Shared cache region factory [%s] does not support natural id caching; " +
// "shared NaturalId caching will be disabled for not be enabled for %s",
// regionFactory.getClass().getName(),
// model.getEntityName()
// );
// }
//
// if (naturalIdRegion != null) {
// naturalIdAccessStrategy = naturalIdRegion.buildAccessStrategy( regionFactory.getDefaultAccessType() );
// entityAccessStrategies.put( naturalIdCacheRegionName, naturalIdAccessStrategy );
// cacheAccess.addCacheRegion( naturalIdCacheRegionName, naturalIdRegion );
// }
// }
// }
//
// EntityPersister cp = persisterFactory.createEntityPersister(
// model,
// accessStrategy,
// naturalIdAccessStrategy,
// this,
// mapping
// );
// entityPersisters.put( model.getEntityName(), cp );
// classMeta.put( model.getEntityName(), cp.getClassMetadata() );
// }
// this.classMetadata = Collections.unmodifiableMap(classMeta);
//
// Map<String,Set<String>> tmpEntityToCollectionRoleMap = new HashMap<String,Set<String>>();
// collectionPersisters = new HashMap<String,CollectionPersister>();
// Map<String,CollectionMetadata> tmpCollectionMetadata = new HashMap<String,CollectionMetadata>();
// Iterator collections = cfg.getCollectionMappings();
// while ( collections.hasNext() ) {
// Collection model = (Collection) collections.next();
// final String cacheRegionName = cacheRegionPrefix + model.getCacheRegionName();
// final AccessType accessType = AccessType.fromExternalName( model.getCacheConcurrencyStrategy() );
// CollectionRegionAccessStrategy accessStrategy = null;
// if ( accessType != null && settings.isSecondLevelCacheEnabled() ) {
// LOG.tracev( "Building shared cache region for collection data [{0}]", model.getRole() );
// CollectionRegion collectionRegion = regionFactory.buildCollectionRegion( cacheRegionName, properties, CacheDataDescriptionImpl
// .decode( model ) );
// accessStrategy = collectionRegion.buildAccessStrategy( accessType );
// entityAccessStrategies.put( cacheRegionName, accessStrategy );
// cacheAccess.addCacheRegion( cacheRegionName, collectionRegion );
// }
// CollectionPersister persister = persisterFactory.createCollectionPersister(
// cfg,
// model,
// accessStrategy,
// this
// ) ;
// collectionPersisters.put( model.getRole(), persister );
// tmpCollectionMetadata.put( model.getRole(), persister.getCollectionMetadata() );
// Type indexType = persister.getIndexType();
// if ( indexType != null && indexType.isAssociationType() && !indexType.isAnyType() ) {
// String entityName = ( ( AssociationType ) indexType ).getAssociatedEntityName( this );
// Set roles = tmpEntityToCollectionRoleMap.get( entityName );
// if ( roles == null ) {
// roles = new HashSet();
// tmpEntityToCollectionRoleMap.put( entityName, roles );
// }
// roles.add( persister.getRole() );
// }
// Type elementType = persister.getElementType();
// if ( elementType.isAssociationType() && !elementType.isAnyType() ) {
// String entityName = ( ( AssociationType ) elementType ).getAssociatedEntityName( this );
// Set roles = tmpEntityToCollectionRoleMap.get( entityName );
// if ( roles == null ) {
// roles = new HashSet();
// tmpEntityToCollectionRoleMap.put( entityName, roles );
// }
// roles.add( persister.getRole() );
// }
// }
// collectionMetadata = Collections.unmodifiableMap( tmpCollectionMetadata );
// Iterator itr = tmpEntityToCollectionRoleMap.entrySet().iterator();
// while ( itr.hasNext() ) {
// final Map.Entry entry = ( Map.Entry ) itr.next();
// entry.setValue( Collections.unmodifiableSet( ( Set ) entry.getValue() ) );
// }
// collectionRolesByEntityParticipant = Collections.unmodifiableMap( tmpEntityToCollectionRoleMap );
//
// //Named Queries:
// this.namedQueryRepository = new NamedQueryRepository(
// cfg.getNamedQueries().values(),
// cfg.getNamedSQLQueries().values(),
// cfg.getSqlResultSetMappings().values(),
// toProcedureCallMementos( cfg.getNamedProcedureCallMap(), cfg.getSqlResultSetMappings() )
// );
//
// // after *all* persisters and named queries are registered
// for ( EntityPersister persister : entityPersisters.values() ) {
// persister.generateEntityDefinition();
// }
//
// for ( EntityPersister persister : entityPersisters.values() ) {
// persister.postInstantiate();
// registerEntityNameResolvers( persister );
// }
// if ( sessionFactoryOptions.getEntityNameResolvers() != null ) {
// for ( EntityNameResolver resolver : sessionFactoryOptions.getEntityNameResolvers() ) {
// registerEntityNameResolver( resolver );
// }
// }
//
// for ( CollectionPersister persister : collectionPersisters.values() ) {
// persister.postInstantiate();
// }
//
// //JNDI + Serialization:
//
// name = settings.getSessionFactoryName();
// try {
// uuid = (String) UUID_GENERATOR.generate(null, null);
// }
// catch (Exception e) {
// throw new AssertionFailure("Could not generate UUID");
// }
// SessionFactoryRegistry.INSTANCE.addSessionFactory(
// uuid,
// name,
// settings.isSessionFactoryNameAlsoJndiName(),
// this,
// serviceRegistry.getService( JndiService.class )
// );
//
// LOG.debug( "Instantiated session factory" );
//
// settings.getMultiTableBulkIdStrategy().prepare(
// jdbcServices,
// buildLocalConnectionAccess(),
// cfg.createMappings(),
// cfg.buildMapping(),
// properties
// );
//
//
// if ( settings.isAutoCreateSchema() ) {
// new SchemaExport( serviceRegistry, cfg )
// .setImportSqlCommandExtractor( serviceRegistry.getService( ImportSqlCommandExtractor.class ) )
// .create( false, true );
// }
// if ( settings.isAutoUpdateSchema() ) {
// new SchemaUpdate( serviceRegistry, cfg ).execute( false, true );
// }
// if ( settings.isAutoValidateSchema() ) {
// new SchemaValidator( serviceRegistry, cfg ).validate();
// }
// if ( settings.isAutoDropSchema() ) {
// schemaExport = new SchemaExport( serviceRegistry, cfg )
// .setImportSqlCommandExtractor( serviceRegistry.getService( ImportSqlCommandExtractor.class ) );
// }
//
// currentSessionContext = buildCurrentSessionContext();
//
// //checking for named queries
// if ( settings.isNamedQueryStartupCheckingEnabled() ) {
// final Map<String,HibernateException> errors = checkNamedQueries();
// if ( ! errors.isEmpty() ) {
// StringBuilder failingQueries = new StringBuilder( "Errors in named queries: " );
// String sep = "";
// for ( Map.Entry<String,HibernateException> entry : errors.entrySet() ) {
// LOG.namedQueryError( entry.getKey(), entry.getValue() );
// failingQueries.append( sep ).append( entry.getKey() );
// sep = ", ";
// }
// throw new HibernateException( failingQueries.toString() );
// }
// }
//
// // this needs to happen after persisters are all ready to go...
// this.fetchProfiles = new HashMap();
// itr = cfg.iterateFetchProfiles();
// while ( itr.hasNext() ) {
// final org.hibernate.mapping.FetchProfile mappingProfile =
// ( org.hibernate.mapping.FetchProfile ) itr.next();
// final FetchProfile fetchProfile = new FetchProfile( mappingProfile.getName() );
// for ( org.hibernate.mapping.FetchProfile.Fetch mappingFetch : mappingProfile.getFetches() ) {
// // resolve the persister owning the fetch
// final String entityName = getImportedClassName( mappingFetch.getEntity() );
// final EntityPersister owner = entityName == null
// ? null
// : entityPersisters.get( entityName );
// if ( owner == null ) {
// throw new HibernateException(
// "Unable to resolve entity reference [" + mappingFetch.getEntity()
// + "] in fetch profile [" + fetchProfile.getName() + "]"
// );
// }
//
// // validate the specified association fetch
// Type associationType = owner.getPropertyType( mappingFetch.getAssociation() );
// if ( associationType == null || !associationType.isAssociationType() ) {
// throw new HibernateException( "Fetch profile [" + fetchProfile.getName() + "] specified an invalid association" );
// }
//
// // resolve the style
// final Fetch.Style fetchStyle = Fetch.Style.parse( mappingFetch.getStyle() );
//
// // then construct the fetch instance...
// fetchProfile.addFetch( new Association( owner, mappingFetch.getAssociation() ), fetchStyle );
// ((Loadable) owner).registerAffectingFetchProfile( fetchProfile.getName() );
// }
// fetchProfiles.put( fetchProfile.getName(), fetchProfile );
// }
//
// this.transactionEnvironment = new TransactionEnvironmentImpl( this );
// this.observer.sessionFactoryCreated( this );
//
// final JpaMetaModelPopulationSetting jpaMetaModelPopulationSetting = determineJpaMetaModelPopulationSetting( cfg );
// if ( jpaMetaModelPopulationSetting != JpaMetaModelPopulationSetting.DISABLED ) {
// this.jpaMetamodel = org.hibernate.jpa.metamodel.internal.legacy.MetamodelImpl.buildMetamodel(
// cfg.getClassMappings(),
// this,
// jpaMetaModelPopulationSetting == JpaMetaModelPopulationSetting.IGNORE_UNSUPPORTED
// );
// }
// else {
// jpaMetamodel = null;
// }
// }
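	// NOTE: the commented-out block above is the body of the legacy Configuration-driven bootstrap,
	// presumably kept here for reference; the MetadataImplementor-based constructor further below
	// performs the equivalent persister / cache-region / named-query wiring.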
private Map<String, ProcedureCallMemento> toProcedureCallMementos(
Map<String, NamedProcedureCallDefinition> definitions,
@ -695,12 +681,6 @@ public boolean supportsAggressiveRelease() {
};
}
protected JpaMetaModelPopulationSetting determineJpaMetaModelPopulationSetting(Configuration cfg) {
final String setting = cfg.getProperties().getProperty( AvailableSettings.JPA_METAMODEL_POPULATION );
return JpaMetaModelPopulationSetting.parse( setting );
}
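	// The population mode is driven by the AvailableSettings.JPA_METAMODEL_POPULATION property; as the
	// legacy path above shows, DISABLED skips building the JPA metamodel entirely, while IGNORE_UNSUPPORTED
	// is passed through to the metamodel builder (presumably so it can skip mappings the JPA metamodel
	// cannot represent).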
@SuppressWarnings( {"ThrowableResultOfMethodCallIgnored"})
public SessionFactoryImpl(
MetadataImplementor metadata,
@ -793,13 +773,6 @@ public void sessionFactoryClosed(SessionFactory factory) {
// Prepare persisters and link them up with their cache
// region/access-strategy
final MetamodelBuilder jpaMetamodelBuilder = new MetamodelBuilder(
this,
JpaMetaModelPopulationSetting.parse(
properties.getProperty( AvailableSettings.JPA_METAMODEL_POPULATION )
)
);
StringBuilder stringBuilder = new StringBuilder();
if ( settings.getCacheRegionPrefix() != null) {
stringBuilder
@ -878,7 +851,6 @@ public void sessionFactoryClosed(SessionFactory factory) {
);
entityPersisters.put( model.getEntity().getName(), cp );
classMeta.put( model.getEntity().getName(), cp.getClassMetadata() );
jpaMetamodelBuilder.add( model );
}
this.classMetadata = Collections.unmodifiableMap(classMeta);
@ -1086,8 +1058,6 @@ public void sessionFactoryClosed(SessionFactory factory) {
this.transactionEnvironment = new TransactionEnvironmentImpl( this );
this.observer.sessionFactoryCreated( this );
this.jpaMetamodel = jpaMetamodelBuilder.buildMetamodel();
}
@SuppressWarnings( {"unchecked"} )

View File

@ -24,10 +24,7 @@
package org.hibernate.internal.util.xml;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.net.URL;
import javax.xml.XMLConstants;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLInputFactory;
@ -36,13 +33,15 @@
import javax.xml.stream.events.Attribute;
import javax.xml.stream.events.XMLEvent;
import javax.xml.transform.stax.StAXSource;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import org.hibernate.InvalidMappingException;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.xml.internal.stax.BufferedXMLEventReader;
import org.hibernate.xml.internal.stax.FilteringXMLEventReader;
import org.hibernate.xml.internal.stax.LocalXmlResourceResolver;
import org.hibernate.xml.internal.stax.SupportedOrmXsdVersion;
import org.hibernate.xml.internal.stax.XmlInfrastructureException;
import org.jboss.logging.Logger;
@ -221,92 +220,6 @@ private void validateMapping(SupportedOrmXsdVersion xsdVersion, XMLEventReader s
}
}
public static enum SupportedOrmXsdVersion {
ORM_1_0( "org/hibernate/jpa/orm_1_0.xsd" ),
ORM_2_0( "org/hibernate/jpa/orm_2_0.xsd" ),
ORM_2_1( "org/hibernate/jpa/orm_2_1.xsd" ),
HBM_4_0( "org/hibernate/hibernate-mapping-4.0.xsd");
private final String schemaResourceName;
private SupportedOrmXsdVersion(String schemaResourceName) {
this.schemaResourceName = schemaResourceName;
}
public static SupportedOrmXsdVersion parse(String name, Origin origin) {
if ( "1.0".equals( name ) ) {
return ORM_1_0;
}
else if ( "2.0".equals( name ) ) {
return ORM_2_0;
}
else if ( "2.1".equals( name ) ) {
return ORM_2_1;
}
else if ( "4.0".equals( name ) ) {
return HBM_4_0;
}
throw new UnsupportedOrmXsdVersionException( name, origin );
}
private URL schemaUrl;
public URL getSchemaUrl() {
if ( schemaUrl == null ) {
schemaUrl = resolveLocalSchemaUrl( schemaResourceName );
}
return schemaUrl;
}
private Schema schema;
public Schema getSchema() {
if ( schema == null ) {
schema = resolveLocalSchema( getSchemaUrl() );
}
return schema;
}
}
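	// For example, a mapping document declaring version "2.1" resolves to ORM_2_1 via parse(), and the
	// corresponding schema URL and Schema are resolved lazily from the classpath on the first call to
	// getSchemaUrl()/getSchema().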
public static URL resolveLocalSchemaUrl(String schemaName) {
URL url = MappingReader.class.getClassLoader().getResource( schemaName );
if ( url == null ) {
throw new XmlInfrastructureException( "Unable to locate schema [" + schemaName + "] via classpath" );
}
return url;
}
public static Schema resolveLocalSchema(String schemaName){
return resolveLocalSchema( resolveLocalSchemaUrl( schemaName ) );
}
public static Schema resolveLocalSchema(URL schemaUrl) {
try {
InputStream schemaStream = schemaUrl.openStream();
try {
				StreamSource source = new StreamSource( schemaStream );
SchemaFactory schemaFactory = SchemaFactory.newInstance( XMLConstants.W3C_XML_SCHEMA_NS_URI );
return schemaFactory.newSchema(source);
}
catch ( Exception e ) {
throw new XmlInfrastructureException( "Unable to load schema [" + schemaUrl.toExternalForm() + "]", e );
}
finally {
try {
schemaStream.close();
}
catch ( IOException e ) {
LOG.debugf( "Problem closing schema stream - %s", e.toString() );
}
}
}
catch ( IOException e ) {
throw new XmlInfrastructureException( "Stream error handling schema url [" + schemaUrl.toExternalForm() + "]" );
}
}
public XmlDocument readMappingDocument(EntityResolver entityResolver, InputSource source, Origin origin) {
return legacyReadMappingDocument( entityResolver, source, origin );

View File

@ -1,131 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2009 by Red Hat Inc and/or its affiliates or by
* third-party contributors as indicated by either @author tags or express
* copyright attribution statements applied by the authors. All
* third-party contributions are distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import javax.persistence.metamodel.Attribute;
import javax.persistence.metamodel.ManagedType;
import org.hibernate.internal.util.ReflectHelper;
/**
* Models the commonality of the JPA {@link Attribute} hierarchy.
*
* @author Steve Ebersole
*/
public abstract class AbstractAttribute<X, Y> implements Attribute<X, Y>, Serializable {
private final String name;
private final Class<Y> javaType;
private final AbstractManagedType<X> declaringType;
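	// java.lang.reflect.Member is not Serializable, so the member is held in a transient field and
	// re-resolved from the declaring-class and member names written out by writeObject()/readObject() below.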
private transient Member member;
private final PersistentAttributeType persistentAttributeType;
public AbstractAttribute(
String name,
Class<Y> javaType,
AbstractManagedType<X> declaringType,
Member member,
PersistentAttributeType persistentAttributeType) {
this.name = name;
this.javaType = javaType;
this.declaringType = declaringType;
this.member = member;
this.persistentAttributeType = persistentAttributeType;
}
@Override
public String getName() {
return name;
}
@Override
public ManagedType<X> getDeclaringType() {
return declaringType;
}
@Override
public Class<Y> getJavaType() {
return javaType;
}
@Override
public Member getJavaMember() {
return member;
}
@Override
public PersistentAttributeType getPersistentAttributeType() {
return persistentAttributeType;
}
/**
* Used by JDK serialization...
*
* @param oos The output stream to which we are being written...
* @throws IOException Indicates a general IO stream exception
*/
protected void writeObject(ObjectOutputStream oos) throws IOException {
oos.defaultWriteObject();
oos.writeObject( getJavaMember().getDeclaringClass().getName() );
oos.writeObject( getJavaMember().getName() );
// should only ever be a field or the getter-method...
oos.writeObject( Method.class.isInstance( getJavaMember() ) ? "method" : "field" );
}
/**
* Used by JDK serialization...
*
* @param ois The input stream from which we are being read...
* @throws java.io.IOException Indicates a general IO stream exception
* @throws ClassNotFoundException Indicates a class resolution issue
*/
protected void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
ois.defaultReadObject();
final String memberDeclaringClassName = ( String ) ois.readObject();
final String memberName = ( String ) ois.readObject();
final String memberType = ( String ) ois.readObject();
final Class memberDeclaringClass = Class.forName(
memberDeclaringClassName,
false,
declaringType.getJavaType().getClassLoader()
);
try {
this.member = "method".equals( memberType )
? memberDeclaringClass.getMethod( memberName, ReflectHelper.NO_PARAM_SIGNATURE )
: memberDeclaringClass.getField( memberName );
}
catch ( Exception e ) {
throw new IllegalStateException(
"Unable to locate member [" + memberDeclaringClassName + "#"
+ memberName + "]"
);
}
}
}

View File

@ -1,302 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2009 by Red Hat Inc and/or its affiliates or by
* third-party contributors as indicated by either @author tags or express
* copyright attribution statements applied by the authors. All
* third-party contributions are distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.metamodel.Attribute;
import javax.persistence.metamodel.IdentifiableType;
import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.metamodel.Type;
/**
* Defines commonality for the JPA {@link IdentifiableType} types. JPA defines
* identifiable types as entities or mapped-superclasses. Basically things to which an
* identifier can be attached.
* <p/>
* NOTE : Currently we only really have support for direct entities in the Hibernate metamodel
* as the information for them is consumed into the closest actual entity subclass(es) in the
* internal Hibernate mapping-metamodel.
*
* @author Steve Ebersole
*/
public abstract class AbstractIdentifiableType<X>
extends AbstractManagedType<X>
implements IdentifiableType<X>, Serializable {
private final boolean hasIdentifierProperty;
private final boolean isVersioned;
private SingularAttributeImpl<X, ?> id;
private SingularAttributeImpl<X, ?> version;
private Set<SingularAttribute<? super X,?>> idClassAttributes;
public AbstractIdentifiableType(
Class<X> javaType,
AbstractIdentifiableType<? super X> superType,
boolean hasIdentifierProperty,
boolean versioned) {
super( javaType, superType );
this.hasIdentifierProperty = hasIdentifierProperty;
isVersioned = versioned;
}
@Override
public AbstractIdentifiableType<? super X> getSupertype() {
return ( AbstractIdentifiableType<? super X> ) super.getSupertype();
}
/**
* Indicates if a non-null super type is required to provide the
* identifier attribute(s) if this object does not have a declared
* identifier.
* @return true, if a non-null super type is required to provide
* the identifier attribute(s) if this object does not have a
* declared identifier; false, otherwise.
*/
protected abstract boolean requiresSupertypeForNonDeclaredIdentifier();
protected AbstractIdentifiableType<? super X> requireSupertype() {
if ( getSupertype() == null ) {
throw new IllegalStateException( "No supertype found" );
}
return getSupertype();
}
@Override
public boolean hasSingleIdAttribute() {
return hasIdentifierProperty;
}
@Override
@SuppressWarnings({ "unchecked" })
public <Y> SingularAttribute<? super X, Y> getId(Class<Y> javaType) {
final SingularAttribute<? super X, Y> id_;
if ( id != null ) {
checkSimpleId();
id_ = ( SingularAttribute<? super X, Y> ) id;
if ( javaType != id.getJavaType() ) {
throw new IllegalArgumentException( "Id attribute was not of specified type : " + javaType.getName() );
}
}
else {
//yuk yuk bad me
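			// the id is not declared locally: types that require a supertype for a non-declared identifier
			// (entities; see EntityTypeImpl#requiresSupertypeForNonDeclaredIdentifier) treat a missing
			// supertype as an error, otherwise the lookup simply walks up only when a supertype is present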
if ( ! requiresSupertypeForNonDeclaredIdentifier()) {
final AbstractIdentifiableType<? super X> supertype = getSupertype();
if (supertype != null) {
id_ = supertype.getId( javaType );
}
else {
id_ = null;
}
}
else {
id_ = requireSupertype().getId( javaType );
}
}
return id_;
}
/**
* Centralized check to ensure the id for this hierarchy is a simple one (i.e., does not use
* an id-class).
*
* @see #checkIdClass()
*/
protected void checkSimpleId() {
if ( ! hasIdentifierProperty ) {
throw new IllegalStateException( "This class uses an @IdClass" );
}
}
@Override
@SuppressWarnings({ "unchecked" })
public <Y> SingularAttribute<X, Y> getDeclaredId(Class<Y> javaType) {
checkDeclaredId();
checkSimpleId();
if ( javaType != id.getJavaType() ) {
throw new IllegalArgumentException( "Id attribute was not of specified type : " + javaType.getName() );
}
return (SingularAttribute<X, Y>) id;
}
/**
* Centralized check to ensure the id is actually declared on the class mapped here, as opposed to a
* super class.
*/
protected void checkDeclaredId() {
if ( id == null ) {
throw new IllegalArgumentException( "The id attribute is not declared on this type" );
}
}
@Override
public Type<?> getIdType() {
if ( id != null ) {
checkSimpleId();
return id.getType();
}
else {
return requireSupertype().getIdType();
}
}
private boolean hasIdClassAttributesDefined() {
return idClassAttributes != null ||
( getSupertype() != null && getSupertype().hasIdClassAttributesDefined() );
}
@Override
public Set<SingularAttribute<? super X, ?>> getIdClassAttributes() {
if ( idClassAttributes != null ) {
checkIdClass();
}
else {
// Java does not allow casting requireSupertype().getIdClassAttributes()
// to Set<SingularAttribute<? super X, ?>> because the
// superclass X is a different Java type from this X
			// (i.e., getSupertype().getJavaType() != getJavaType()).
// It will, however, allow a Set<SingularAttribute<? super X, ?>>
// to be initialized with requireSupertype().getIdClassAttributes(),
// since getSupertype().getJavaType() is a superclass of getJavaType()
if ( requiresSupertypeForNonDeclaredIdentifier() ) {
idClassAttributes = new HashSet<SingularAttribute<? super X, ?>>( requireSupertype().getIdClassAttributes() );
}
else if ( getSupertype() != null && hasIdClassAttributesDefined() ) {
idClassAttributes = new HashSet<SingularAttribute<? super X, ?>>( getSupertype().getIdClassAttributes() );
}
}
return idClassAttributes;
}
/**
* Centralized check to ensure the id for this hierarchy uses an id-class.
*
* @see #checkSimpleId()
*/
private void checkIdClass() {
if ( hasIdentifierProperty ) {
throw new IllegalArgumentException( "This class does not use @IdClass" );
}
}
@Override
public boolean hasVersionAttribute() {
return isVersioned;
}
public boolean hasDeclaredVersionAttribute() {
return isVersioned && version != null;
}
@Override
@SuppressWarnings({ "unchecked" })
public <Y> SingularAttribute<? super X, Y> getVersion(Class<Y> javaType) {
if ( ! hasVersionAttribute() ) {
return null;
}
final SingularAttribute<? super X, Y> version_;
if ( version != null ) {
version_ = ( SingularAttribute<? super X, Y> ) version;
if ( javaType != version.getJavaType() ) {
throw new IllegalArgumentException( "Version attribute was not of specified type : " + javaType.getName() );
}
}
else {
version_ = requireSupertype().getVersion( javaType );
}
return version_;
}
@Override
@SuppressWarnings({ "unchecked" })
public <Y> SingularAttribute<X, Y> getDeclaredVersion(Class<Y> javaType) {
checkDeclaredVersion();
if ( javaType != version.getJavaType() ) {
throw new IllegalArgumentException( "Version attribute was not of specified type : " + javaType.getName() );
}
return ( SingularAttribute<X, Y> ) version;
}
/**
	 * Used to retrieve the declared version attribute when populating the static metamodel.
	 *
	 * @return The declared version attribute
*/
public SingularAttribute<X, ?> getDeclaredVersion() {
checkDeclaredVersion();
return version;
}
/**
* Centralized check to ensure the version (if one) is actually declared on the class mapped here, as opposed to a
* super class.
*/
protected void checkDeclaredVersion() {
if ( version == null || ( getSupertype() != null && getSupertype().hasVersionAttribute() )) {
throw new IllegalArgumentException( "The version attribute is not declared on this type" );
}
}
@Override
public Builder<X> getBuilder() {
final AbstractManagedType.Builder<X> managedBuilder = super.getBuilder();
return new Builder<X>() {
@Override
public void applyIdAttribute(SingularAttributeImpl<X, ?> idAttribute) {
AbstractIdentifiableType.this.id = idAttribute;
managedBuilder.addAttribute( idAttribute );
}
@Override
public void applyIdClassAttributes(Set<SingularAttribute<? super X,?>> idClassAttributes) {
for ( SingularAttribute<? super X,?> idClassAttribute : idClassAttributes ) {
if ( AbstractIdentifiableType.this == idClassAttribute.getDeclaringType() ) {
@SuppressWarnings({ "unchecked" })
SingularAttribute<X,?> declaredAttribute = ( SingularAttribute<X,?> ) idClassAttribute;
addAttribute( declaredAttribute );
}
}
AbstractIdentifiableType.this.idClassAttributes = idClassAttributes;
}
@Override
public void applyVersionAttribute(SingularAttributeImpl<X, ?> versionAttribute) {
AbstractIdentifiableType.this.version = versionAttribute;
managedBuilder.addAttribute( versionAttribute );
}
@Override
public void addAttribute(Attribute<X, ?> attribute) {
managedBuilder.addAttribute( attribute );
}
};
}
public static interface Builder<X> extends AbstractManagedType.Builder<X> {
public void applyIdAttribute(SingularAttributeImpl<X,?> idAttribute);
public void applyIdClassAttributes(Set<SingularAttribute<? super X,?>> idClassAttributes);
public void applyVersionAttribute(SingularAttributeImpl<X,?> versionAttribute);
}
}

View File

@ -1,480 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2009 by Red Hat Inc and/or its affiliates or by
* third-party contributors as indicated by either @author tags or express
* copyright attribution statements applied by the authors. All
* third-party contributions are distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal;
import java.io.Serializable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.persistence.metamodel.Attribute;
import javax.persistence.metamodel.Bindable;
import javax.persistence.metamodel.CollectionAttribute;
import javax.persistence.metamodel.ListAttribute;
import javax.persistence.metamodel.ManagedType;
import javax.persistence.metamodel.MapAttribute;
import javax.persistence.metamodel.PluralAttribute;
import javax.persistence.metamodel.SetAttribute;
import javax.persistence.metamodel.SingularAttribute;
import org.hibernate.annotations.common.AssertionFailure;
/**
* Defines commonality for the JPA {@link ManagedType} hierarchy of interfaces.
*
* @author Steve Ebersole
*/
public abstract class AbstractManagedType<X>
extends AbstractType<X>
implements ManagedType<X>, Serializable {
private final AbstractManagedType<? super X> superType;
private final Map<String,Attribute<X, ?>> declaredAttributes
= new HashMap<String, Attribute<X,?>>();
private final Map<String, SingularAttribute<X, ?>> declaredSingularAttributes
= new HashMap<String, SingularAttribute<X,?>>();
private final Map<String, PluralAttribute<X, ?, ?>> declaredPluralAttributes
= new HashMap<String, PluralAttribute<X,?,?>>();
protected AbstractManagedType(Class<X> javaType, AbstractManagedType<? super X> superType) {
super( javaType );
this.superType = superType;
}
public AbstractManagedType<? super X> getSupertype() {
return superType;
}
private boolean locked = false;
public Builder<X> getBuilder() {
if ( locked ) {
throw new IllegalStateException( "Type has been locked" );
}
return new Builder<X>() {
@Override
public void addAttribute(Attribute<X,?> attribute) {
declaredAttributes.put( attribute.getName(), attribute );
final Bindable.BindableType bindableType = ( ( Bindable ) attribute ).getBindableType();
switch ( bindableType ) {
case SINGULAR_ATTRIBUTE : {
declaredSingularAttributes.put( attribute.getName(), (SingularAttribute<X,?>) attribute );
break;
}
case PLURAL_ATTRIBUTE : {
declaredPluralAttributes.put(attribute.getName(), (PluralAttribute<X,?,?>) attribute );
break;
}
default : {
throw new AssertionFailure( "unknown bindable type: " + bindableType );
}
}
}
};
}
public void lock() {
locked = true;
}
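	// Attributes are registered through the Builder returned from getBuilder(); once lock() has been
	// called, further getBuilder() calls fail fast with an IllegalStateException (presumably marking the
	// end of metamodel population).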
public static interface Builder<X> {
public void addAttribute(Attribute<X,?> attribute);
}
@Override
@SuppressWarnings({ "unchecked" })
public Set<Attribute<? super X, ?>> getAttributes() {
HashSet attributes = new HashSet<Attribute<X, ?>>( declaredAttributes.values() );
if ( getSupertype() != null ) {
attributes.addAll( getSupertype().getAttributes() );
}
return attributes;
}
@Override
public Set<Attribute<X, ?>> getDeclaredAttributes() {
return new HashSet<Attribute<X, ?>>( declaredAttributes.values() );
}
@Override
@SuppressWarnings({ "unchecked" })
public Attribute<? super X, ?> getAttribute(String name) {
Attribute<? super X, ?> attribute = declaredAttributes.get( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getAttribute( name );
}
return attribute;
}
@Override
public Attribute<X, ?> getDeclaredAttribute(String name) {
		final Attribute<X, ?> attr = declaredAttributes.get( name );
checkNotNull( "Attribute ", attr, name );
return attr;
}
private void checkNotNull(String attributeType, Attribute<?,?> attribute, String name) {
if ( attribute == null ) {
throw new IllegalArgumentException( attributeType + " named " + name + " is not present" );
}
}
@Override
@SuppressWarnings({ "unchecked" })
public Set<SingularAttribute<? super X, ?>> getSingularAttributes() {
HashSet attributes = new HashSet<SingularAttribute<X, ?>>( declaredSingularAttributes.values() );
if ( getSupertype() != null ) {
attributes.addAll( getSupertype().getSingularAttributes() );
}
return attributes;
}
@Override
public Set<SingularAttribute<X, ?>> getDeclaredSingularAttributes() {
return new HashSet<SingularAttribute<X, ?>>( declaredSingularAttributes.values() );
}
@Override
@SuppressWarnings({ "unchecked" })
public SingularAttribute<? super X, ?> getSingularAttribute(String name) {
SingularAttribute<? super X, ?> attribute = declaredSingularAttributes.get( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getSingularAttribute( name );
}
return attribute;
}
@Override
public SingularAttribute<X, ?> getDeclaredSingularAttribute(String name) {
final SingularAttribute<X, ?> attr = declaredSingularAttributes.get( name );
checkNotNull( "SingularAttribute ", attr, name );
return attr;
}
@Override
@SuppressWarnings({ "unchecked" })
public <Y> SingularAttribute<? super X, Y> getSingularAttribute(String name, Class<Y> type) {
SingularAttribute<? super X, ?> attribute = declaredSingularAttributes.get( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getSingularAttribute( name );
}
checkTypeForSingleAttribute( "SingularAttribute ", attribute, name, type );
return ( SingularAttribute<? super X, Y> ) attribute;
}
@Override
@SuppressWarnings( "unchecked")
public <Y> SingularAttribute<X, Y> getDeclaredSingularAttribute(String name, Class<Y> javaType) {
final SingularAttribute<X, ?> attr = declaredSingularAttributes.get( name );
checkTypeForSingleAttribute( "SingularAttribute ", attr, name, javaType );
return ( SingularAttribute<X, Y> ) attr;
}
private <Y> void checkTypeForSingleAttribute(
String attributeType,
SingularAttribute<?,?> attribute,
String name,
Class<Y> javaType) {
if ( attribute == null || ( javaType != null && !attribute.getBindableJavaType().equals( javaType ) ) ) {
if ( isPrimitiveVariant( attribute, javaType ) ) {
return;
}
throw new IllegalArgumentException(
attributeType + " named " + name
+ ( javaType != null ? " and of type " + javaType.getName() : "" )
+ " is not present"
);
}
}
@SuppressWarnings({ "SimplifiableIfStatement" })
protected <Y> boolean isPrimitiveVariant(SingularAttribute<?,?> attribute, Class<Y> javaType) {
if ( attribute == null ) {
return false;
}
Class declaredType = attribute.getBindableJavaType();
if ( declaredType.isPrimitive() ) {
return ( Boolean.class.equals( javaType ) && Boolean.TYPE.equals( declaredType ) )
|| ( Character.class.equals( javaType ) && Character.TYPE.equals( declaredType ) )
|| ( Byte.class.equals( javaType ) && Byte.TYPE.equals( declaredType ) )
|| ( Short.class.equals( javaType ) && Short.TYPE.equals( declaredType ) )
|| ( Integer.class.equals( javaType ) && Integer.TYPE.equals( declaredType ) )
|| ( Long.class.equals( javaType ) && Long.TYPE.equals( declaredType ) )
|| ( Float.class.equals( javaType ) && Float.TYPE.equals( declaredType ) )
|| ( Double.class.equals( javaType ) && Double.TYPE.equals( declaredType ) );
}
if ( javaType.isPrimitive() ) {
return ( Boolean.class.equals( declaredType ) && Boolean.TYPE.equals( javaType ) )
|| ( Character.class.equals( declaredType ) && Character.TYPE.equals( javaType ) )
|| ( Byte.class.equals( declaredType ) && Byte.TYPE.equals( javaType ) )
|| ( Short.class.equals( declaredType ) && Short.TYPE.equals( javaType ) )
|| ( Integer.class.equals( declaredType ) && Integer.TYPE.equals( javaType ) )
|| ( Long.class.equals( declaredType ) && Long.TYPE.equals( javaType ) )
|| ( Float.class.equals( declaredType ) && Float.TYPE.equals( javaType ) )
|| ( Double.class.equals( declaredType ) && Double.TYPE.equals( javaType ) );
}
return false;
}
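	// This allows, for example, a lookup such as getDeclaredSingularAttribute( "age", Integer.class )
	// (attribute name purely illustrative) to match an attribute declared with the primitive int type,
	// and vice versa.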
@Override
@SuppressWarnings({ "unchecked" })
public Set<PluralAttribute<? super X, ?, ?>> getPluralAttributes() {
HashSet attributes = new HashSet<PluralAttribute<? super X, ?, ?>>( declaredPluralAttributes.values() );
if ( getSupertype() != null ) {
attributes.addAll( getSupertype().getPluralAttributes() );
}
return attributes;
}
@Override
public Set<PluralAttribute<X, ?, ?>> getDeclaredPluralAttributes() {
return new HashSet<PluralAttribute<X,?,?>>( declaredPluralAttributes.values() );
}
@Override
@SuppressWarnings({ "unchecked" })
public CollectionAttribute<? super X, ?> getCollection(String name) {
PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
basicCollectionCheck( attribute, name );
return ( CollectionAttribute<X, ?> ) attribute;
}
private PluralAttribute<? super X, ?, ?> getPluralAttribute(String name) {
return declaredPluralAttributes.get( name );
}
private void basicCollectionCheck(PluralAttribute<? super X, ?, ?> attribute, String name) {
checkNotNull( "CollectionAttribute", attribute, name );
if ( ! CollectionAttribute.class.isAssignableFrom( attribute.getClass() ) ) {
throw new IllegalArgumentException( name + " is not a CollectionAttribute: " + attribute.getClass() );
}
}
@Override
@SuppressWarnings( "unchecked")
public CollectionAttribute<X, ?> getDeclaredCollection(String name) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
basicCollectionCheck( attribute, name );
return ( CollectionAttribute<X, ?> ) attribute;
}
@Override
@SuppressWarnings({ "unchecked" })
public SetAttribute<? super X, ?> getSet(String name) {
PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
basicSetCheck( attribute, name );
return (SetAttribute<? super X, ?>) attribute;
}
private void basicSetCheck(PluralAttribute<? super X, ?, ?> attribute, String name) {
checkNotNull( "SetAttribute", attribute, name );
if ( ! SetAttribute.class.isAssignableFrom( attribute.getClass() ) ) {
throw new IllegalArgumentException( name + " is not a SetAttribute: " + attribute.getClass() );
}
}
@Override
@SuppressWarnings( "unchecked")
public SetAttribute<X, ?> getDeclaredSet(String name) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
basicSetCheck( attribute, name );
return ( SetAttribute<X, ?> ) attribute;
}
@Override
@SuppressWarnings({ "unchecked" })
public ListAttribute<? super X, ?> getList(String name) {
PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
basicListCheck( attribute, name );
return (ListAttribute<? super X, ?>) attribute;
}
private void basicListCheck(PluralAttribute<? super X, ?, ?> attribute, String name) {
checkNotNull( "ListAttribute", attribute, name );
if ( ! ListAttribute.class.isAssignableFrom( attribute.getClass() ) ) {
throw new IllegalArgumentException( name + " is not a ListAttribute: " + attribute.getClass() );
}
}
@Override
public ListAttribute<X, ?> getDeclaredList(String name) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
basicListCheck( attribute, name );
return ( ListAttribute<X, ?> ) attribute;
}
@Override
@SuppressWarnings({ "unchecked" })
public MapAttribute<? super X, ?, ?> getMap(String name) {
PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
basicMapCheck( attribute, name );
return (MapAttribute<? super X, ?, ?>) attribute;
}
private void basicMapCheck(PluralAttribute<? super X, ?, ?> attribute, String name) {
checkNotNull( "MapAttribute", attribute, name );
if ( ! MapAttribute.class.isAssignableFrom( attribute.getClass() ) ) {
throw new IllegalArgumentException( name + " is not a MapAttribute: " + attribute.getClass() );
}
}
@Override
public MapAttribute<X, ?, ?> getDeclaredMap(String name) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
basicMapCheck( attribute, name );
return ( MapAttribute<X,?,?> ) attribute;
}
@Override
@SuppressWarnings({ "unchecked" })
public <E> CollectionAttribute<? super X, E> getCollection(String name, Class<E> elementType) {
PluralAttribute<? super X, ?, ?> attribute = declaredPluralAttributes.get( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
checkCollectionElementType( attribute, name, elementType );
return ( CollectionAttribute<? super X, E> ) attribute;
}
@Override
public <E> CollectionAttribute<X, E> getDeclaredCollection(String name, Class<E> elementType) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
checkCollectionElementType( attribute, name, elementType );
return ( CollectionAttribute<X, E> ) attribute;
}
private <E> void checkCollectionElementType(PluralAttribute<?,?,?> attribute, String name, Class<E> elementType) {
checkTypeForPluralAttributes( "CollectionAttribute", attribute, name, elementType, PluralAttribute.CollectionType.COLLECTION );
}
private <E> void checkTypeForPluralAttributes(
String attributeType,
PluralAttribute<?,?,?> attribute,
String name,
Class<E> elementType,
PluralAttribute.CollectionType collectionType) {
if ( attribute == null
|| ( elementType != null && !attribute.getBindableJavaType().equals( elementType ) )
|| attribute.getCollectionType() != collectionType ) {
throw new IllegalArgumentException(
attributeType + " named " + name
+ ( elementType != null ? " and of element type " + elementType : "" )
+ " is not present"
);
}
}
@Override
@SuppressWarnings({ "unchecked" })
public <E> SetAttribute<? super X, E> getSet(String name, Class<E> elementType) {
PluralAttribute<? super X, ?, ?> attribute = declaredPluralAttributes.get( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
checkSetElementType( attribute, name, elementType );
return ( SetAttribute<? super X, E> ) attribute;
}
private <E> void checkSetElementType(PluralAttribute<? super X, ?, ?> attribute, String name, Class<E> elementType) {
checkTypeForPluralAttributes( "SetAttribute", attribute, name, elementType, PluralAttribute.CollectionType.SET );
}
@Override
public <E> SetAttribute<X, E> getDeclaredSet(String name, Class<E> elementType) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
checkSetElementType( attribute, name, elementType );
return ( SetAttribute<X, E> ) attribute;
}
@Override
@SuppressWarnings({ "unchecked" })
public <E> ListAttribute<? super X, E> getList(String name, Class<E> elementType) {
PluralAttribute<? super X, ?, ?> attribute = declaredPluralAttributes.get( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
checkListElementType( attribute, name, elementType );
return ( ListAttribute<? super X, E> ) attribute;
}
private <E> void checkListElementType(PluralAttribute<? super X, ?, ?> attribute, String name, Class<E> elementType) {
checkTypeForPluralAttributes( "ListAttribute", attribute, name, elementType, PluralAttribute.CollectionType.LIST );
}
@Override
public <E> ListAttribute<X, E> getDeclaredList(String name, Class<E> elementType) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
checkListElementType( attribute, name, elementType );
return ( ListAttribute<X, E> ) attribute;
}
@Override
@SuppressWarnings({ "unchecked" })
public <K, V> MapAttribute<? super X, K, V> getMap(String name, Class<K> keyType, Class<V> valueType) {
PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
checkMapValueType( attribute, name, valueType );
final MapAttribute<? super X, K, V> mapAttribute = ( MapAttribute<? super X, K, V> ) attribute;
checkMapKeyType( mapAttribute, name, keyType );
return mapAttribute;
}
private <V> void checkMapValueType(PluralAttribute<? super X, ?, ?> attribute, String name, Class<V> valueType) {
checkTypeForPluralAttributes( "MapAttribute", attribute, name, valueType, PluralAttribute.CollectionType.MAP);
}
private <K,V> void checkMapKeyType(MapAttribute<? super X, K, V> mapAttribute, String name, Class<K> keyType) {
if ( mapAttribute.getKeyJavaType() != keyType ) {
throw new IllegalArgumentException( "MapAttribute named " + name + " does not support a key of type " + keyType );
}
}
@Override
public <K, V> MapAttribute<X, K, V> getDeclaredMap(String name, Class<K> keyType, Class<V> valueType) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
checkMapValueType( attribute, name, valueType );
final MapAttribute<X, K, V> mapAttribute = ( MapAttribute<X, K, V> ) attribute;
checkMapKeyType( mapAttribute, name, keyType );
return mapAttribute;
}
}

View File

@ -1,43 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2009 by Red Hat Inc and/or its affiliates or by
* third-party contributors as indicated by either @author tags or express
* copyright attribution statements applied by the authors. All
* third-party contributions are distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal;
import java.io.Serializable;
import javax.persistence.metamodel.Type;
/**
* Defines commonality for the JPA {@link Type} hierarchy of interfaces.
*
* @author Steve Ebersole
*/
public abstract class AbstractType<X> implements Type<X>, Serializable {
private final Class<X> javaType;
public AbstractType(Class<X> javaType) {
this.javaType = javaType;
}
public Class<X> getJavaType() {
return javaType;
}
}

View File

@ -1,47 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal;
import java.io.Serializable;
import javax.persistence.metamodel.BasicType;
/**
* @author Emmanuel Bernard
*/
public class BasicTypeImpl<X> implements BasicType<X>, Serializable {
private final Class<X> clazz;
private PersistenceType persistenceType;
@Override
public PersistenceType getPersistenceType() {
return persistenceType;
}
@Override
public Class<X> getJavaType() {
return clazz;
}
public BasicTypeImpl(Class<X> clazz, PersistenceType persistenceType) {
this.clazz = clazz;
this.persistenceType = persistenceType;
}
}

View File

@ -1,57 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal;
import java.io.Serializable;
import javax.persistence.metamodel.EmbeddableType;
import org.hibernate.type.ComponentType;
/**
* @author Emmanuel Bernard
*/
public class EmbeddableTypeImpl<X>
extends AbstractManagedType<X>
implements EmbeddableType<X>, Serializable {
private final AbstractManagedType parent;
private final ComponentType hibernateType;
public EmbeddableTypeImpl(Class<X> javaType, AbstractManagedType parent, ComponentType hibernateType) {
super( javaType, null );
this.parent = parent;
this.hibernateType = hibernateType;
}
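	// Note that the AbstractManagedType supertype is passed as null; the owning managed type is tracked
	// separately via 'parent', alongside the underlying Hibernate ComponentType.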
@Override
public PersistenceType getPersistenceType() {
return PersistenceType.EMBEDDABLE;
}
public AbstractManagedType getParent() {
return parent;
}
public ComponentType getHibernateType() {
return hibernateType;
}
}

View File

@ -1,71 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal;
import java.io.Serializable;
import javax.persistence.metamodel.EntityType;
/**
* Defines the Hibernate implementation of the JPA {@link EntityType} contract.
*
* @author Steve Ebersole
* @author Emmanuel Bernard
*/
public class EntityTypeImpl<X>
extends AbstractIdentifiableType<X>
implements EntityType<X>, Serializable {
private final String entityName;
public EntityTypeImpl(
Class<X> javaType,
AbstractIdentifiableType<? super X> superType,
String entityName,
boolean hasIdentifierProperty,
boolean isVersioned) {
super( javaType, superType, hasIdentifierProperty, isVersioned );
this.entityName = entityName;
}
@Override
public String getName() {
return entityName;
}
@Override
public BindableType getBindableType() {
return BindableType.ENTITY_TYPE;
}
@Override
public Class<X> getBindableJavaType() {
return getJavaType();
}
@Override
public PersistenceType getPersistenceType() {
return PersistenceType.ENTITY;
}
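	// Returning true below means an entity that does not declare its own identifier must obtain it from
	// a supertype (e.g. a mapped superclass); see AbstractIdentifiableType#getId.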
@Override
protected boolean requiresSupertypeForNonDeclaredIdentifier() {
return true;
}
}

View File

@ -1,128 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.persistence.metamodel.EmbeddableType;
import javax.persistence.metamodel.EntityType;
import javax.persistence.metamodel.ManagedType;
import javax.persistence.metamodel.Metamodel;
import org.hibernate.internal.util.collections.CollectionHelper;
/**
* Hibernate implementation of the JPA {@link javax.persistence.metamodel.Metamodel} contract.
*
* @author Steve Ebersole
* @author Emmanuel Bernard
*/
public class MetamodelImpl implements Metamodel, Serializable {
private final Map<Class<?>, EntityTypeImpl<?>> entityTypeMap;
private final Map<Class<?>, MappedSuperclassTypeImpl<?>> mappedSuperclassTypeMap;
private final Map<Class<?>, EmbeddableTypeImpl<?>> embeddableTypeMap;
private final Map<String, EntityTypeImpl<?>> entityTypesByEntityName;
/**
* Instantiate the metamodel.
*
* @param entityTypeMap The entity mappings.
* @param mappedSuperclassTypeMap The {@link javax.persistence.MappedSuperclass} mappings
* @param embeddableTypeMap The embeddable (component) mappings.
*/
public MetamodelImpl(
Map<Class<?>, EntityTypeImpl<?>> entityTypeMap,
Map<Class<?>, MappedSuperclassTypeImpl<?>> mappedSuperclassTypeMap,
Map<Class<?>, EmbeddableTypeImpl<?>> embeddableTypeMap,
Map<String, EntityTypeImpl<?>> entityTypesByEntityName) {
this.entityTypeMap = entityTypeMap;
this.mappedSuperclassTypeMap = mappedSuperclassTypeMap;
this.embeddableTypeMap = embeddableTypeMap;
this.entityTypesByEntityName = entityTypesByEntityName;
}
@Override
@SuppressWarnings({ "unchecked" })
public <X> EntityType<X> entity(Class<X> cls) {
final EntityType<?> entityType = entityTypeMap.get( cls );
if ( entityType == null ) {
throw new IllegalArgumentException( "Not an entity: " + cls );
}
return (EntityType<X>) entityType;
}
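	// For example (Customer being a purely illustrative entity class), metamodel.entity( Customer.class )
	// returns its EntityType, while any class not known as an entity results in an IllegalArgumentException.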
@Override
@SuppressWarnings({ "unchecked" })
public <X> ManagedType<X> managedType(Class<X> cls) {
ManagedType<?> type = entityTypeMap.get( cls );
if ( type == null ) {
type = mappedSuperclassTypeMap.get( cls );
}
if ( type == null ) {
type = embeddableTypeMap.get( cls );
}
if ( type == null ) {
throw new IllegalArgumentException( "Not an managed type: " + cls );
}
return (ManagedType<X>) type;
}
@Override
@SuppressWarnings({ "unchecked" })
public <X> EmbeddableType<X> embeddable(Class<X> cls) {
final EmbeddableType<?> embeddableType = embeddableTypeMap.get( cls );
if ( embeddableType == null ) {
throw new IllegalArgumentException( "Not an embeddable: " + cls );
}
return (EmbeddableType<X>) embeddableType;
}
@Override
public Set<ManagedType<?>> getManagedTypes() {
final int setSize = CollectionHelper.determineProperSizing(
entityTypeMap.size() + mappedSuperclassTypeMap.size() + embeddableTypeMap.size()
);
final Set<ManagedType<?>> managedTypes = new HashSet<ManagedType<?>>( setSize );
managedTypes.addAll( entityTypeMap.values() );
managedTypes.addAll( mappedSuperclassTypeMap.values() );
managedTypes.addAll( embeddableTypeMap.values() );
return managedTypes;
}
@Override
public Set<EntityType<?>> getEntities() {
return new HashSet<EntityType<?>>( entityTypesByEntityName.values() );
}
@Override
public Set<EmbeddableType<?>> getEmbeddables() {
return new HashSet<EmbeddableType<?>>( embeddableTypeMap.values() );
}
public EntityTypeImpl getEntityTypeByName(String entityName) {
return entityTypesByEntityName.get( entityName );
}
}

View File

@ -1,223 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal;
import java.io.Serializable;
import java.lang.reflect.Member;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.persistence.metamodel.CollectionAttribute;
import javax.persistence.metamodel.ListAttribute;
import javax.persistence.metamodel.MapAttribute;
import javax.persistence.metamodel.PluralAttribute;
import javax.persistence.metamodel.SetAttribute;
import javax.persistence.metamodel.Type;
import org.hibernate.metamodel.spi.binding.PluralAttributeBinding;
/**
* @author Emmanuel Bernard
* @author Steve Ebersole
*/
public abstract class PluralAttributeImpl<X, C, E>
extends AbstractAttribute<X,C>
implements PluralAttribute<X, C, E>, Serializable {
private final Type<E> elementType;
@SuppressWarnings("unchecked")
private PluralAttributeImpl(Builder builder) {
super(
builder.attributeBinding.getAttribute().getName(),
builder.collectionClass,
builder.owner,
builder.member,
builder.persistentAttributeType
);
this.elementType = builder.elementType;
}
public static class Builder {
private final Class collectionClass;
private AbstractManagedType owner;
private PluralAttributeBinding attributeBinding;
private Member member;
private Type keyType;
private Type elementType;
private PersistentAttributeType persistentAttributeType;
public Builder(Class collectionClass) {
this.collectionClass = collectionClass;
}
public Builder owner(AbstractManagedType owner) {
this.owner = owner;
return this;
}
public Builder member(Member member) {
this.member = member;
return this;
}
public Builder binding(PluralAttributeBinding attributeBinding) {
this.attributeBinding = attributeBinding;
return this;
}
public Builder elementType(Type elementType) {
this.elementType = elementType;
return this;
}
public Builder keyType(Type keyType) {
this.keyType = keyType;
return this;
}
public Builder persistentAttributeType(PersistentAttributeType attrType) {
this.persistentAttributeType = attrType;
return this;
}
@SuppressWarnings( "unchecked" )
public <X,C,E,K> PluralAttributeImpl<X,C,E> build() {
//apply strict spec rules first
if ( Map.class.equals( collectionClass ) ) {
return ( PluralAttributeImpl<X, C, E> ) new MapAttributeImpl<X,K,E>( this );
}
else if ( Set.class.equals( collectionClass ) ) {
return ( PluralAttributeImpl<X, C, E> ) new SetAttributeImpl<X,E>( this );
}
else if ( List.class.equals( collectionClass ) ) {
return ( PluralAttributeImpl<X, C, E> ) new ListAttributeImpl<X,E>( this );
}
else if ( Collection.class.equals( collectionClass ) ) {
return ( PluralAttributeImpl<X, C, E> ) new CollectionAttributeImpl<X, E>( this );
}
//apply loose rules
if ( Map.class.isAssignableFrom( collectionClass ) ) {
return ( PluralAttributeImpl<X, C, E> ) new MapAttributeImpl<X,K,E>( this );
}
else if ( Set.class.isAssignableFrom( collectionClass ) ) {
return ( PluralAttributeImpl<X, C, E> ) new SetAttributeImpl<X,E>( this );
}
else if ( List.class.isAssignableFrom( collectionClass ) ) {
return ( PluralAttributeImpl<X, C, E> ) new ListAttributeImpl<X,E>( this );
}
else if ( Collection.class.isAssignableFrom( collectionClass ) ) {
return ( PluralAttributeImpl<X, C, E> ) new CollectionAttributeImpl<X, E>( this );
}
throw new UnsupportedOperationException( "Unknown collection: " + collectionClass );
}
}
public static Builder builder(Class collectionClass) {
return new Builder( collectionClass );
}
@Override
public Type<E> getElementType() {
return elementType;
}
@Override
public boolean isAssociation() {
return true;
}
@Override
public boolean isCollection() {
return true;
}
@Override
public BindableType getBindableType() {
return BindableType.PLURAL_ATTRIBUTE;
}
@Override
public Class<E> getBindableJavaType() {
return elementType.getJavaType();
}
static class SetAttributeImpl<X,E> extends PluralAttributeImpl<X,Set<E>,E> implements SetAttribute<X,E> {
SetAttributeImpl(Builder xceBuilder) {
super( xceBuilder );
}
@Override
public CollectionType getCollectionType() {
return CollectionType.SET;
}
}
static class CollectionAttributeImpl<X,E> extends PluralAttributeImpl<X,Collection<E>,E> implements CollectionAttribute<X,E> {
CollectionAttributeImpl(Builder xceBuilder) {
super( xceBuilder );
}
@Override
public CollectionType getCollectionType() {
return CollectionType.COLLECTION;
}
}
static class ListAttributeImpl<X,E> extends PluralAttributeImpl<X,List<E>,E> implements ListAttribute<X,E> {
ListAttributeImpl(Builder xceBuilder) {
super( xceBuilder );
}
@Override
public CollectionType getCollectionType() {
return CollectionType.LIST;
}
}
static class MapAttributeImpl<X,K,V> extends PluralAttributeImpl<X,Map<K,V>,V> implements MapAttribute<X,K,V> {
private final Type<K> keyType;
@SuppressWarnings("unchecked")
MapAttributeImpl(Builder xceBuilder) {
super( xceBuilder );
this.keyType = xceBuilder.keyType;
}
@Override
public CollectionType getCollectionType() {
return CollectionType.MAP;
}
@Override
public Class<K> getKeyJavaType() {
return keyType.getJavaType();
}
@Override
public Type<K> getKeyType() {
return keyType;
}
}
}
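
The strict-then-loose dispatch in Builder#build() is easier to see in isolation. A standalone sketch of the same ordering using only JDK types (the class name is made up for illustration):

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;

final class CollectionCategorySketch {
	static String resolve(Class<?> collectionClass) {
		// strict spec rules first: exact matches on the JPA-recognized interfaces
		if ( Map.class.equals( collectionClass ) ) return "MAP";
		if ( Set.class.equals( collectionClass ) ) return "SET";
		if ( List.class.equals( collectionClass ) ) return "LIST";
		if ( Collection.class.equals( collectionClass ) ) return "COLLECTION";
		// loose rules: user-declared subtypes of those interfaces
		if ( Map.class.isAssignableFrom( collectionClass ) ) return "MAP";
		if ( Set.class.isAssignableFrom( collectionClass ) ) return "SET";
		if ( List.class.isAssignableFrom( collectionClass ) ) return "LIST";
		if ( Collection.class.isAssignableFrom( collectionClass ) ) return "COLLECTION";
		throw new UnsupportedOperationException( "Unknown collection: " + collectionClass );
	}

	public static void main(String[] args) {
		System.out.println( resolve( List.class ) );                // LIST (strict match)
		System.out.println( resolve( java.util.ArrayList.class ) ); // LIST (loose fallback)
	}
}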

View File

@ -1,128 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal;
import java.io.Serializable;
import java.lang.reflect.Member;
import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.metamodel.Type;
/**
* @author Emmanuel Bernard
* @author Steve Ebersole
*/
public class SingularAttributeImpl<X, Y>
extends AbstractAttribute<X,Y>
implements SingularAttribute<X, Y>, Serializable {
private final boolean isIdentifier;
private final boolean isVersion;
private final boolean isOptional;
private final Type<Y> attributeType;
public SingularAttributeImpl(
String name,
Class<Y> javaType,
AbstractManagedType<X> declaringType,
Member member,
boolean isIdentifier,
boolean isVersion,
boolean isOptional,
Type<Y> attributeType,
PersistentAttributeType persistentAttributeType) {
super( name, javaType, declaringType, member, persistentAttributeType );
this.isIdentifier = isIdentifier;
this.isVersion = isVersion;
this.isOptional = isOptional;
this.attributeType = attributeType;
}
/**
	 * Subclass used to simplify instantiation of singular attributes representing an entity's
* identifier.
*/
public static class Identifier<X,Y> extends SingularAttributeImpl<X,Y> {
public Identifier(
String name,
Class<Y> javaType,
AbstractManagedType<X> declaringType,
Member member,
Type<Y> attributeType,
PersistentAttributeType persistentAttributeType) {
super( name, javaType, declaringType, member, true, false, false, attributeType, persistentAttributeType );
}
}
/**
	 * Subclass used to simplify instantiation of singular attributes representing an entity's
* version.
*/
public static class Version<X,Y> extends SingularAttributeImpl<X,Y> {
public Version(
String name,
Class<Y> javaType,
AbstractManagedType<X> declaringType,
Member member,
Type<Y> attributeType,
PersistentAttributeType persistentAttributeType) {
super( name, javaType, declaringType, member, false, true, false, attributeType, persistentAttributeType );
}
}
@Override
public boolean isId() {
return isIdentifier;
}
@Override
public boolean isVersion() {
return isVersion;
}
@Override
public boolean isOptional() {
return isOptional;
}
@Override
public Type<Y> getType() {
return attributeType;
}
@Override
public boolean isAssociation() {
return false;
}
@Override
public boolean isCollection() {
return false;
}
@Override
public BindableType getBindableType() {
return BindableType.SINGULAR_ATTRIBUTE;
}
@Override
public Class<Y> getBindableJavaType() {
return attributeType.getJavaType();
}
}

View File

@ -1,142 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal.legacy;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import javax.persistence.metamodel.Attribute;
import javax.persistence.metamodel.ManagedType;
import org.hibernate.internal.util.ReflectHelper;
/**
* Models the commonality of the JPA {@link javax.persistence.metamodel.Attribute} hierarchy.
*
* @author Steve Ebersole
*/
public abstract class AbstractAttribute<X, Y>
implements Attribute<X, Y>, AttributeImplementor<X,Y>, Serializable {
private final String name;
private final Class<Y> javaType;
private final AbstractManagedType<X> declaringType;
private transient Member member;
private final PersistentAttributeType persistentAttributeType;
public AbstractAttribute(
String name,
Class<Y> javaType,
AbstractManagedType<X> declaringType,
Member member,
PersistentAttributeType persistentAttributeType) {
this.name = name;
this.javaType = javaType;
this.declaringType = declaringType;
this.member = member;
this.persistentAttributeType = persistentAttributeType;
}
/**
* {@inheritDoc}
*/
public String getName() {
return name;
}
/**
* {@inheritDoc}
*/
public ManagedType<X> getDeclaringType() {
return declaringType;
}
/**
* {@inheritDoc}
*/
public Class<Y> getJavaType() {
return javaType;
}
/**
* {@inheritDoc}
*/
public Member getJavaMember() {
return member;
}
/**
* {@inheritDoc}
*/
public PersistentAttributeType getPersistentAttributeType() {
return persistentAttributeType;
}
/**
* Used by JDK serialization...
*
* @param ois The input stream from which we are being read...
* @throws java.io.IOException Indicates a general IO stream exception
* @throws ClassNotFoundException Indicates a class resolution issue
*/
protected void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
ois.defaultReadObject();
final String memberDeclaringClassName = ( String ) ois.readObject();
final String memberName = ( String ) ois.readObject();
final String memberType = ( String ) ois.readObject();
final Class memberDeclaringClass = Class.forName(
memberDeclaringClassName,
false,
declaringType.getJavaType().getClassLoader()
);
try {
this.member = "method".equals( memberType )
? memberDeclaringClass.getMethod( memberName, ReflectHelper.NO_PARAM_SIGNATURE )
: memberDeclaringClass.getField( memberName );
}
catch ( Exception e ) {
throw new IllegalStateException(
"Unable to locate member [" + memberDeclaringClassName + "#"
+ memberName + "]"
);
}
}
/**
* Used by JDK serialization...
*
* @param oos The output stream to which we are being written...
* @throws java.io.IOException Indicates a general IO stream exception
*/
protected void writeObject(ObjectOutputStream oos) throws IOException {
oos.defaultWriteObject();
oos.writeObject( getJavaMember().getDeclaringClass().getName() );
oos.writeObject( getJavaMember().getName() );
// should only ever be a field or the getter-method...
oos.writeObject( Method.class.isInstance( getJavaMember() ) ? "method" : "field" );
}
}
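
Since java.lang.reflect.Member is not Serializable, the readObject/writeObject pair above round-trips the member as (declaring class name, member name, "method"|"field"). A standalone sketch of that idea (hypothetical names, not part of the commit):

import java.lang.reflect.Member;
import java.lang.reflect.Method;

final class MemberHandleSketch {
	private final String declaringClassName;
	private final String memberName;
	private final String kind; // "method" or "field", mirroring the stream layout above

	MemberHandleSketch(Member member) {
		this.declaringClassName = member.getDeclaringClass().getName();
		this.memberName = member.getName();
		this.kind = member instanceof Method ? "method" : "field";
	}

	Member resolve(ClassLoader loader) throws ReflectiveOperationException {
		final Class<?> declaringClass = Class.forName( declaringClassName, false, loader );
		return "method".equals( kind )
				? declaringClass.getMethod( memberName )  // getter-style, no parameters
				: declaringClass.getField( memberName );  // like the original, public fields only
	}
}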

View File

@ -1,318 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal.legacy;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.metamodel.Attribute;
import javax.persistence.metamodel.IdentifiableType;
import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.metamodel.Type;
/**
* Defines commonality for the JPA {@link javax.persistence.metamodel.IdentifiableType} types. JPA defines
* identifiable types as entities or mapped-superclasses. Basically things to which an
* identifier can be attached.
* <p/>
* NOTE : Currently we only really have support for direct entities in the Hibernate metamodel
* as the information for them is consumed into the closest actual entity subclass(es) in the
* internal Hibernate mapping-metamodel.
*
* @author Steve Ebersole
*/
public abstract class AbstractIdentifiableType<X>
extends AbstractManagedType<X>
implements IdentifiableType<X>, Serializable {
private final boolean hasIdentifierProperty;
private final boolean isVersioned;
private SingularAttributeImpl<X, ?> id;
private SingularAttributeImpl<X, ?> version;
private Set<SingularAttribute<? super X,?>> idClassAttributes;
public AbstractIdentifiableType(
Class<X> javaType,
AbstractIdentifiableType<? super X> superType,
boolean hasIdentifierProperty,
boolean versioned) {
super( javaType, superType );
this.hasIdentifierProperty = hasIdentifierProperty;
isVersioned = versioned;
}
/**
* {@inheritDoc}
*/
public AbstractIdentifiableType<? super X> getSupertype() {
return ( AbstractIdentifiableType<? super X> ) super.getSupertype();
}
/**
* Indicates if a non-null super type is required to provide the
* identifier attribute(s) if this object does not have a declared
* identifier.
* @return true, if a non-null super type is required to provide
* the identifier attribute(s) if this object does not have a
* declared identifier; false, otherwise.
*/
protected abstract boolean requiresSupertypeForNonDeclaredIdentifier();
protected AbstractIdentifiableType<? super X> requireSupertype() {
if ( getSupertype() == null ) {
throw new IllegalStateException( "No supertype found" );
}
return getSupertype();
}
/**
* {@inheritDoc}
*/
public boolean hasSingleIdAttribute() {
return hasIdentifierProperty;
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public <Y> SingularAttribute<? super X, Y> getId(Class<Y> javaType) {
final SingularAttribute<? super X, Y> id_;
if ( id != null ) {
checkSimpleId();
id_ = ( SingularAttribute<? super X, Y> ) id;
if ( javaType != id.getJavaType() ) {
throw new IllegalArgumentException( "Id attribute was not of specified type : " + javaType.getName() );
}
}
else {
//yuk yuk bad me
if ( ! requiresSupertypeForNonDeclaredIdentifier()) {
final AbstractIdentifiableType<? super X> supertype = getSupertype();
if (supertype != null) {
id_ = supertype.getId( javaType );
}
else {
id_ = null;
}
}
else {
id_ = requireSupertype().getId( javaType );
}
}
return id_;
}
/**
* Centralized check to ensure the id for this hierarchy is a simple one (i.e., does not use
* an id-class).
*
* @see #checkIdClass()
*/
protected void checkSimpleId() {
if ( ! hasIdentifierProperty ) {
throw new IllegalStateException( "This class uses an @IdClass" );
}
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public <Y> SingularAttribute<X, Y> getDeclaredId(Class<Y> javaType) {
checkDeclaredId();
checkSimpleId();
if ( javaType != id.getJavaType() ) {
throw new IllegalArgumentException( "Id attribute was not of specified type : " + javaType.getName() );
}
return (SingularAttribute<X, Y>) id;
}
/**
* Centralized check to ensure the id is actually declared on the class mapped here, as opposed to a
* super class.
*/
protected void checkDeclaredId() {
if ( id == null ) {
throw new IllegalArgumentException( "The id attribute is not declared on this type" );
}
}
/**
* {@inheritDoc}
*/
public Type<?> getIdType() {
if ( id != null ) {
checkSimpleId();
return id.getType();
}
else {
return requireSupertype().getIdType();
}
}
private boolean hasIdClassAttributesDefined() {
return idClassAttributes != null ||
( getSupertype() != null && getSupertype().hasIdClassAttributesDefined() );
}
/**
* {@inheritDoc}
*/
public Set<SingularAttribute<? super X, ?>> getIdClassAttributes() {
if ( idClassAttributes != null ) {
checkIdClass();
}
else {
// Java does not allow casting requireSupertype().getIdClassAttributes()
// to Set<SingularAttribute<? super X, ?>> because the
// superclass X is a different Java type from this X
// (i.e, getSupertype().getJavaType() != getJavaType()).
// It will, however, allow a Set<SingularAttribute<? super X, ?>>
// to be initialized with requireSupertype().getIdClassAttributes(),
// since getSupertype().getJavaType() is a superclass of getJavaType()
if ( requiresSupertypeForNonDeclaredIdentifier() ) {
idClassAttributes = new HashSet<SingularAttribute<? super X, ?>>( requireSupertype().getIdClassAttributes() );
}
else if ( getSupertype() != null && hasIdClassAttributesDefined() ) {
idClassAttributes = new HashSet<SingularAttribute<? super X, ?>>( getSupertype().getIdClassAttributes() );
}
}
return idClassAttributes;
}
/**
* Centralized check to ensure the id for this hierarchy uses an id-class.
*
* @see #checkSimpleId()
*/
private void checkIdClass() {
if ( hasIdentifierProperty ) {
throw new IllegalArgumentException( "This class does not use @IdClass" );
}
}
/**
* {@inheritDoc}
*/
public boolean hasVersionAttribute() {
return isVersioned;
}
public boolean hasDeclaredVersionAttribute() {
return isVersioned && version != null;
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public <Y> SingularAttribute<? super X, Y> getVersion(Class<Y> javaType) {
if ( ! hasVersionAttribute() ) {
return null;
}
final SingularAttribute<? super X, Y> version_;
if ( version != null ) {
version_ = ( SingularAttribute<? super X, Y> ) version;
if ( javaType != version.getJavaType() ) {
throw new IllegalArgumentException( "Version attribute was not of specified type : " + javaType.getName() );
}
}
else {
version_ = requireSupertype().getVersion( javaType );
}
return version_;
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public <Y> SingularAttribute<X, Y> getDeclaredVersion(Class<Y> javaType) {
checkDeclaredVersion();
if ( javaType != version.getJavaType() ) {
throw new IllegalArgumentException( "Version attribute was not of specified type : " + javaType.getName() );
}
return ( SingularAttribute<X, Y> ) version;
}
/**
	 * Used to retrieve the declared version when populating the static metamodel.
	 *
	 * @return The declared version attribute
*/
public SingularAttribute<X, ?> getDeclaredVersion() {
checkDeclaredVersion();
return version;
}
/**
* Centralized check to ensure the version (if one) is actually declared on the class mapped here, as opposed to a
* super class.
*/
protected void checkDeclaredVersion() {
if ( version == null || ( getSupertype() != null && getSupertype().hasVersionAttribute() )) {
throw new IllegalArgumentException( "The version attribute is not declared on this type" );
}
}
public Builder<X> getBuilder() {
final AbstractManagedType.Builder<X> managedBuilder = super.getBuilder();
return new Builder<X>() {
public void applyIdAttribute(SingularAttributeImpl<X, ?> idAttribute) {
AbstractIdentifiableType.this.id = idAttribute;
managedBuilder.addAttribute( idAttribute );
}
public void applyIdClassAttributes(Set<SingularAttribute<? super X,?>> idClassAttributes) {
for ( SingularAttribute<? super X,?> idClassAttribute : idClassAttributes ) {
if ( AbstractIdentifiableType.this == idClassAttribute.getDeclaringType() ) {
@SuppressWarnings({ "unchecked" })
SingularAttribute<X,?> declaredAttribute = ( SingularAttribute<X,?> ) idClassAttribute;
addAttribute( declaredAttribute );
}
}
AbstractIdentifiableType.this.idClassAttributes = idClassAttributes;
}
public void applyVersionAttribute(SingularAttributeImpl<X, ?> versionAttribute) {
AbstractIdentifiableType.this.version = versionAttribute;
managedBuilder.addAttribute( versionAttribute );
}
public void addAttribute(Attribute<X, ?> attribute) {
managedBuilder.addAttribute( attribute );
}
};
}
public static interface Builder<X> extends AbstractManagedType.Builder<X> {
public void applyIdAttribute(SingularAttributeImpl<X, ?> idAttribute);
public void applyIdClassAttributes(Set<SingularAttribute<? super X, ?>> idClassAttributes);
public void applyVersionAttribute(SingularAttributeImpl<X, ?> versionAttribute);
}
}
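
A caller-side sketch (not part of the commit) of the id/version contract this type implements; `emf` is a placeholder for a bootstrapped EntityManagerFactory and the class name is made up:

import javax.persistence.EntityManagerFactory;
import javax.persistence.metamodel.EntityType;
import javax.persistence.metamodel.SingularAttribute;

final class IdentifiableTypeSketch {
	static <T> void describeId(EntityManagerFactory emf, Class<T> entityClass) {
		final EntityType<T> type = emf.getMetamodel().entity( entityClass );
		if ( type.hasSingleIdAttribute() ) {
			// simple id: getId(Class) walks the supertype chain when the id is declared higher up
			final SingularAttribute<? super T, ?> id = type.getId( type.getIdType().getJavaType() );
			System.out.println( "id attribute: " + id.getName() );
		}
		else {
			// id-class mapping: the individual id attributes are exposed instead
			System.out.println( "id-class attributes: " + type.getIdClassAttributes().size() );
		}
		// the version attribute, if any, is flagged on the singular attributes
		for ( SingularAttribute<? super T, ?> attribute : type.getSingularAttributes() ) {
			if ( attribute.isVersion() ) {
				System.out.println( "version attribute: " + attribute.getName() );
			}
		}
	}
}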

View File

@ -1,529 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2009 by Red Hat Inc and/or its affiliates or by
* third-party contributors as indicated by either @author tags or express
* copyright attribution statements applied by the authors. All
* third-party contributions are distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal.legacy;
import java.io.Serializable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.persistence.metamodel.Attribute;
import javax.persistence.metamodel.Bindable;
import javax.persistence.metamodel.CollectionAttribute;
import javax.persistence.metamodel.ListAttribute;
import javax.persistence.metamodel.ManagedType;
import javax.persistence.metamodel.MapAttribute;
import javax.persistence.metamodel.PluralAttribute;
import javax.persistence.metamodel.SetAttribute;
import javax.persistence.metamodel.SingularAttribute;
import org.hibernate.annotations.common.AssertionFailure;
/**
* Defines commonality for the JPA {@link javax.persistence.metamodel.ManagedType} hierarchy of interfaces.
*
* @author Steve Ebersole
*/
public abstract class AbstractManagedType<X>
extends AbstractType<X>
implements ManagedType<X>, Serializable {
private final AbstractManagedType<? super X> superType;
private final Map<String,Attribute<X, ?>> declaredAttributes
= new HashMap<String, Attribute<X,?>>();
private final Map<String, SingularAttribute<X, ?>> declaredSingularAttributes
= new HashMap<String, SingularAttribute<X,?>>();
private final Map<String, PluralAttribute<X, ?, ?>> declaredPluralAttributes
= new HashMap<String, PluralAttribute<X,?,?>>();
protected AbstractManagedType(Class<X> javaType, AbstractManagedType<? super X> superType) {
super( javaType );
this.superType = superType;
}
protected AbstractManagedType<? super X> getSupertype() {
return superType;
}
private boolean locked = false;
public Builder<X> getBuilder() {
if ( locked ) {
throw new IllegalStateException( "Type has been locked" );
}
return new Builder<X>() {
public void addAttribute(Attribute<X,?> attribute) {
declaredAttributes.put( attribute.getName(), attribute );
final Bindable.BindableType bindableType = ( ( Bindable ) attribute ).getBindableType();
switch ( bindableType ) {
case SINGULAR_ATTRIBUTE : {
declaredSingularAttributes.put( attribute.getName(), (SingularAttribute<X,?>) attribute );
break;
}
case PLURAL_ATTRIBUTE : {
declaredPluralAttributes.put(attribute.getName(), (PluralAttribute<X,?,?>) attribute );
break;
}
default : {
throw new AssertionFailure( "unknown bindable type: " + bindableType );
}
}
}
};
}
public void lock() {
locked = true;
}
public static interface Builder<X> {
public void addAttribute(Attribute<X, ?> attribute);
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public Set<Attribute<? super X, ?>> getAttributes() {
HashSet attributes = new HashSet<Attribute<X, ?>>( declaredAttributes.values() );
if ( getSupertype() != null ) {
attributes.addAll( getSupertype().getAttributes() );
}
return attributes;
}
/**
* {@inheritDoc}
*/
public Set<Attribute<X, ?>> getDeclaredAttributes() {
return new HashSet<Attribute<X, ?>>( declaredAttributes.values() );
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public Attribute<? super X, ?> getAttribute(String name) {
Attribute<? super X, ?> attribute = declaredAttributes.get( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getAttribute( name );
}
return attribute;
}
/**
* {@inheritDoc}
*/
public Attribute<X, ?> getDeclaredAttribute(String name) {
final Attribute<X, ?> attr = declaredAttributes.get( name );
checkNotNull( "Attribute ", attr, name );
return attr;
}
private void checkNotNull(String attributeType, Attribute<?,?> attribute, String name) {
if ( attribute == null ) {
throw new IllegalArgumentException( attributeType + " named " + name + " is not present" );
}
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public Set<SingularAttribute<? super X, ?>> getSingularAttributes() {
HashSet attributes = new HashSet<SingularAttribute<X, ?>>( declaredSingularAttributes.values() );
if ( getSupertype() != null ) {
attributes.addAll( getSupertype().getSingularAttributes() );
}
return attributes;
}
/**
* {@inheritDoc}
*/
public Set<SingularAttribute<X, ?>> getDeclaredSingularAttributes() {
return new HashSet<SingularAttribute<X, ?>>( declaredSingularAttributes.values() );
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public SingularAttribute<? super X, ?> getSingularAttribute(String name) {
SingularAttribute<? super X, ?> attribute = declaredSingularAttributes.get( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getSingularAttribute( name );
}
return attribute;
}
/**
* {@inheritDoc}
*/
public SingularAttribute<X, ?> getDeclaredSingularAttribute(String name) {
final SingularAttribute<X, ?> attr = declaredSingularAttributes.get( name );
checkNotNull( "SingularAttribute ", attr, name );
return attr;
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public <Y> SingularAttribute<? super X, Y> getSingularAttribute(String name, Class<Y> type) {
SingularAttribute<? super X, ?> attribute = declaredSingularAttributes.get( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getSingularAttribute( name );
}
checkTypeForSingleAttribute( "SingularAttribute ", attribute, name, type );
return ( SingularAttribute<? super X, Y> ) attribute;
}
/**
* {@inheritDoc}
*/
@SuppressWarnings( "unchecked")
public <Y> SingularAttribute<X, Y> getDeclaredSingularAttribute(String name, Class<Y> javaType) {
final SingularAttribute<X, ?> attr = declaredSingularAttributes.get( name );
checkTypeForSingleAttribute( "SingularAttribute ", attr, name, javaType );
return ( SingularAttribute<X, Y> ) attr;
}
private <Y> void checkTypeForSingleAttribute(
String attributeType,
SingularAttribute<?,?> attribute,
String name,
Class<Y> javaType) {
if ( attribute == null || ( javaType != null && !attribute.getBindableJavaType().equals( javaType ) ) ) {
if ( isPrimitiveVariant( attribute, javaType ) ) {
return;
}
throw new IllegalArgumentException(
attributeType + " named " + name
+ ( javaType != null ? " and of type " + javaType.getName() : "" )
+ " is not present"
);
}
}
@SuppressWarnings({ "SimplifiableIfStatement" })
protected <Y> boolean isPrimitiveVariant(SingularAttribute<?,?> attribute, Class<Y> javaType) {
if ( attribute == null ) {
return false;
}
Class declaredType = attribute.getBindableJavaType();
if ( declaredType.isPrimitive() ) {
return ( Boolean.class.equals( javaType ) && Boolean.TYPE.equals( declaredType ) )
|| ( Character.class.equals( javaType ) && Character.TYPE.equals( declaredType ) )
|| ( Byte.class.equals( javaType ) && Byte.TYPE.equals( declaredType ) )
|| ( Short.class.equals( javaType ) && Short.TYPE.equals( declaredType ) )
|| ( Integer.class.equals( javaType ) && Integer.TYPE.equals( declaredType ) )
|| ( Long.class.equals( javaType ) && Long.TYPE.equals( declaredType ) )
|| ( Float.class.equals( javaType ) && Float.TYPE.equals( declaredType ) )
|| ( Double.class.equals( javaType ) && Double.TYPE.equals( declaredType ) );
}
if ( javaType.isPrimitive() ) {
return ( Boolean.class.equals( declaredType ) && Boolean.TYPE.equals( javaType ) )
|| ( Character.class.equals( declaredType ) && Character.TYPE.equals( javaType ) )
|| ( Byte.class.equals( declaredType ) && Byte.TYPE.equals( javaType ) )
|| ( Short.class.equals( declaredType ) && Short.TYPE.equals( javaType ) )
|| ( Integer.class.equals( declaredType ) && Integer.TYPE.equals( javaType ) )
|| ( Long.class.equals( declaredType ) && Long.TYPE.equals( javaType ) )
|| ( Float.class.equals( declaredType ) && Float.TYPE.equals( javaType ) )
|| ( Double.class.equals( declaredType ) && Double.TYPE.equals( javaType ) );
}
return false;
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public Set<PluralAttribute<? super X, ?, ?>> getPluralAttributes() {
HashSet attributes = new HashSet<PluralAttribute<? super X, ?, ?>>( declaredPluralAttributes.values() );
if ( getSupertype() != null ) {
attributes.addAll( getSupertype().getPluralAttributes() );
}
return attributes;
}
/**
* {@inheritDoc}
*/
public Set<PluralAttribute<X, ?, ?>> getDeclaredPluralAttributes() {
return new HashSet<PluralAttribute<X,?,?>>( declaredPluralAttributes.values() );
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public CollectionAttribute<? super X, ?> getCollection(String name) {
PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
basicCollectionCheck( attribute, name );
return ( CollectionAttribute<X, ?> ) attribute;
}
private PluralAttribute<? super X, ?, ?> getPluralAttribute(String name) {
return declaredPluralAttributes.get( name );
}
private void basicCollectionCheck(PluralAttribute<? super X, ?, ?> attribute, String name) {
checkNotNull( "CollectionAttribute", attribute, name );
if ( ! CollectionAttribute.class.isAssignableFrom( attribute.getClass() ) ) {
throw new IllegalArgumentException( name + " is not a CollectionAttribute: " + attribute.getClass() );
}
}
/**
* {@inheritDoc}
*/
@SuppressWarnings( "unchecked")
public CollectionAttribute<X, ?> getDeclaredCollection(String name) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
basicCollectionCheck( attribute, name );
return ( CollectionAttribute<X, ?> ) attribute;
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public SetAttribute<? super X, ?> getSet(String name) {
PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
basicSetCheck( attribute, name );
return (SetAttribute<? super X, ?>) attribute;
}
private void basicSetCheck(PluralAttribute<? super X, ?, ?> attribute, String name) {
checkNotNull( "SetAttribute", attribute, name );
if ( ! SetAttribute.class.isAssignableFrom( attribute.getClass() ) ) {
throw new IllegalArgumentException( name + " is not a SetAttribute: " + attribute.getClass() );
}
}
/**
* {@inheritDoc}
*/
@SuppressWarnings( "unchecked")
public SetAttribute<X, ?> getDeclaredSet(String name) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
basicSetCheck( attribute, name );
return ( SetAttribute<X, ?> ) attribute;
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public ListAttribute<? super X, ?> getList(String name) {
PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
basicListCheck( attribute, name );
return (ListAttribute<? super X, ?>) attribute;
}
private void basicListCheck(PluralAttribute<? super X, ?, ?> attribute, String name) {
checkNotNull( "ListAttribute", attribute, name );
if ( ! ListAttribute.class.isAssignableFrom( attribute.getClass() ) ) {
throw new IllegalArgumentException( name + " is not a ListAttribute: " + attribute.getClass() );
}
}
/**
* {@inheritDoc}
*/
public ListAttribute<X, ?> getDeclaredList(String name) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
basicListCheck( attribute, name );
return ( ListAttribute<X, ?> ) attribute;
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public MapAttribute<? super X, ?, ?> getMap(String name) {
PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
basicMapCheck( attribute, name );
return (MapAttribute<? super X, ?, ?>) attribute;
}
private void basicMapCheck(PluralAttribute<? super X, ?, ?> attribute, String name) {
checkNotNull( "MapAttribute", attribute, name );
if ( ! MapAttribute.class.isAssignableFrom( attribute.getClass() ) ) {
throw new IllegalArgumentException( name + " is not a MapAttribute: " + attribute.getClass() );
}
}
/**
* {@inheritDoc}
*/
public MapAttribute<X, ?, ?> getDeclaredMap(String name) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
basicMapCheck( attribute, name );
return ( MapAttribute<X,?,?> ) attribute;
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public <E> CollectionAttribute<? super X, E> getCollection(String name, Class<E> elementType) {
PluralAttribute<? super X, ?, ?> attribute = declaredPluralAttributes.get( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
checkCollectionElementType( attribute, name, elementType );
return ( CollectionAttribute<? super X, E> ) attribute;
}
/**
* {@inheritDoc}
*/
public <E> CollectionAttribute<X, E> getDeclaredCollection(String name, Class<E> elementType) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
checkCollectionElementType( attribute, name, elementType );
return ( CollectionAttribute<X, E> ) attribute;
}
private <E> void checkCollectionElementType(PluralAttribute<?,?,?> attribute, String name, Class<E> elementType) {
checkTypeForPluralAttributes( "CollectionAttribute", attribute, name, elementType, PluralAttribute.CollectionType.COLLECTION );
}
private <E> void checkTypeForPluralAttributes(
String attributeType,
PluralAttribute<?,?,?> attribute,
String name,
Class<E> elementType,
PluralAttribute.CollectionType collectionType) {
if ( attribute == null
|| ( elementType != null && !attribute.getBindableJavaType().equals( elementType ) )
|| attribute.getCollectionType() != collectionType ) {
throw new IllegalArgumentException(
attributeType + " named " + name
+ ( elementType != null ? " and of element type " + elementType : "" )
+ " is not present"
);
}
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public <E> SetAttribute<? super X, E> getSet(String name, Class<E> elementType) {
PluralAttribute<? super X, ?, ?> attribute = declaredPluralAttributes.get( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
checkSetElementType( attribute, name, elementType );
return ( SetAttribute<? super X, E> ) attribute;
}
private <E> void checkSetElementType(PluralAttribute<? super X, ?, ?> attribute, String name, Class<E> elementType) {
checkTypeForPluralAttributes( "SetAttribute", attribute, name, elementType, PluralAttribute.CollectionType.SET );
}
/**
* {@inheritDoc}
*/
public <E> SetAttribute<X, E> getDeclaredSet(String name, Class<E> elementType) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
checkSetElementType( attribute, name, elementType );
return ( SetAttribute<X, E> ) attribute;
}
/**
* {@inheritDoc}
*/
@SuppressWarnings({ "unchecked" })
public <E> ListAttribute<? super X, E> getList(String name, Class<E> elementType) {
PluralAttribute<? super X, ?, ?> attribute = declaredPluralAttributes.get( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
checkListElementType( attribute, name, elementType );
return ( ListAttribute<? super X, E> ) attribute;
}
private <E> void checkListElementType(PluralAttribute<? super X, ?, ?> attribute, String name, Class<E> elementType) {
checkTypeForPluralAttributes( "ListAttribute", attribute, name, elementType, PluralAttribute.CollectionType.LIST );
}
/**
* {@inheritDoc}
*/
public <E> ListAttribute<X, E> getDeclaredList(String name, Class<E> elementType) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
checkListElementType( attribute, name, elementType );
return ( ListAttribute<X, E> ) attribute;
}
@SuppressWarnings({ "unchecked" })
public <K, V> MapAttribute<? super X, K, V> getMap(String name, Class<K> keyType, Class<V> valueType) {
PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name );
if ( attribute == null && getSupertype() != null ) {
attribute = getSupertype().getPluralAttribute( name );
}
checkMapValueType( attribute, name, valueType );
final MapAttribute<? super X, K, V> mapAttribute = ( MapAttribute<? super X, K, V> ) attribute;
checkMapKeyType( mapAttribute, name, keyType );
return mapAttribute;
}
private <V> void checkMapValueType(PluralAttribute<? super X, ?, ?> attribute, String name, Class<V> valueType) {
checkTypeForPluralAttributes( "MapAttribute", attribute, name, valueType, PluralAttribute.CollectionType.MAP);
}
private <K,V> void checkMapKeyType(MapAttribute<? super X, K, V> mapAttribute, String name, Class<K> keyType) {
if ( mapAttribute.getKeyJavaType() != keyType ) {
throw new IllegalArgumentException( "MapAttribute named " + name + " does not support a key of type " + keyType );
}
}
public <K, V> MapAttribute<X, K, V> getDeclaredMap(String name, Class<K> keyType, Class<V> valueType) {
final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name );
checkMapValueType( attribute, name, valueType );
final MapAttribute<X, K, V> mapAttribute = ( MapAttribute<X, K, V> ) attribute;
checkMapKeyType( mapAttribute, name, keyType );
return mapAttribute;
}
}
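
A caller-side sketch (not part of the commit) of two details worth noting in the lookups above: the boxed/primitive tolerance implemented by isPrimitiveVariant(), and the IllegalArgumentException thrown by the declared-attribute getters for unknown names. Here `emf`, the entity class, and the attribute name "id" are placeholders:

import javax.persistence.EntityManagerFactory;
import javax.persistence.metamodel.ManagedType;
import javax.persistence.metamodel.SingularAttribute;

final class ManagedTypeLookupSketch {
	static void lookup(EntityManagerFactory emf, Class<?> entityClass) {
		final ManagedType<?> type = emf.getMetamodel().managedType( entityClass );

		// typed lookup: a primitive `long id` field still matches a request for Long.class
		final SingularAttribute<?, Long> id = type.getSingularAttribute( "id", Long.class );
		System.out.println( id.getName() + " : " + id.getJavaType() );

		// declared-attribute getters reject unknown names
		try {
			type.getDeclaredAttribute( "noSuchAttribute" );
		}
		catch (IllegalArgumentException expected) {
			// "... named noSuchAttribute is not present"
		}
	}
}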

View File

@ -1,46 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal.legacy;
import java.io.Serializable;
import javax.persistence.metamodel.BasicType;
/**
* @author Emmanuel Bernard
*/
public class BasicTypeImpl<X> implements BasicType<X>, Serializable {
private final Class<X> clazz;
private PersistenceType persistenceType;
public PersistenceType getPersistenceType() {
return persistenceType;
}
public Class<X> getJavaType() {
return clazz;
}
public BasicTypeImpl(Class<X> clazz, PersistenceType persistenceType) {
this.clazz = clazz;
this.persistenceType = persistenceType;
}
}

View File

@ -1,56 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal.legacy;
import java.io.Serializable;
import javax.persistence.metamodel.EmbeddableType;
import org.hibernate.type.ComponentType;
/**
* @author Emmanuel Bernard
*/
public class EmbeddableTypeImpl<X>
extends AbstractManagedType<X>
implements EmbeddableType<X>, Serializable {
private final AbstractManagedType parent;
private final ComponentType hibernateType;
public EmbeddableTypeImpl(Class<X> javaType, AbstractManagedType parent, ComponentType hibernateType) {
super( javaType, null );
this.parent = parent;
this.hibernateType = hibernateType;
}
public PersistenceType getPersistenceType() {
return PersistenceType.EMBEDDABLE;
}
public AbstractManagedType getParent() {
return parent;
}
public ComponentType getHibernateType() {
return hibernateType;
}
}

View File

@ -1,68 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal.legacy;
import java.io.Serializable;
import javax.persistence.metamodel.EntityType;
/**
* Defines the Hibernate implementation of the JPA {@link javax.persistence.metamodel.EntityType} contract.
*
* @author Steve Ebersole
* @author Emmanuel Bernard
*/
public class EntityTypeImpl<X>
extends AbstractIdentifiableType<X>
implements EntityType<X>, Serializable {
private final String jpaEntityName;
public EntityTypeImpl(
Class<X> javaType,
AbstractIdentifiableType<? super X> superType,
String jpaEntityName,
boolean hasIdentifierProperty,
boolean isVersioned) {
super( javaType, superType, hasIdentifierProperty, isVersioned );
this.jpaEntityName = jpaEntityName;
}
public String getName() {
return jpaEntityName;
}
public BindableType getBindableType() {
return BindableType.ENTITY_TYPE;
}
public Class<X> getBindableJavaType() {
return getJavaType();
}
public PersistenceType getPersistenceType() {
return PersistenceType.ENTITY;
}
@Override
protected boolean requiresSupertypeForNonDeclaredIdentifier() {
return true;
}
}
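
A caller-side sketch (not part of the commit); `emf` and the class name are placeholders. getName() here is the JPA entity name, which is also what JPQL references, and the bindable java type is the entity class itself:

import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.metamodel.EntityType;

final class EntityTypeSketch {
	static <T> List<T> loadAll(EntityManagerFactory emf, Class<T> entityClass) {
		final EntityType<T> type = emf.getMetamodel().entity( entityClass );
		assert type.getBindableJavaType() == entityClass;

		final EntityManager em = emf.createEntityManager();
		try {
			// the JPA entity name doubles as the JPQL entity reference
			return em.createQuery( "select e from " + type.getName() + " e", entityClass ).getResultList();
		}
		finally {
			em.close();
		}
	}
}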

View File

@ -1,24 +0,0 @@
package org.hibernate.jpa.metamodel.internal.legacy;
import javax.persistence.metamodel.MappedSuperclassType;
/**
* @author Emmanuel Bernard
*/
public class MappedSuperclassTypeImpl<X> extends AbstractIdentifiableType<X> implements MappedSuperclassType<X> {
public MappedSuperclassTypeImpl(
Class<X> javaType,
AbstractIdentifiableType<? super X> superType,
boolean hasIdentifierProperty,
boolean versioned) {
super( javaType, superType, hasIdentifierProperty, versioned );
}
public PersistenceType getPersistenceType() {
return PersistenceType.MAPPED_SUPERCLASS;
}
@Override
protected boolean requiresSupertypeForNonDeclaredIdentifier() {
return false;
}
}

View File

@ -1,510 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal.legacy;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.persistence.metamodel.Attribute;
import javax.persistence.metamodel.IdentifiableType;
import javax.persistence.metamodel.MappedSuperclassType;
import javax.persistence.metamodel.SingularAttribute;
import org.jboss.logging.Logger;
import org.hibernate.annotations.common.AssertionFailure;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.KeyValue;
import org.hibernate.mapping.MappedSuperclass;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
/**
* Defines a context for storing information during the building of the {@link MetamodelImpl}.
* <p/>
* This contextual information includes data needing to be processed in a second pass as well as
* cross-references into the built metamodel classes.
* <p/>
* At the end of the day, clients are interested in the {@link #getEntityTypeMap} and {@link #getEmbeddableTypeMap}
* results, which represent all the registered {@linkplain #registerEntityType entities} and
* {@linkplain #registerEmbeddedableType embeddables} respectively.
*
* @author Steve Ebersole
* @author Emmanuel Bernard
*/
class MetadataContext {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class,
MetadataContext.class.getName());
private final SessionFactoryImplementor sessionFactory;
private final boolean ignoreUnsupported;
private final AttributeFactory attributeFactory = new AttributeFactory( this );
private Map<Class<?>,EntityTypeImpl<?>> entityTypes
= new HashMap<Class<?>, EntityTypeImpl<?>>();
private Map<String,EntityTypeImpl<?>> entityTypesByEntityName
= new HashMap<String, EntityTypeImpl<?>>();
private Map<PersistentClass,EntityTypeImpl<?>> entityTypesByPersistentClass
= new HashMap<PersistentClass,EntityTypeImpl<?>>();
private Map<Class<?>, EmbeddableTypeImpl<?>> embeddables
= new HashMap<Class<?>, EmbeddableTypeImpl<?>>();
private Map<MappedSuperclass, MappedSuperclassTypeImpl<?>> mappedSuperclassByMappedSuperclassMapping
= new HashMap<MappedSuperclass,MappedSuperclassTypeImpl<?>>();
//this list contains MappedSuperclass and EntityTypes ordered by superclass first
private List<Object> orderedMappings = new ArrayList<Object>();
/**
	 * Stack of PersistentClass instances being processed. The last element in the list is the top of the stack.
*
*/
private List<PersistentClass> stackOfPersistentClassesBeingProcessed
= new ArrayList<PersistentClass>();
private Map<MappedSuperclassTypeImpl<?>, PersistentClass> mappedSuperClassTypeToPersistentClass
= new HashMap<MappedSuperclassTypeImpl<?>, PersistentClass>();
public MetadataContext(SessionFactoryImplementor sessionFactory, boolean ignoreUnsupported) {
this.sessionFactory = sessionFactory;
this.ignoreUnsupported = ignoreUnsupported;
}
/*package*/ SessionFactoryImplementor getSessionFactory() {
return sessionFactory;
}
/*package*/ boolean isIgnoreUnsupported() {
return ignoreUnsupported;
}
/**
* Retrieves the {@linkplain Class java type} to {@link EntityTypeImpl} map.
*
* @return The {@linkplain Class java type} to {@link EntityTypeImpl} map.
*/
public Map<Class<?>, EntityTypeImpl<?>> getEntityTypeMap() {
return Collections.unmodifiableMap( entityTypes );
}
public Map<Class<?>, EmbeddableTypeImpl<?>> getEmbeddableTypeMap() {
return Collections.unmodifiableMap( embeddables );
}
public Map<Class<?>,MappedSuperclassType<?>> getMappedSuperclassTypeMap() {
// we need to actually build this map...
final Map<Class<?>,MappedSuperclassType<?>> mappedSuperClassTypeMap = CollectionHelper.mapOfSize(
mappedSuperclassByMappedSuperclassMapping.size()
);
for ( MappedSuperclassTypeImpl mappedSuperclassType : mappedSuperclassByMappedSuperclassMapping.values() ) {
mappedSuperClassTypeMap.put(
mappedSuperclassType.getJavaType(),
mappedSuperclassType
);
}
return mappedSuperClassTypeMap;
}
/*package*/ void registerEntityType(PersistentClass persistentClass, EntityTypeImpl<?> entityType) {
entityTypes.put( entityType.getBindableJavaType(), entityType );
entityTypesByEntityName.put( persistentClass.getEntityName(), entityType );
entityTypesByPersistentClass.put( persistentClass, entityType );
orderedMappings.add( persistentClass );
}
/*package*/ void registerEmbeddedableType(EmbeddableTypeImpl<?> embeddableType) {
embeddables.put( embeddableType.getJavaType(), embeddableType );
}
/*package*/ void registerMappedSuperclassType(MappedSuperclass mappedSuperclass,
MappedSuperclassTypeImpl<?> mappedSuperclassType) {
mappedSuperclassByMappedSuperclassMapping.put( mappedSuperclass, mappedSuperclassType );
orderedMappings.add( mappedSuperclass );
mappedSuperClassTypeToPersistentClass.put( mappedSuperclassType, getEntityWorkedOn() );
}
/**
	 * Given a Hibernate {@link org.hibernate.mapping.PersistentClass}, locate the corresponding JPA {@link javax.persistence.metamodel.EntityType}
	 * implementation. May return null if the given {@link org.hibernate.mapping.PersistentClass} has not yet been processed.
	 *
	 * @param persistentClass The Hibernate (config time) metamodel instance representing an entity.
	 * @return The corresponding JPA {@link javax.persistence.metamodel.EntityType}, or null if not yet processed.
*/
public EntityTypeImpl<?> locateEntityType(PersistentClass persistentClass) {
return entityTypesByPersistentClass.get( persistentClass );
}
/**
	 * Given a Java {@link Class}, locate the corresponding JPA {@link javax.persistence.metamodel.EntityType}. May
	 * return null, which could mean that no such mapping exists, at least at this time.
	 *
	 * @param javaType The java class.
	 * @return The corresponding JPA {@link javax.persistence.metamodel.EntityType}, or null.
*/
public EntityTypeImpl<?> locateEntityType(Class<?> javaType) {
return entityTypes.get( javaType );
}
/**
	 * Given an entity-name, locate the corresponding JPA {@link javax.persistence.metamodel.EntityType}. May
	 * return null, which could mean that no such mapping exists, at least at this time.
	 *
	 * @param entityName The entity-name.
	 * @return The corresponding JPA {@link javax.persistence.metamodel.EntityType}, or null.
*/
public EntityTypeImpl<?> locateEntityType(String entityName) {
return entityTypesByEntityName.get( entityName );
}
@SuppressWarnings({ "unchecked" })
public void wrapUp() {
LOG.trace("Wrapping up metadata context...");
//we need to process types from superclasses to subclasses
for (Object mapping : orderedMappings) {
if ( PersistentClass.class.isAssignableFrom( mapping.getClass() ) ) {
@SuppressWarnings( "unchecked" )
final PersistentClass safeMapping = (PersistentClass) mapping;
LOG.trace("Starting entity [" + safeMapping.getEntityName() + "]");
try {
final EntityTypeImpl<?> jpa2Mapping = entityTypesByPersistentClass.get( safeMapping );
if ( ! safeMapping.getEntityName().equals( jpa2Mapping.getJavaType().getName() ) ) {
// skip it
continue;
}
if ( sessionFactory.getEntityPersister( safeMapping.getEntityName() ).getEntityMetamodel() == null ) {
// skip it
continue;
}
applyIdMetadata( safeMapping, jpa2Mapping );
applyVersionAttribute( safeMapping, jpa2Mapping );
Iterator<Property> properties = safeMapping.getDeclaredPropertyIterator();
while ( properties.hasNext() ) {
final Property property = properties.next();
if ( property.getValue() == safeMapping.getIdentifierMapper() ) {
// property represents special handling for id-class mappings but we have already
// accounted for the embedded property mappings in #applyIdMetadata &&
// #buildIdClassAttributes
continue;
}
if ( safeMapping.isVersioned() && property == safeMapping.getVersion() ) {
// skip the version property, it was already handled previously.
continue;
}
final Attribute attribute = attributeFactory.buildAttribute( jpa2Mapping, property );
if ( attribute != null ) {
jpa2Mapping.getBuilder().addAttribute( attribute );
}
}
jpa2Mapping.lock();
populateStaticMetamodel( jpa2Mapping );
}
finally {
LOG.trace("Completed entity [" + safeMapping.getEntityName() + "]");
}
}
else if ( MappedSuperclass.class.isAssignableFrom( mapping.getClass() ) ) {
@SuppressWarnings( "unchecked" )
final MappedSuperclass safeMapping = (MappedSuperclass) mapping;
LOG.trace("Starting mapped superclass [" + safeMapping.getMappedClass().getName() + "]");
try {
final MappedSuperclassTypeImpl<?> jpa2Mapping = mappedSuperclassByMappedSuperclassMapping.get(
safeMapping
);
applyIdMetadata( safeMapping, jpa2Mapping );
applyVersionAttribute( safeMapping, jpa2Mapping );
Iterator<Property> properties = safeMapping.getDeclaredPropertyIterator();
while ( properties.hasNext() ) {
final Property property = properties.next();
if ( safeMapping.isVersioned() && property == safeMapping.getVersion() ) {
// skip the version property, it was already handled previously.
continue;
}
final Attribute attribute = attributeFactory.buildAttribute( jpa2Mapping, property );
if ( attribute != null ) {
jpa2Mapping.getBuilder().addAttribute( attribute );
}
}
jpa2Mapping.lock();
populateStaticMetamodel( jpa2Mapping );
}
finally {
LOG.trace("Completed mapped superclass [" + safeMapping.getMappedClass().getName() + "]");
}
}
else {
throw new AssertionFailure( "Unexpected mapping type: " + mapping.getClass() );
}
}
for ( EmbeddableTypeImpl embeddable : embeddables.values() ) {
populateStaticMetamodel( embeddable );
}
}
private <X> void applyIdMetadata(PersistentClass persistentClass, EntityTypeImpl<X> jpaEntityType) {
if ( persistentClass.hasIdentifierProperty() ) {
final Property declaredIdentifierProperty = persistentClass.getDeclaredIdentifierProperty();
if (declaredIdentifierProperty != null) {
jpaEntityType.getBuilder().applyIdAttribute(
attributeFactory.buildIdAttribute( jpaEntityType, declaredIdentifierProperty )
);
}
}
else if ( persistentClass.hasIdentifierMapper() ) {
@SuppressWarnings( "unchecked")
Iterator<Property> propertyIterator = persistentClass.getIdentifierMapper().getPropertyIterator();
Set<SingularAttribute<? super X, ?>> attributes = buildIdClassAttributes( jpaEntityType, propertyIterator );
jpaEntityType.getBuilder().applyIdClassAttributes( attributes );
}
else {
final KeyValue value = persistentClass.getIdentifier();
if (value instanceof Component ) {
final Component component = ( Component ) value;
if ( component.getPropertySpan() > 1 ) {
					//FIXME we are a Hibernate embedded id (ie not type)
}
else {
//FIXME take care of declared vs non declared property
jpaEntityType.getBuilder().applyIdAttribute(
attributeFactory.buildIdAttribute(
jpaEntityType,
(Property) component.getPropertyIterator().next() )
);
}
}
}
}
private <X> void applyIdMetadata(MappedSuperclass mappingType, MappedSuperclassTypeImpl<X> jpaMappingType) {
if ( mappingType.hasIdentifierProperty() ) {
final Property declaredIdentifierProperty = mappingType.getDeclaredIdentifierProperty();
if (declaredIdentifierProperty != null) {
jpaMappingType.getBuilder().applyIdAttribute(
attributeFactory.buildIdAttribute( jpaMappingType, declaredIdentifierProperty )
);
}
}
//a MappedSuperclass can have no identifier if the id is declared lower in the hierarchy
else if ( mappingType.getIdentifierMapper() != null ){
@SuppressWarnings( "unchecked")
Iterator<Property> propertyIterator = mappingType.getIdentifierMapper().getPropertyIterator();
Set<SingularAttribute<? super X, ?>> attributes = buildIdClassAttributes( jpaMappingType, propertyIterator );
jpaMappingType.getBuilder().applyIdClassAttributes( attributes );
}
}
private <X> void applyVersionAttribute(PersistentClass persistentClass, EntityTypeImpl<X> jpaEntityType) {
final Property declaredVersion = persistentClass.getDeclaredVersion();
if (declaredVersion != null) {
jpaEntityType.getBuilder().applyVersionAttribute(
attributeFactory.buildVersionAttribute( jpaEntityType, declaredVersion )
);
}
}
private <X> void applyVersionAttribute(MappedSuperclass mappingType, MappedSuperclassTypeImpl<X> jpaMappingType) {
final Property declaredVersion = mappingType.getDeclaredVersion();
if ( declaredVersion != null ) {
jpaMappingType.getBuilder().applyVersionAttribute(
attributeFactory.buildVersionAttribute( jpaMappingType, declaredVersion )
);
}
}
private <X> Set<SingularAttribute<? super X, ?>> buildIdClassAttributes(
AbstractIdentifiableType<X> ownerType,
Iterator<Property> propertyIterator) {
LOG.trace("Building old-school composite identifier [" + ownerType.getJavaType().getName() + "]");
Set<SingularAttribute<? super X, ?>> attributes = new HashSet<SingularAttribute<? super X, ?>>();
while ( propertyIterator.hasNext() ) {
attributes.add( attributeFactory.buildIdAttribute( ownerType, propertyIterator.next() ) );
}
return attributes;
}
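// Illustrative sketch (not part of this change; entity and field names are hypothetical): an
// "old-school" composite identifier mapped via a JPA @IdClass, which is the kind of mapping whose
// id properties flow through #buildIdClassAttributes above.
//
//     @Entity
//     @IdClass( OrderLinePK.class )
//     public class OrderLine {
//         @Id private Long orderId;
//         @Id private int lineNumber;
//     }
//
//     public class OrderLinePK implements Serializable {
//         private Long orderId;
//         private int lineNumber;
//         // equals() and hashCode() omitted for brevity
//     }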
private <X> void populateStaticMetamodel(AbstractManagedType<X> managedType) {
final Class<X> managedTypeClass = managedType.getJavaType();
final String metamodelClassName = managedTypeClass.getName() + "_";
try {
final Class metamodelClass = Class.forName( metamodelClassName, true, managedTypeClass.getClassLoader() );
// we found the class; so populate it...
registerAttributes( metamodelClass, managedType );
}
catch ( ClassNotFoundException ignore ) {
// nothing to do...
}
// todo : this does not account for @MappedSuperclass, mainly because this is not being tracked in our
// internal metamodel as populated from the annotations properly
AbstractManagedType<? super X> superType = managedType.getSupertype();
if ( superType != null ) {
populateStaticMetamodel( superType );
}
}
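// Illustrative sketch (not part of this change; class and attribute names are hypothetical): the
// kind of generated static metamodel class that #populateStaticMetamodel looks up via the JPA
// "<managed class name>_" convention and that #registerAttributes populates by static field injection.
//
//     @javax.persistence.metamodel.StaticMetamodel( Order.class )
//     public abstract class Order_ {
//         public static volatile SingularAttribute<Order, Long> id;
//         public static volatile SingularAttribute<Order, String> customerName;
//         public static volatile ListAttribute<Order, OrderLine> lines;
//     }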
private final Set<Class> processedMetamodelClasses = new HashSet<Class>();
private <X> void registerAttributes(Class metamodelClass, AbstractManagedType<X> managedType) {
if ( ! processedMetamodelClasses.add( metamodelClass ) ) {
return;
}
// push the attributes on to the metamodel class...
for ( Attribute<X, ?> attribute : managedType.getDeclaredAttributes() ) {
registerAttribute( metamodelClass, attribute );
}
if ( IdentifiableType.class.isInstance( managedType ) ) {
final AbstractIdentifiableType<X> entityType = ( AbstractIdentifiableType<X> ) managedType;
// handle version
if ( entityType.hasDeclaredVersionAttribute() ) {
registerAttribute( metamodelClass, entityType.getDeclaredVersion() );
}
// handle id-class mappings specially
if ( ! entityType.hasSingleIdAttribute() ) {
final Set<SingularAttribute<? super X, ?>> attributes = entityType.getIdClassAttributes();
if ( attributes != null ) {
for ( SingularAttribute<? super X, ?> attribute : attributes ) {
registerAttribute( metamodelClass, attribute );
}
}
}
}
}
private <X> void registerAttribute(Class metamodelClass, Attribute<X, ?> attribute) {
final String name = attribute.getName();
try {
// there is a shortcoming in the existing Hibernate code in terms of the way MappedSuperclass
// support was bolted on which comes to bear right here when the attribute is an embeddable type
// defined on a MappedSuperclass. We do not have the correct information to determine the
// appropriate attribute declarer in such cases and so the incoming metamodelClass most likely
// does not represent the declarer in such cases.
//
// As a result, in the case of embeddable classes we simply use getField rather than
// getDeclaredField
final Field field = attribute.getPersistentAttributeType() == Attribute.PersistentAttributeType.EMBEDDED
? metamodelClass.getField( name )
: metamodelClass.getDeclaredField( name );
try {
if ( ! field.isAccessible() ) {
// should be public anyway, but to be sure...
field.setAccessible( true );
}
field.set( null, attribute );
}
catch ( IllegalAccessException e ) {
// todo : exception type?
throw new AssertionFailure(
"Unable to inject static metamodel attribute : " + metamodelClass.getName() + '#' + name,
e
);
}
catch ( IllegalArgumentException e ) {
// most likely a mismatch in the type we are injecting and the defined field; this represents a
// mismatch in how the annotation processor interpreted the attribute and how our metamodel
// and/or annotation binder did.
// This is particularly the case as arrays are not handled properly by the StaticMetamodel generator
// throw new AssertionFailure(
// "Illegal argument on static metamodel field injection : " + metamodelClass.getName() + '#' + name
// + "; expected type : " + attribute.getClass().getName()
// + "; encountered type : " + field.getType().getName()
// );
LOG.illegalArgumentOnStaticMetamodelFieldInjection(metamodelClass.getName(),
name,
attribute.getClass().getName(),
field.getType().getName());
}
}
catch ( NoSuchFieldException e ) {
LOG.unableToLocateStaticMetamodelField(metamodelClass.getName(), name);
// throw new AssertionFailure(
// "Unable to locate static metamodel field : " + metamodelClass.getName() + '#' + name
// );
}
}
public MappedSuperclassTypeImpl<?> locateMappedSuperclassType(MappedSuperclass mappedSuperclass) {
return mappedSuperclassByMappedSuperclassMapping.get(mappedSuperclass);
}
public void pushEntityWorkedOn(PersistentClass persistentClass) {
stackOfPersistentClassesBeingProcessed.add(persistentClass);
}
public void popEntityWorkedOn(PersistentClass persistentClass) {
final PersistentClass stackTop = stackOfPersistentClassesBeingProcessed.remove(
stackOfPersistentClassesBeingProcessed.size() - 1
);
if (stackTop != persistentClass) {
throw new AssertionFailure( "Inconsistent popping: "
+ persistentClass.getEntityName() + " instead of " + stackTop.getEntityName() );
}
}
private PersistentClass getEntityWorkedOn() {
return stackOfPersistentClassesBeingProcessed.get(
stackOfPersistentClassesBeingProcessed.size() - 1
);
}
public PersistentClass getPersistentClassHostingProperties(MappedSuperclassTypeImpl<?> mappedSuperclassType) {
final PersistentClass persistentClass = mappedSuperClassTypeToPersistentClass.get( mappedSuperclassType );
if (persistentClass == null) {
throw new AssertionFailure( "Could not find PersistentClass for MappedSuperclassType: "
+ mappedSuperclassType.getJavaType() );
}
return persistentClass;
}
public void handleAnyMapping() {
// ANY mappings are currently not supported in the JPA metamodel; see HHH-6589
if ( !isIgnoreUnsupported() ) {
throw new UnsupportedOperationException( "ANY not supported" );
}
}
public void handleArrayMapping() {
// arrays are likewise not representable in the JPA metamodel
if ( !isIgnoreUnsupported() ) {
throw new UnsupportedOperationException( "Arrays not supported" );
}
}
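// Illustrative sketch (not part of this change; annotations shown are org.hibernate.annotations and
// the property names are hypothetical): an @Any association of the kind that ends up in
// #handleAnyMapping -- it has no JPA metamodel representation, so it is either skipped
// (ignoreUnsupported) or rejected with the exception above.
//
//     @Any( metaColumn = @Column( name = "property_type" ) )
//     @AnyMetaDef( idType = "long", metaType = "string", metaValues = {
//             @MetaValue( value = "S", targetEntity = StringPropertyValue.class )
//     } )
//     @JoinColumn( name = "property_id" )
//     private PropertyValue mainProperty;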
}

View File

@ -1,238 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal.legacy;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import javax.persistence.metamodel.EmbeddableType;
import javax.persistence.metamodel.EntityType;
import javax.persistence.metamodel.ManagedType;
import javax.persistence.metamodel.MappedSuperclassType;
import javax.persistence.metamodel.Metamodel;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.MappedSuperclass;
import org.hibernate.mapping.PersistentClass;
/**
* Hibernate implementation of the JPA {@link javax.persistence.metamodel.Metamodel} contract.
*
* @author Steve Ebersole
* @author Emmanuel Bernard
*/
public class MetamodelImpl implements Metamodel, Serializable {
private final Map<Class<?>,EntityTypeImpl<?>> entities;
private final Map<Class<?>, EmbeddableTypeImpl<?>> embeddables;
private final Map<Class<?>, MappedSuperclassType<?>> mappedSuperclassTypeMap;
/**
* Build the metamodel using the information from the collection of Hibernate
* {@link org.hibernate.mapping.PersistentClass} models as well as the Hibernate {@link org.hibernate.SessionFactory}.
*
* @param persistentClasses Iterator over the Hibernate (config-time) metamodel
* @param sessionFactory The Hibernate session factory.
* @return The built metamodel
*
* @deprecated use {@link #buildMetamodel(java.util.Iterator,org.hibernate.engine.spi.SessionFactoryImplementor,boolean)} instead
*/
@Deprecated
public static MetamodelImpl buildMetamodel(
Iterator<PersistentClass> persistentClasses,
SessionFactoryImplementor sessionFactory) {
return buildMetamodel(persistentClasses, sessionFactory, false);
}
/**
* Build the metamodel using the information from the collection of Hibernate
* {@link org.hibernate.mapping.PersistentClass} models as well as the Hibernate {@link org.hibernate.SessionFactory}.
*
* @param persistentClasses Iterator over the Hibernate (config-time) metamodel
* @param sessionFactory The Hibernate session factory.
* @param ignoreUnsupported ignore unsupported/unknown annotations (like @Any)
* @return The built metamodel
*/
public static MetamodelImpl buildMetamodel(
Iterator<PersistentClass> persistentClasses,
SessionFactoryImplementor sessionFactory,
boolean ignoreUnsupported) {
MetadataContext context = new MetadataContext( sessionFactory, ignoreUnsupported );
while ( persistentClasses.hasNext() ) {
PersistentClass pc = persistentClasses.next();
if ( pc.getMappedClass() != null ) {
locateOrBuildEntityType( pc, context );
}
}
context.wrapUp();
return new MetamodelImpl( context.getEntityTypeMap(), context.getEmbeddableTypeMap(), context.getMappedSuperclassTypeMap() );
}
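// Illustrative usage sketch (not part of this change; the cfg and sessionFactoryImplementor
// variables are assumed to exist in the caller, e.g. during EntityManagerFactory construction):
//
//     Iterator<PersistentClass> classMappings = cfg.getClassMappings();
//     MetamodelImpl metamodel = MetamodelImpl.buildMetamodel( classMappings, sessionFactoryImplementor, true );
//     EntityType<Order> orderType = metamodel.entity( Order.class );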
private static EntityTypeImpl<?> locateOrBuildEntityType(PersistentClass persistentClass, MetadataContext context) {
EntityTypeImpl<?> entityType = context.locateEntityType( persistentClass );
if ( entityType == null ) {
entityType = buildEntityType( persistentClass, context );
}
return entityType;
}
//TODO remove / reduce @SW scope
@SuppressWarnings( "unchecked" )
private static EntityTypeImpl<?> buildEntityType(PersistentClass persistentClass, MetadataContext context) {
final Class javaType = persistentClass.getMappedClass();
context.pushEntityWorkedOn(persistentClass);
final MappedSuperclass superMappedSuperclass = persistentClass.getSuperMappedSuperclass();
AbstractIdentifiableType<?> superType = superMappedSuperclass == null
? null
: locateOrBuildMappedsuperclassType( superMappedSuperclass, context );
//no mappedSuperclass, check for a super entity
if (superType == null) {
final PersistentClass superPersistentClass = persistentClass.getSuperclass();
superType = superPersistentClass == null
? null
: locateOrBuildEntityType( superPersistentClass, context );
}
EntityTypeImpl entityType = new EntityTypeImpl(
javaType,
superType,
persistentClass.getJpaEntityName(),
persistentClass.hasIdentifierProperty(),
persistentClass.isVersioned()
);
context.registerEntityType( persistentClass, entityType );
context.popEntityWorkedOn(persistentClass);
return entityType;
}
private static MappedSuperclassTypeImpl<?> locateOrBuildMappedsuperclassType(
MappedSuperclass mappedSuperclass, MetadataContext context) {
MappedSuperclassTypeImpl<?> mappedSuperclassType = context.locateMappedSuperclassType( mappedSuperclass );
if ( mappedSuperclassType == null ) {
mappedSuperclassType = buildMappedSuperclassType(mappedSuperclass, context);
}
return mappedSuperclassType;
}
//TODO remove / reduce @SW scope
@SuppressWarnings( "unchecked" )
private static MappedSuperclassTypeImpl<?> buildMappedSuperclassType(MappedSuperclass mappedSuperclass,
MetadataContext context) {
final MappedSuperclass superMappedSuperclass = mappedSuperclass.getSuperMappedSuperclass();
AbstractIdentifiableType<?> superType = superMappedSuperclass == null
? null
: locateOrBuildMappedsuperclassType( superMappedSuperclass, context );
//no mappedSuperclass, check for a super entity
if (superType == null) {
final PersistentClass superPersistentClass = mappedSuperclass.getSuperPersistentClass();
superType = superPersistentClass == null
? null
: locateOrBuildEntityType( superPersistentClass, context );
}
final Class javaType = mappedSuperclass.getMappedClass();
MappedSuperclassTypeImpl mappedSuperclassType = new MappedSuperclassTypeImpl(
javaType,
superType,
mappedSuperclass.hasIdentifierProperty(),
mappedSuperclass.isVersioned()
);
context.registerMappedSuperclassType( mappedSuperclass, mappedSuperclassType );
return mappedSuperclassType;
}
/**
* Instantiate the metamodel.
*
* @param entities The entity mappings.
* @param embeddables The embeddable (component) mappings.
* @param mappedSuperclassTypeMap The {@link javax.persistence.MappedSuperclass} mappings
*/
private MetamodelImpl(
Map<Class<?>, EntityTypeImpl<?>> entities,
Map<Class<?>, EmbeddableTypeImpl<?>> embeddables,
Map<Class<?>, MappedSuperclassType<?>> mappedSuperclassTypeMap) {
this.entities = entities;
this.embeddables = embeddables;
this.mappedSuperclassTypeMap = mappedSuperclassTypeMap;
}
@Override
@SuppressWarnings({ "unchecked" })
public <X> EntityType<X> entity(Class<X> cls) {
final EntityType<?> entityType = entities.get( cls );
if ( entityType == null ) {
throw new IllegalArgumentException( "Not an entity: " + cls );
}
return (EntityType<X>) entityType;
}
@Override
@SuppressWarnings({ "unchecked" })
public <X> ManagedType<X> managedType(Class<X> cls) {
ManagedType<?> type = entities.get( cls );
if ( type == null ) {
type = mappedSuperclassTypeMap.get( cls );
}
if ( type == null ) {
type = embeddables.get( cls );
}
if ( type == null ) {
throw new IllegalArgumentException( "Not an managed type: " + cls );
}
return (ManagedType<X>) type;
}
@Override
@SuppressWarnings({ "unchecked" })
public <X> EmbeddableType<X> embeddable(Class<X> cls) {
final EmbeddableType<?> embeddableType = embeddables.get( cls );
if ( embeddableType == null ) {
throw new IllegalArgumentException( "Not an embeddable: " + cls );
}
return (EmbeddableType<X>) embeddableType;
}
@Override
public Set<ManagedType<?>> getManagedTypes() {
final int setSize = CollectionHelper.determineProperSizing(
entities.size() + mappedSuperclassTypeMap.size() + embeddables.size()
);
final Set<ManagedType<?>> managedTypes = new HashSet<ManagedType<?>>( setSize );
managedTypes.addAll( entities.values() );
managedTypes.addAll( mappedSuperclassTypeMap.values() );
managedTypes.addAll( embeddables.values() );
return managedTypes;
}
@Override
public Set<EntityType<?>> getEntities() {
return new HashSet<EntityType<?>>( entities.values() );
}
@Override
public Set<EmbeddableType<?>> getEmbeddables() {
return new HashSet<EmbeddableType<?>>( embeddables.values() );
}
}

View File

@ -1,260 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal.legacy;
import java.io.Serializable;
import java.lang.reflect.Member;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.persistence.metamodel.CollectionAttribute;
import javax.persistence.metamodel.ListAttribute;
import javax.persistence.metamodel.MapAttribute;
import javax.persistence.metamodel.PluralAttribute;
import javax.persistence.metamodel.SetAttribute;
import javax.persistence.metamodel.Type;
import org.hibernate.mapping.Property;
/**
* @author Emmanuel Bernard
* @author Steve Ebersole
*/
public abstract class PluralAttributeImpl<X, C, E>
extends AbstractAttribute<X,C>
implements PluralAttribute<X, C, E>, Serializable {
private final Type<E> elementType;
private PluralAttributeImpl(Builder<X,C,E,?> builder) {
super(
builder.property.getName(),
builder.collectionClass,
builder.type,
builder.member,
builder.persistentAttributeType
);
this.elementType = builder.attributeType;
}
public static class Builder<X, C, E, K> {
private final Type<E> attributeType;
private final AbstractManagedType<X> type;
private Member member;
private PersistentAttributeType persistentAttributeType;
private Property property;
private Class<C> collectionClass;
private Type<K> keyType;
private Builder(AbstractManagedType<X> ownerType, Type<E> attrType, Class<C> collectionClass, Type<K> keyType) {
this.type = ownerType;
this.attributeType = attrType;
this.collectionClass = collectionClass;
this.keyType = keyType;
}
public Builder<X,C,E,K> member(Member member) {
this.member = member;
return this;
}
public Builder<X,C,E,K> property(Property property) {
this.property = property;
return this;
}
public Builder<X,C,E,K> persistentAttributeType(PersistentAttributeType attrType) {
this.persistentAttributeType = attrType;
return this;
}
@SuppressWarnings( "unchecked" )
public <K> PluralAttributeImpl<X,C,E> build() {
//apply strict spec rules first
if ( Map.class.equals( collectionClass ) ) {
final Builder<X,Map<K,E>,E,K> builder = (Builder<X,Map<K,E>,E,K>) this;
return ( PluralAttributeImpl<X, C, E> ) new MapAttributeImpl<X,K,E>(
builder
);
}
else if ( Set.class.equals( collectionClass ) ) {
final Builder<X,Set<E>, E,?> builder = (Builder<X, Set<E>, E,?>) this;
return ( PluralAttributeImpl<X, C, E> ) new SetAttributeImpl<X,E>(
builder
);
}
else if ( List.class.equals( collectionClass ) ) {
final Builder<X, List<E>, E,?> builder = (Builder<X, List<E>, E,?>) this;
return ( PluralAttributeImpl<X, C, E> ) new ListAttributeImpl<X,E>(
builder
);
}
else if ( Collection.class.equals( collectionClass ) ) {
final Builder<X, Collection<E>,E,?> builder = (Builder<X, Collection<E>, E,?>) this;
return ( PluralAttributeImpl<X, C, E> ) new CollectionAttributeImpl<X, E>(
builder
);
}
//apply loose rules
if ( Map.class.isAssignableFrom( collectionClass ) ) {
final Builder<X,Map<K,E>,E,K> builder = (Builder<X,Map<K,E>,E,K>) this;
return ( PluralAttributeImpl<X, C, E> ) new MapAttributeImpl<X,K,E>(
builder
);
}
else if ( Set.class.isAssignableFrom( collectionClass ) ) {
final Builder<X,Set<E>, E,?> builder = (Builder<X, Set<E>, E,?>) this;
return ( PluralAttributeImpl<X, C, E> ) new SetAttributeImpl<X,E>(
builder
);
}
else if ( List.class.isAssignableFrom( collectionClass ) ) {
final Builder<X, List<E>, E,?> builder = (Builder<X, List<E>, E,?>) this;
return ( PluralAttributeImpl<X, C, E> ) new ListAttributeImpl<X,E>(
builder
);
}
else if ( Collection.class.isAssignableFrom( collectionClass ) ) {
final Builder<X, Collection<E>,E,?> builder = (Builder<X, Collection<E>, E,?>) this;
return ( PluralAttributeImpl<X, C, E> ) new CollectionAttributeImpl<X, E>(
builder
);
}
throw new UnsupportedOperationException( "Unkown collection: " + collectionClass );
}
}
public static <X,C,E,K> Builder<X,C,E,K> create(
AbstractManagedType<X> ownerType,
Type<E> attrType,
Class<C> collectionClass,
Type<K> keyType) {
return new Builder<X,C,E,K>(ownerType, attrType, collectionClass, keyType);
}
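// Illustrative usage sketch (not part of this change; the owner/element/member/property variables
// are assumed to be resolved by the attribute factory): building a List-valued plural attribute
// through the Builder above.
//
//     PluralAttributeImpl listAttribute = PluralAttributeImpl.create( ownerType, elementType, java.util.List.class, null )
//             .member( member )
//             .property( property )
//             .persistentAttributeType( Attribute.PersistentAttributeType.ONE_TO_MANY )
//             .build();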
/**
* {@inheritDoc}
*/
public Type<E> getElementType() {
return elementType;
}
/**
* {@inheritDoc}
*/
public boolean isAssociation() {
return true;
}
/**
* {@inheritDoc}
*/
public boolean isCollection() {
return true;
}
/**
* {@inheritDoc}
*/
public BindableType getBindableType() {
return BindableType.PLURAL_ATTRIBUTE;
}
/**
* {@inheritDoc}
*/
public Class<E> getBindableJavaType() {
return elementType.getJavaType();
}
static class SetAttributeImpl<X,E> extends PluralAttributeImpl<X,Set<E>,E> implements SetAttribute<X,E> {
SetAttributeImpl(Builder<X,Set<E>,E,?> xceBuilder) {
super( xceBuilder );
}
/**
* {@inheritDoc}
*/
public CollectionType getCollectionType() {
return CollectionType.SET;
}
}
static class CollectionAttributeImpl<X,E> extends PluralAttributeImpl<X,Collection<E>,E> implements CollectionAttribute<X,E> {
CollectionAttributeImpl(Builder<X, Collection<E>,E,?> xceBuilder) {
super( xceBuilder );
}
/**
* {@inheritDoc}
*/
public CollectionType getCollectionType() {
return CollectionType.COLLECTION;
}
}
static class ListAttributeImpl<X,E> extends PluralAttributeImpl<X,List<E>,E> implements ListAttribute<X,E> {
ListAttributeImpl(Builder<X,List<E>,E,?> xceBuilder) {
super( xceBuilder );
}
/**
* {@inheritDoc}
*/
public CollectionType getCollectionType() {
return CollectionType.LIST;
}
}
static class MapAttributeImpl<X,K,V> extends PluralAttributeImpl<X,Map<K,V>,V> implements MapAttribute<X,K,V> {
private final Type<K> keyType;
MapAttributeImpl(Builder<X,Map<K,V>,V,K> xceBuilder) {
super( xceBuilder );
this.keyType = xceBuilder.keyType;
}
/**
* {@inheritDoc}
*/
public CollectionType getCollectionType() {
return CollectionType.MAP;
}
/**
* {@inheritDoc}
*/
public Class<K> getKeyJavaType() {
return keyType.getJavaType();
}
/**
* {@inheritDoc}
*/
public Type<K> getKeyType() {
return keyType;
}
}
}

View File

@ -1,145 +0,0 @@
/*
* Copyright (c) 2009, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal.legacy;
import java.io.Serializable;
import java.lang.reflect.Member;
import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.metamodel.Type;
/**
* @author Emmanuel Bernard
* @author Steve Ebersole
*/
public class SingularAttributeImpl<X, Y>
extends AbstractAttribute<X,Y>
implements SingularAttribute<X, Y>, Serializable {
private final boolean isIdentifier;
private final boolean isVersion;
private final boolean isOptional;
private final Type<Y> attributeType;
public SingularAttributeImpl(
String name,
Class<Y> javaType,
AbstractManagedType<X> declaringType,
Member member,
boolean isIdentifier,
boolean isVersion,
boolean isOptional,
Type<Y> attributeType,
PersistentAttributeType persistentAttributeType) {
super( name, javaType, declaringType, member, persistentAttributeType );
this.isIdentifier = isIdentifier;
this.isVersion = isVersion;
this.isOptional = isOptional;
this.attributeType = attributeType;
}
/**
* Subclass used to simplify instantiation of singular attributes representing an entity's
* identifier.
*/
public static class Identifier<X,Y> extends SingularAttributeImpl<X,Y> {
public Identifier(
String name,
Class<Y> javaType,
AbstractManagedType<X> declaringType,
Member member,
Type<Y> attributeType,
PersistentAttributeType persistentAttributeType) {
super( name, javaType, declaringType, member, true, false, false, attributeType, persistentAttributeType );
}
}
/**
* Subclass used to simplify instantiation of singular attributes representing an entity's
* version.
*/
public static class Version<X,Y> extends SingularAttributeImpl<X,Y> {
public Version(
String name,
Class<Y> javaType,
AbstractManagedType<X> declaringType,
Member member,
Type<Y> attributeType,
PersistentAttributeType persistentAttributeType) {
super( name, javaType, declaringType, member, false, true, false, attributeType, persistentAttributeType );
}
}
/**
* {@inheritDoc}
*/
public boolean isId() {
return isIdentifier;
}
/**
* {@inheritDoc}
*/
public boolean isVersion() {
return isVersion;
}
/**
* {@inheritDoc}
*/
public boolean isOptional() {
return isOptional;
}
/**
* {@inheritDoc}
*/
public Type<Y> getType() {
return attributeType;
}
/**
* {@inheritDoc}
*/
public boolean isAssociation() {
return false;
}
/**
* {@inheritDoc}
*/
public boolean isCollection() {
return false;
}
/**
* {@inheritDoc}
*/
public BindableType getBindableType() {
return BindableType.SINGULAR_ATTRIBUTE;
}
/**
* {@inheritDoc}
*/
public Class<Y> getBindableJavaType() {
return attributeType.getJavaType();
}
}

View File

@ -1 +0,0 @@
package org.hibernate.jpa.metamodel;

View File

@ -33,6 +33,7 @@
import org.hibernate.MultiTenancyStrategy;
import org.hibernate.SessionFactory;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.spi.CacheRegionDefinition;
import org.hibernate.cache.spi.access.AccessType;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.cfg.annotations.NamedEntityGraphDefinition;
@ -46,6 +47,7 @@
import org.hibernate.metamodel.spi.binding.PluralAttributeBinding;
import org.hibernate.metamodel.spi.binding.SecondaryTable;
import org.hibernate.metamodel.spi.binding.TypeDefinition;
import org.hibernate.metamodel.spi.relational.Database;
import org.hibernate.metamodel.spi.relational.Identifier;
import org.hibernate.type.BasicType;
@ -56,7 +58,7 @@ public interface Metadata {
/**
* Exposes the options used to produce a {@link Metadata} instance.
*/
public static interface Options {
public static interface Options extends Database.Defaults {
StandardServiceRegistry getServiceRegistry();
MetadataSourceProcessingOrder getMetadataSourceProcessingOrder();
@ -65,12 +67,10 @@ public static interface Options {
SharedCacheMode getSharedCacheMode();
AccessType getDefaultAccessType();
boolean useNewIdentifierGenerators();
boolean isGloballyQuotedIdentifiers();
String getDefaultSchemaName();
String getDefaultCatalogName();
MultiTenancyStrategy getMultiTenancyStrategy();
IndexView getJandexView();
List<BasicType> getBasicTypeRegistrations();
List<CacheRegionDefinition> getCacheRegionDefinitions();
}
/**

View File

@ -28,8 +28,10 @@
import org.jboss.jandex.IndexView;
import org.xml.sax.EntityResolver;
import org.hibernate.boot.spi.CacheRegionDefinition;
import org.hibernate.cache.spi.access.AccessType;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.metamodel.spi.TypeContributor;
import org.hibernate.type.BasicType;
import org.hibernate.usertype.CompositeUserType;
import org.hibernate.usertype.UserType;
@ -144,6 +146,23 @@ public interface MetadataBuilder {
*/
public MetadataBuilder with(CompositeUserType type, String[] keys);
/**
* Apply an explicit TypeContributor (implicit application via ServiceLoader will still happen too)
*
* @param typeContributor The contributor to apply
*
* @return {@code this}, for method chaining
*/
public MetadataBuilder with(TypeContributor typeContributor);
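// Illustrative sketch (not part of this change; MyMoneyType is a hypothetical org.hibernate.usertype.UserType
// implementation): contributing a custom basic type explicitly instead of relying on ServiceLoader discovery.
//
//     metadataBuilder.with( new TypeContributor() {
//         @Override
//         public void contribute(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
//             typeContributions.contributeType( new MyMoneyType(), new String[] { "money" } );
//         }
//     } );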
/**
* Apply a CacheRegionDefinition to an entity, collection or query while building the
* Metadata object.
*
* @param cacheRegionDefinition The cache region definition to apply
*/
public void with(CacheRegionDefinition cacheRegionDefinition);
/**
* Actually build the metamodel
*

View File

@ -40,18 +40,21 @@
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;
import javax.persistence.AttributeConverter;
import org.hibernate.HibernateException;
import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.CacheRegionDefinition;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.SerializationHelper;
import org.hibernate.jaxb.internal.JaxbMappingProcessor;
import org.hibernate.jaxb.spi.JaxbRoot;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.jaxb.spi.SourceType;
import org.hibernate.xml.internal.jaxb.MappingXmlBinder;
import org.hibernate.xml.spi.BindResult;
import org.hibernate.xml.spi.Origin;
import org.hibernate.xml.spi.SourceType;
import org.hibernate.jaxb.spi.orm.JaxbEntityMappings;
import org.hibernate.metamodel.internal.MetadataBuilderImpl;
import org.hibernate.metamodel.internal.source.annotations.util.HibernateDotNames;
@ -89,15 +92,18 @@ public class MetadataSources {
public static final String USE_NEW_METADATA_MAPPINGS = "hibernate.test.new_metadata_mappings";
private final ServiceRegistry serviceRegistry;
private final JaxbMappingProcessor jaxbProcessor;
private final List<CacheRegionDefinition> externalCacheRegionDefinitions = new ArrayList<CacheRegionDefinition>();
private List<JaxbRoot> jaxbRootList = new ArrayList<JaxbRoot>();
private final MappingXmlBinder jaxbProcessor;
private List<BindResult> bindResultList = new ArrayList<BindResult>();
private LinkedHashSet<Class<?>> annotatedClasses = new LinkedHashSet<Class<?>>();
private LinkedHashSet<String> annotatedClassNames = new LinkedHashSet<String>();
private LinkedHashSet<String> annotatedPackages = new LinkedHashSet<String>();
private boolean hasOrmXmlJaxbRoots;
public MetadataSources() {
this( new BootstrapServiceRegistryBuilder().build() );
}
/**
* Create a metadata sources using the specified service registry.
*
@ -113,7 +119,7 @@ public MetadataSources(ServiceRegistry serviceRegistry) {
);
}
this.serviceRegistry = serviceRegistry;
this.jaxbProcessor = new JaxbMappingProcessor( serviceRegistry );
this.jaxbProcessor = new MappingXmlBinder( serviceRegistry );
}
protected static boolean isExpectedServiceRegistryType(ServiceRegistry serviceRegistry) {
@ -121,8 +127,8 @@ protected static boolean isExpectedServiceRegistryType(ServiceRegistry serviceRe
|| StandardServiceRegistry.class.isInstance( serviceRegistry );
}
public List<JaxbRoot> getJaxbRootList() {
return jaxbRootList;
public List<BindResult> getBindResultList() {
return bindResultList;
}
public Iterable<String> getAnnotatedPackages() {
@ -137,10 +143,6 @@ public Iterable<String> getAnnotatedClassNames() {
return annotatedClassNames;
}
public List<CacheRegionDefinition> getExternalCacheRegionDefinitions() {
return externalCacheRegionDefinitions;
}
public ServiceRegistry getServiceRegistry() {
return serviceRegistry;
}
@ -248,11 +250,11 @@ private ClassLoaderService classLoaderService() {
return serviceRegistry.getService( ClassLoaderService.class );
}
private JaxbRoot add(InputStream inputStream, Origin origin, boolean close) {
private BindResult add(InputStream inputStream, Origin origin, boolean close) {
try {
JaxbRoot jaxbRoot = jaxbProcessor.unmarshal( inputStream, origin );
addJaxbRoot( jaxbRoot );
return jaxbRoot;
BindResult bindResult = jaxbProcessor.bind( inputStream, origin );
addJaxbRoot( bindResult );
return bindResult;
}
catch ( Exception e ) {
throw new InvalidMappingException( origin, e );
@ -369,7 +371,7 @@ public MetadataSources addCacheableFile(File file) {
}
LOG.readingMappingsFromFile( file.getPath() );
JaxbRoot metadataXml = add( inputStream, origin, true );
BindResult metadataXml = add( inputStream, origin, true );
try {
LOG.debugf( "Writing cache file for: %s to: %s", file, cachedFile );
@ -388,7 +390,7 @@ public MetadataSources addCacheableFile(File file) {
* Much like {@link #addCacheableFile(File)} except that here we will fail immediately if
* the cache version cannot be found or used for whatever reason
*
* @param xmlFile The xml file, not the bin!
* @param file The xml file, not the bin!
*
* @return this, for method chaining
*
@ -407,7 +409,7 @@ public MetadataSources addCacheableFileStrictly(File file) throws SerializationE
}
LOG.readingCachedMappings( cachedFile );
addJaxbRoot( ( JaxbRoot ) SerializationHelper.deserialize( new FileInputStream( cachedFile ) ) );
addJaxbRoot( (BindResult) SerializationHelper.deserialize( new FileInputStream( cachedFile ) ) );
return this;
}
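// Illustrative usage sketch (not part of this change; the file path is hypothetical). The lenient
// variant re-parses and re-writes the cache file when needed, while the strict variant above fails
// if the serialized cache file cannot be used.
//
//     MetadataSources sources = new MetadataSources( serviceRegistry );
//     sources.addCacheableFile( new File( "mappings/Order.hbm.xml" ) );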
@ -457,14 +459,14 @@ public MetadataSources addURL(URL url) {
*/
public MetadataSources addDocument(Document document) {
final Origin origin = new Origin( SourceType.DOM, UNKNOWN_FILE_PATH );
JaxbRoot jaxbRoot = jaxbProcessor.unmarshal( document, origin );
addJaxbRoot( jaxbRoot );
BindResult bindResult = jaxbProcessor.unmarshal( document, origin );
addJaxbRoot( bindResult );
return this;
}
private void addJaxbRoot(JaxbRoot jaxbRoot) {
hasOrmXmlJaxbRoots = hasOrmXmlJaxbRoots || JaxbEntityMappings.class.isInstance( jaxbRoot.getRoot() );
jaxbRootList.add( jaxbRoot );
private void addJaxbRoot(BindResult bindResult) {
hasOrmXmlJaxbRoots = hasOrmXmlJaxbRoots || JaxbEntityMappings.class.isInstance( bindResult.getRoot() );
bindResultList.add( bindResult );
}
/**
@ -537,11 +539,6 @@ else if ( file.getName().endsWith( ".hbm.xml" ) ) {
return this;
}
public MetadataSources addCacheRegionDefinitions(List<CacheRegionDefinition> cacheRegionDefinitions) {
externalCacheRegionDefinitions.addAll( cacheRegionDefinitions );
return this;
}
@SuppressWarnings("unchecked")
public IndexView wrapJandexView(IndexView jandexView) {
if ( ! hasOrmXmlJaxbRoots ) {
@ -550,9 +547,9 @@ public IndexView wrapJandexView(IndexView jandexView) {
}
final List<JaxbEntityMappings> collectedOrmXmlMappings = new ArrayList<JaxbEntityMappings>();
for ( JaxbRoot jaxbRoot : getJaxbRootList() ) {
if ( JaxbEntityMappings.class.isInstance( jaxbRoot.getRoot() ) ) {
collectedOrmXmlMappings.add( ( (JaxbRoot<JaxbEntityMappings>) jaxbRoot ).getRoot() );
for ( BindResult bindResult : getBindResultList() ) {
if ( JaxbEntityMappings.class.isInstance( bindResult.getRoot() ) ) {
collectedOrmXmlMappings.add( ( (BindResult<JaxbEntityMappings>) bindResult ).getRoot() );
}
}
@ -658,4 +655,12 @@ private ClassInfo indexResource(String resourceName, Indexer indexer) {
return null;
}
public void addAttributeConverter(Class<? extends AttributeConverter> cls) {
}
public void addAttributeConverter(Class<? extends AttributeConverter> theClass, boolean autoApply) {
}
}

View File

@ -37,6 +37,7 @@
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.spi.CacheRegionDefinition;
import org.hibernate.cache.spi.access.AccessType;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.EJB3DTDEntityResolver;
@ -49,6 +50,8 @@
import org.hibernate.metamodel.MetadataSourceProcessingOrder;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.spi.MetadataSourcesContributor;
import org.hibernate.metamodel.spi.TypeContributions;
import org.hibernate.metamodel.spi.TypeContributor;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.type.BasicType;
import org.hibernate.type.CompositeCustomType;
@ -61,7 +64,7 @@
*
* @author Steve Ebersole
*/
public class MetadataBuilderImpl implements MetadataBuilder {
public class MetadataBuilderImpl implements MetadataBuilder, TypeContributions {
private static final Logger log = Logger.getLogger( MetadataBuilderImpl.class );
private final MetadataSources sources;
@ -170,6 +173,35 @@ public MetadataBuilder with(CompositeUserType type, String[] keys) {
return this;
}
@Override
public MetadataBuilder with(TypeContributor typeContributor) {
typeContributor.contribute( this, options.serviceRegistry );
return this;
}
@Override
public void contributeType(BasicType type) {
options.basicTypeRegistrations.add( type );
}
@Override
public void contributeType(UserType type, String[] keys) {
options.basicTypeRegistrations.add( new CustomType( type, keys ) );
}
@Override
public void contributeType(CompositeUserType type, String[] keys) {
options.basicTypeRegistrations.add( new CompositeCustomType( type, keys ) );
}
@Override
public void with(CacheRegionDefinition cacheRegionDefinition) {
if ( options.cacheRegionDefinitions == null ) {
options.cacheRegionDefinitions = new ArrayList<CacheRegionDefinition>();
}
options.cacheRegionDefinitions.add( cacheRegionDefinition );
}
@Override
public Metadata build() {
return new MetadataImpl( sources, options );
@ -190,8 +222,9 @@ public static class OptionsImpl implements Metadata.Options {
private String defaultSchemaName;
private String defaultCatalogName;
private MultiTenancyStrategy multiTenancyStrategy;
public IndexView jandexView;
public List<BasicType> basicTypeRegistrations = new ArrayList<BasicType>();
private IndexView jandexView;
private List<BasicType> basicTypeRegistrations = new ArrayList<BasicType>();
private List<CacheRegionDefinition> cacheRegionDefinitions;
public OptionsImpl(StandardServiceRegistry serviceRegistry) {
this.serviceRegistry = serviceRegistry;
@ -300,5 +333,10 @@ public IndexView getJandexView() {
public List<BasicType> getBasicTypeRegistrations() {
return basicTypeRegistrations;
}
@Override
public List<CacheRegionDefinition> getCacheRegionDefinitions() {
return cacheRegionDefinitions;
}
}
}

View File

@ -62,7 +62,7 @@
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.ValueHolder;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.jaxb.spi.JaxbRoot;
import org.hibernate.xml.spi.BindResult;
import org.hibernate.metamodel.MetadataSourceProcessingOrder;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.SessionFactoryBuilder;
@ -258,11 +258,11 @@ public void contributeType(CompositeUserType type, String[] keys) {
contributor.contribute( this, jandexView );
}
final List<JaxbRoot> jaxbRoots = new ArrayList<JaxbRoot>();
final List<BindResult> bindResults = new ArrayList<BindResult>();
for ( AdditionalJaxbRootProducer producer : classLoaderService.loadJavaServices( AdditionalJaxbRootProducer.class ) ) {
jaxbRoots.addAll( producer.produceRoots( this, jandexView ) );
bindResults.addAll( producer.produceRoots( this, jandexView ) );
}
final HbmMetadataSourceProcessorImpl processor = new HbmMetadataSourceProcessorImpl( this, jaxbRoots );
final HbmMetadataSourceProcessorImpl processor = new HbmMetadataSourceProcessorImpl( this, bindResults );
final Binder binder = new Binder( this, identifierGeneratorFactory );
binder.addEntityHierarchies( processor.extractEntityHierarchies() );
binder.bindEntityHierarchies();
@ -299,10 +299,11 @@ private void secondPass(MetadataSources metadataSources) {
}
}
if ( metadataSources.getExternalCacheRegionDefinitions().isEmpty() ) {
if ( options.getCacheRegionDefinitions() == null || options.getCacheRegionDefinitions().isEmpty() ) {
return;
}
for ( CacheRegionDefinition cacheRegionDefinition : metadataSources.getExternalCacheRegionDefinitions() ) {
for ( CacheRegionDefinition cacheRegionDefinition : options.getCacheRegionDefinitions() ) {
final String role = cacheRegionDefinition.getRole();
if ( cacheRegionDefinition.getRegionType() == CacheRegionDefinition.CacheRegionType.ENTITY ) {
EntityBinding entityBinding = entityBindingMap.get( role );
@ -315,7 +316,8 @@ private void secondPass(MetadataSources metadataSources) {
cacheRegionDefinition.isCacheLazy()
)
);
}else{
}
else{
//logging?
throw new MappingException( "Can't find entitybinding for role " + role +" to apply cache configuration" );
}

View File

@ -35,7 +35,7 @@
import org.hibernate.MappingException;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.spi.Origin;
import org.hibernate.metamodel.internal.source.annotations.attribute.AssociationAttribute;
import org.hibernate.metamodel.internal.source.annotations.attribute.Column;
import org.hibernate.metamodel.internal.source.annotations.attribute.MappedAttribute;

View File

@ -25,8 +25,8 @@
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.internal.util.ValueHolder;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.jaxb.spi.SourceType;
import org.hibernate.xml.spi.Origin;
import org.hibernate.xml.spi.SourceType;
import org.hibernate.metamodel.internal.source.annotations.AnnotationBindingContext;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.metamodel.spi.binding.IdentifierGeneratorDefinition;

View File

@ -35,7 +35,7 @@
import org.hibernate.cfg.NotYetImplementedException;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.spi.Origin;
import org.hibernate.jaxb.spi.hbm.EntityElement;
import org.hibernate.jaxb.spi.hbm.JaxbAnyElement;
import org.hibernate.jaxb.spi.hbm.JaxbArrayElement;

View File

@ -24,8 +24,8 @@
package org.hibernate.metamodel.internal.source.hbm;
import org.hibernate.internal.util.ValueHolder;
import org.hibernate.jaxb.spi.JaxbRoot;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.spi.BindResult;
import org.hibernate.xml.spi.Origin;
import org.hibernate.jaxb.spi.hbm.JaxbHibernateMapping;
import org.hibernate.metamodel.spi.source.MappingException;
@ -55,7 +55,7 @@ protected Origin origin() {
return sourceMappingDocument().getOrigin();
}
protected JaxbRoot<JaxbHibernateMapping> mappingRoot() {
protected BindResult<JaxbHibernateMapping> mappingRoot() {
return sourceMappingDocument().getJaxbRoot();
}

View File

@ -30,7 +30,7 @@
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.jaxb.spi.JaxbRoot;
import org.hibernate.xml.spi.BindResult;
import org.hibernate.jaxb.spi.hbm.JaxbHibernateMapping;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.spi.MetadataImplementor;
@ -54,18 +54,18 @@ public class HbmMetadataSourceProcessorImpl implements MetadataSourceProcessor {
private final List<EntityHierarchyImpl> entityHierarchies;
public HbmMetadataSourceProcessorImpl(MetadataImplementor metadata, MetadataSources metadataSources) {
this( metadata, metadataSources.getJaxbRootList() );
this( metadata, metadataSources.getBindResultList() );
}
public HbmMetadataSourceProcessorImpl(MetadataImplementor metadata, List<JaxbRoot> jaxbRoots) {
public HbmMetadataSourceProcessorImpl(MetadataImplementor metadata, List<BindResult> bindResults) {
final HierarchyBuilder hierarchyBuilder = new HierarchyBuilder( metadata );
for ( JaxbRoot jaxbRoot : jaxbRoots ) {
if ( ! JaxbHibernateMapping.class.isInstance( jaxbRoot.getRoot() ) ) {
for ( BindResult bindResult : bindResults ) {
if ( ! JaxbHibernateMapping.class.isInstance( bindResult.getRoot() ) ) {
continue;
}
final MappingDocument mappingDocument = new MappingDocument( jaxbRoot, metadata );
final MappingDocument mappingDocument = new MappingDocument( bindResult, metadata );
processors.add( new HibernateMappingProcessor( metadata, mappingDocument ) );
hierarchyBuilder.processMappingDocument( mappingDocument );

View File

@ -28,8 +28,8 @@
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.internal.util.ValueHolder;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.jaxb.spi.JaxbRoot;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.spi.BindResult;
import org.hibernate.xml.spi.Origin;
import org.hibernate.jaxb.spi.hbm.EntityElement;
import org.hibernate.jaxb.spi.hbm.JaxbFetchProfileElement;
import org.hibernate.jaxb.spi.hbm.JaxbHibernateMapping;
@ -47,25 +47,25 @@
* @author Steve Ebersole
*/
public class MappingDocument {
private final JaxbRoot<JaxbHibernateMapping> hbmJaxbRoot;
private final BindResult<JaxbHibernateMapping> hbmBindResult;
private final LocalBindingContextImpl mappingLocalBindingContext;
public MappingDocument(JaxbRoot<JaxbHibernateMapping> hbmJaxbRoot, MetadataImplementor metadata) {
this.hbmJaxbRoot = hbmJaxbRoot;
public MappingDocument(BindResult<JaxbHibernateMapping> hbmBindResult, MetadataImplementor metadata) {
this.hbmBindResult = hbmBindResult;
this.mappingLocalBindingContext = new LocalBindingContextImpl( metadata );
}
public JaxbHibernateMapping getMappingRoot() {
return hbmJaxbRoot.getRoot();
return hbmBindResult.getRoot();
}
public Origin getOrigin() {
return hbmJaxbRoot.getOrigin();
return hbmBindResult.getOrigin();
}
public JaxbRoot<JaxbHibernateMapping> getJaxbRoot() {
return hbmJaxbRoot;
public BindResult<JaxbHibernateMapping> getJaxbRoot() {
return hbmBindResult;
}
public HbmBindingContext getMappingLocalBindingContext() {
@ -81,22 +81,22 @@ private LocalBindingContextImpl(MetadataImplementor metadata) {
this.metadata = metadata;
this.localMappingDefaults = new OverriddenMappingDefaults(
metadata.getMappingDefaults(),
hbmJaxbRoot.getRoot().getPackage(),
hbmJaxbRoot.getRoot().getSchema(),
hbmJaxbRoot.getRoot().getCatalog(),
hbmBindResult.getRoot().getPackage(),
hbmBindResult.getRoot().getSchema(),
hbmBindResult.getRoot().getCatalog(),
null,
null,
null,
hbmJaxbRoot.getRoot().getDefaultCascade(),
hbmJaxbRoot.getRoot().getDefaultAccess(),
hbmJaxbRoot.getRoot().isDefaultLazy()
hbmBindResult.getRoot().getDefaultCascade(),
hbmBindResult.getRoot().getDefaultAccess(),
hbmBindResult.getRoot().isDefaultLazy()
);
if ( CollectionHelper.isEmpty( hbmJaxbRoot.getRoot().getMeta() ) ) {
if ( CollectionHelper.isEmpty( hbmBindResult.getRoot().getMeta() ) ) {
this.metaAttributeContext = new MetaAttributeContext( metadata.getGlobalMetaAttributeContext() );
}
else {
this.metaAttributeContext = Helper.extractMetaAttributeContext(
hbmJaxbRoot.getRoot().getMeta(),
hbmBindResult.getRoot().getMeta(),
true,
metadata.getGlobalMetaAttributeContext()
);
@ -140,7 +140,7 @@ public ValueHolder<Class<?>> makeClassReference(String className) {
@Override
public boolean isAutoImport() {
return hbmJaxbRoot.getRoot().isAutoImport();
return hbmBindResult.getRoot().isAutoImport();
}
@Override
@ -150,7 +150,7 @@ public MetaAttributeContext getMetaAttributeContext() {
@Override
public Origin getOrigin() {
return hbmJaxbRoot.getOrigin();
return hbmBindResult.getOrigin();
}
@Override

View File

@ -27,10 +27,10 @@
import org.jboss.jandex.IndexView;
import org.hibernate.jaxb.spi.JaxbRoot;
import org.hibernate.xml.spi.BindResult;
/**
* Contract for integrations that wish to provide additional mappings (in the form of {@link JaxbRoot}. This hook
* Contract for integrations that wish to provide additional mappings (in the form of {@link org.hibernate.xml.spi.BindResult}). This hook
* is performed after all other mappings, annotations, etc have completed processing.
*
* @author Steve Ebersole
@ -44,5 +44,5 @@ public interface AdditionalJaxbRootProducer {
*
* @return List of additional mappings
*/
public List<JaxbRoot> produceRoots(MetadataImplementor metadata, IndexView jandexIndex);
public List<BindResult> produceRoots(MetadataImplementor metadata, IndexView jandexIndex);
}
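// Illustrative sketch (not part of this change; the class name is hypothetical): a producer that
// contributes no additional mappings.  Implementations are picked up through the ClassLoaderService's
// loadJavaServices support, as seen in the MetadataImpl changes above.
//
//     public class NoOpJaxbRootProducer implements AdditionalJaxbRootProducer {
//         @Override
//         public List<BindResult> produceRoots(MetadataImplementor metadata, IndexView jandexIndex) {
//             return java.util.Collections.<BindResult>emptyList();
//         }
//     }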

View File

@ -39,6 +39,12 @@
* @author Gail Badner
*/
public class Database {
public static interface Defaults {
String getDefaultSchemaName();
String getDefaultCatalogName();
boolean isGloballyQuotedIdentifiers();
}
private final Schema.Name implicitSchemaName;
private final JdbcEnvironment jdbcEnvironment;
@ -46,7 +52,7 @@ public class Database {
private final List<AuxiliaryDatabaseObject> auxiliaryDatabaseObjects = new ArrayList<AuxiliaryDatabaseObject>();
private final List<InitCommand> initCommands = new ArrayList<InitCommand>();
public Database(Metadata.Options options, JdbcEnvironment jdbcEnvironment) {
public Database(Defaults options, JdbcEnvironment jdbcEnvironment) {
String schemaName = options.getDefaultSchemaName();
String catalogName = options.getDefaultCatalogName();
if ( options.isGloballyQuotedIdentifiers() ) {

View File

@ -29,7 +29,7 @@
import javax.persistence.EntityListeners;
import javax.persistence.MappedSuperclass;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.spi.Origin;
import org.hibernate.metamodel.spi.binding.CustomSQL;
/**

View File

@ -24,9 +24,7 @@
package org.hibernate.metamodel.spi.source;
import javassist.runtime.Desc;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.spi.Origin;
/**
* @author Brett Meyer

View File

@ -23,7 +23,7 @@
*/
package org.hibernate.metamodel.spi.source;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.spi.Origin;
/**
* @author Steve Ebersole

View File

@ -24,7 +24,7 @@
package org.hibernate.metamodel.spi.source;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.spi.Origin;
/**
* Indicates a problem parsing a mapping document.

View File

@ -24,7 +24,7 @@
package org.hibernate.metamodel.spi.source;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.spi.Origin;
/**
* @author Steve Ebersole

View File

@ -265,377 +265,6 @@ public abstract class AbstractCollectionPersister
private Map collectionPropertyColumnAliases = new HashMap();
private Map collectionPropertyColumnNames = new HashMap();
public AbstractCollectionPersister(
final Collection collection,
final CollectionRegionAccessStrategy cacheAccessStrategy,
final Configuration cfg,
final SessionFactoryImplementor factory) throws MappingException, CacheException {
this.factory = factory;
this.cacheAccessStrategy = cacheAccessStrategy;
if ( factory.getSettings().isStructuredCacheEntriesEnabled() ) {
cacheEntryStructure = collection.isMap()
? StructuredMapCacheEntry.INSTANCE
: StructuredCollectionCacheEntry.INSTANCE;
}
else {
cacheEntryStructure = UnstructuredCacheEntry.INSTANCE;
}
dialect = factory.getDialect();
sqlExceptionHelper = factory.getSQLExceptionHelper();
collectionType = collection.getCollectionType();
role = collection.getRole();
entityName = collection.getOwnerEntityName();
ownerPersister = factory.getEntityPersister( entityName );
queryLoaderName = collection.getLoaderName();
nodeName = collection.getNodeName();
isMutable = collection.isMutable();
mappedByProperty = collection.getMappedByProperty();
Table table = collection.getCollectionTable();
fetchMode = collection.getElement().getFetchMode();
elementType = collection.getElement().getType();
// isSet = collection.isSet();
// isSorted = collection.isSorted();
isPrimitiveArray = collection.isPrimitiveArray();
isArray = collection.isArray();
subselectLoadable = collection.isSubselectLoadable();
qualifiedTableName = table.getQualifiedName(
dialect,
factory.getSettings().getDefaultCatalogName(),
factory.getSettings().getDefaultSchemaName()
);
int spacesSize = 1 + collection.getSynchronizedTables().size();
spaces = new String[spacesSize];
spaces[0] = qualifiedTableName;
Iterator iter = collection.getSynchronizedTables().iterator();
for ( int i = 1; i < spacesSize; i++ ) {
spaces[i] = (String) iter.next();
}
sqlWhereString = StringHelper.isNotEmpty( collection.getWhere() ) ? "( " + collection.getWhere() + ") " : null;
hasWhere = sqlWhereString != null;
sqlWhereStringTemplate = hasWhere ?
Template.renderWhereStringTemplate( sqlWhereString, dialect, factory.getSqlFunctionRegistry() ) :
null;
hasOrphanDelete = collection.hasOrphanDelete();
int batch = collection.getBatchSize();
if ( batch == -1 ) {
batch = factory.getSettings().getDefaultBatchFetchSize();
}
batchSize = batch;
isVersioned = collection.isOptimisticLocked();
// KEY
keyType = collection.getKey().getType();
iter = collection.getKey().getColumnIterator();
int keySpan = collection.getKey().getColumnSpan();
keyColumnNames = new String[keySpan];
keyColumnAliases = new String[keySpan];
int k = 0;
while ( iter.hasNext() ) {
// NativeSQL: collect key column and auto-aliases
Column col = ( (Column) iter.next() );
keyColumnNames[k] = col.getQuotedName( dialect );
keyColumnAliases[k] = col.getAlias( dialect, collection.getOwner().getRootTable() );
k++;
}
// unquotedKeyColumnNames = StringHelper.unQuote(keyColumnAliases);
// ELEMENT
String elemNode = collection.getElementNodeName();
if ( elementType.isEntityType() ) {
String entityName = ( (EntityType) elementType ).getAssociatedEntityName();
elementPersister = factory.getEntityPersister( entityName );
if ( elemNode == null ) {
elemNode = cfg.getClassMapping( entityName ).getNodeName();
}
// NativeSQL: collect element column and auto-aliases
}
else {
elementPersister = null;
}
elementNodeName = elemNode;
int elementSpan = collection.getElement().getColumnSpan();
elementColumnAliases = new String[elementSpan];
elementColumnNames = new String[elementSpan];
elementColumnWriters = new String[elementSpan];
elementColumnReaders = new String[elementSpan];
elementColumnReaderTemplates = new String[elementSpan];
elementFormulaTemplates = new String[elementSpan];
elementFormulas = new String[elementSpan];
elementColumnIsSettable = new boolean[elementSpan];
elementColumnIsInPrimaryKey = new boolean[elementSpan];
boolean isPureFormula = true;
boolean hasNotNullableColumns = false;
int j = 0;
iter = collection.getElement().getColumnIterator();
while ( iter.hasNext() ) {
Selectable selectable = (Selectable) iter.next();
elementColumnAliases[j] = selectable.getAlias( dialect, table );
if ( selectable.isFormula() ) {
Formula form = (Formula) selectable;
elementFormulaTemplates[j] = form.getTemplate( dialect, factory.getSqlFunctionRegistry() );
elementFormulas[j] = form.getFormula();
}
else {
Column col = (Column) selectable;
elementColumnNames[j] = col.getQuotedName( dialect );
elementColumnWriters[j] = col.getWriteExpr();
elementColumnReaders[j] = col.getReadExpr( dialect );
elementColumnReaderTemplates[j] = col.getTemplate( dialect, factory.getSqlFunctionRegistry() );
elementColumnIsSettable[j] = true;
elementColumnIsInPrimaryKey[j] = !col.isNullable();
if ( !col.isNullable() ) {
hasNotNullableColumns = true;
}
isPureFormula = false;
}
j++;
}
elementIsPureFormula = isPureFormula;
// workaround, for backward compatibility of sets with no
// not-null columns, assume all columns are used in the
// row locator SQL
if ( !hasNotNullableColumns ) {
Arrays.fill( elementColumnIsInPrimaryKey, true );
}
// INDEX AND ROW SELECT
hasIndex = collection.isIndexed();
if ( hasIndex ) {
// NativeSQL: collect index column and auto-aliases
IndexedCollection indexedCollection = (IndexedCollection) collection;
indexType = indexedCollection.getIndex().getType();
int indexSpan = indexedCollection.getIndex().getColumnSpan();
iter = indexedCollection.getIndex().getColumnIterator();
indexColumnNames = new String[indexSpan];
indexFormulaTemplates = new String[indexSpan];
indexFormulas = new String[indexSpan];
indexColumnIsSettable = new boolean[indexSpan];
indexColumnAliases = new String[indexSpan];
int i = 0;
boolean hasFormula = false;
while ( iter.hasNext() ) {
Selectable s = (Selectable) iter.next();
indexColumnAliases[i] = s.getAlias( dialect );
if ( s.isFormula() ) {
Formula indexForm = (Formula) s;
indexFormulaTemplates[i] = indexForm.getTemplate( dialect, factory.getSqlFunctionRegistry() );
indexFormulas[i] = indexForm.getFormula();
hasFormula = true;
}
else {
Column indexCol = (Column) s;
indexColumnNames[i] = indexCol.getQuotedName( dialect );
indexColumnIsSettable[i] = true;
}
i++;
}
indexContainsFormula = hasFormula;
baseIndex = indexedCollection.isList() ?
( (List) indexedCollection ).getBaseIndex() : 0;
indexNodeName = indexedCollection.getIndexNodeName();
}
else {
indexContainsFormula = false;
indexColumnIsSettable = null;
indexFormulaTemplates = null;
indexFormulas = null;
indexType = null;
indexColumnNames = null;
indexColumnAliases = null;
baseIndex = 0;
indexNodeName = null;
}
hasIdentifier = collection.isIdentified();
if ( hasIdentifier ) {
if ( collection.isOneToMany() ) {
throw new MappingException( "one-to-many collections with identifiers are not supported" );
}
IdentifierCollection idColl = (IdentifierCollection) collection;
identifierType = idColl.getIdentifier().getType();
iter = idColl.getIdentifier().getColumnIterator();
Column col = (Column) iter.next();
identifierColumnName = col.getQuotedName( dialect );
identifierColumnAlias = col.getAlias( dialect );
// unquotedIdentifierColumnName = identifierColumnAlias;
identifierGenerator = idColl.getIdentifier().createIdentifierGenerator(
cfg.getIdentifierGeneratorFactory(),
factory.getDialect(),
factory.getSettings().getDefaultCatalogName(),
factory.getSettings().getDefaultSchemaName(),
null
);
}
else {
identifierType = null;
identifierColumnName = null;
identifierColumnAlias = null;
// unquotedIdentifierColumnName = null;
identifierGenerator = null;
}
// GENERATE THE SQL:
// sqlSelectString = sqlSelectString();
// sqlSelectRowString = sqlSelectRowString();
if ( collection.getCustomSQLInsert() == null ) {
sqlInsertRowString = generateInsertRowString();
insertCallable = false;
insertCheckStyle = ExecuteUpdateResultCheckStyle.COUNT;
}
else {
sqlInsertRowString = collection.getCustomSQLInsert();
insertCallable = collection.isCustomInsertCallable();
insertCheckStyle = collection.getCustomSQLInsertCheckStyle() == null
? ExecuteUpdateResultCheckStyle.determineDefault( collection.getCustomSQLInsert(), insertCallable )
: collection.getCustomSQLInsertCheckStyle();
}
if ( collection.getCustomSQLUpdate() == null ) {
sqlUpdateRowString = generateUpdateRowString();
updateCallable = false;
updateCheckStyle = ExecuteUpdateResultCheckStyle.COUNT;
}
else {
sqlUpdateRowString = collection.getCustomSQLUpdate();
updateCallable = collection.isCustomUpdateCallable();
updateCheckStyle = collection.getCustomSQLUpdateCheckStyle() == null
? ExecuteUpdateResultCheckStyle.determineDefault( collection.getCustomSQLUpdate(), updateCallable )
: collection.getCustomSQLUpdateCheckStyle();
}
if ( collection.getCustomSQLDelete() == null ) {
sqlDeleteRowString = generateDeleteRowString();
deleteCallable = false;
deleteCheckStyle = ExecuteUpdateResultCheckStyle.NONE;
}
else {
sqlDeleteRowString = collection.getCustomSQLDelete();
deleteCallable = collection.isCustomDeleteCallable();
deleteCheckStyle = ExecuteUpdateResultCheckStyle.NONE;
}
if ( collection.getCustomSQLDeleteAll() == null ) {
sqlDeleteString = generateDeleteString();
deleteAllCallable = false;
deleteAllCheckStyle = ExecuteUpdateResultCheckStyle.NONE;
}
else {
sqlDeleteString = collection.getCustomSQLDeleteAll();
deleteAllCallable = collection.isCustomDeleteAllCallable();
deleteAllCheckStyle = ExecuteUpdateResultCheckStyle.NONE;
}
sqlSelectSizeString = generateSelectSizeString( collection.isIndexed() && !collection.isMap() );
sqlDetectRowByIndexString = generateDetectRowByIndexString();
sqlDetectRowByElementString = generateDetectRowByElementString();
sqlSelectRowByIndexString = generateSelectRowByIndexString();
logStaticSQL();
isLazy = collection.isLazy();
isExtraLazy = collection.isExtraLazy();
isInverse = collection.isInverse();
if ( collection.isArray() ) {
elementClass = ( (org.hibernate.mapping.Array) collection ).getElementClass();
}
else {
// for non-arrays, we don't need to know the element class
elementClass = null; // elementType.returnedClass();
}
if ( elementType.isComponentType() ) {
elementPropertyMapping = new CompositeElementPropertyMapping(
elementColumnNames,
elementColumnReaders,
elementColumnReaderTemplates,
elementFormulaTemplates,
(CompositeType) elementType,
factory
);
}
else if ( !elementType.isEntityType() ) {
elementPropertyMapping = new ElementPropertyMapping(
elementColumnNames,
elementType
);
}
else {
if ( elementPersister instanceof PropertyMapping ) { // not all classpersisters implement PropertyMapping!
elementPropertyMapping = (PropertyMapping) elementPersister;
}
else {
elementPropertyMapping = new ElementPropertyMapping(
elementColumnNames,
elementType
);
}
}
hasOrder = collection.getOrderBy() != null;
if ( hasOrder ) {
orderByTranslation = Template.translateOrderBy(
collection.getOrderBy(),
new ColumnMapperImpl(),
factory,
dialect,
factory.getSqlFunctionRegistry()
);
}
else {
orderByTranslation = null;
}
// Handle any filters applied to this collection
filterHelper = new FilterHelper( collection.getFilters(), factory );
// Handle any filters applied to this collection for many-to-many
manyToManyFilterHelper = new FilterHelper( collection.getManyToManyFilters(), factory );
manyToManyWhereString = StringHelper.isNotEmpty( collection.getManyToManyWhere() ) ?
"( " + collection.getManyToManyWhere() + ")" :
null;
manyToManyWhereTemplate = manyToManyWhereString == null ?
null :
Template.renderWhereStringTemplate( manyToManyWhereString, factory.getDialect(), factory.getSqlFunctionRegistry() );
hasManyToManyOrder = collection.getManyToManyOrdering() != null;
if ( hasManyToManyOrder ) {
manyToManyOrderByTranslation = Template.translateOrderBy(
collection.getManyToManyOrdering(),
new ColumnMapperImpl(),
factory,
dialect,
factory.getSqlFunctionRegistry()
);
}
else {
manyToManyOrderByTranslation = null;
}
initCollectionPropertyMap();
}
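The insert and update branches above resolve their result-check style the same way: the generated statement gets COUNT checking, while custom SQL uses the explicitly mapped style or falls back to ExecuteUpdateResultCheckStyle.determineDefault (the delete paths simply use NONE). A standalone sketch of that rule, assuming ExecuteUpdateResultCheckStyle still lives in org.hibernate.engine.spi and using a hypothetical helper class:

import org.hibernate.engine.spi.ExecuteUpdateResultCheckStyle;

// Hypothetical helper mirroring the insert/update branches of the constructor above.
final class CustomSqlState {
	final String sql;
	final boolean callable;
	final ExecuteUpdateResultCheckStyle checkStyle;

	private CustomSqlState(String sql, boolean callable, ExecuteUpdateResultCheckStyle checkStyle) {
		this.sql = sql;
		this.callable = callable;
		this.checkStyle = checkStyle;
	}

	static CustomSqlState resolve(
			String generatedSql,
			String customSql,
			boolean customCallable,
			ExecuteUpdateResultCheckStyle explicitStyle) {
		if ( customSql == null ) {
			// no override: use the generated statement and verify affected row counts
			return new CustomSqlState( generatedSql, false, ExecuteUpdateResultCheckStyle.COUNT );
		}
		// override: honor an explicitly mapped check style, else derive one from the SQL itself
		final ExecuteUpdateResultCheckStyle style = explicitStyle != null
				? explicitStyle
				: ExecuteUpdateResultCheckStyle.determineDefault( customSql, customCallable );
		return new CustomSqlState( customSql, customCallable, style );
	}
}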
public AbstractCollectionPersister(
AbstractPluralAttributeBinding collection,
CollectionRegionAccessStrategy cacheAccessStrategy,

View File

@ -71,14 +71,6 @@ public boolean isCascadeDeleteEnabled() {
return false;
}
public BasicCollectionPersister(
Collection collection,
CollectionRegionAccessStrategy cacheAccessStrategy,
Configuration cfg,
SessionFactoryImplementor factory) throws MappingException, CacheException {
super( collection, cacheAccessStrategy, cfg, factory );
}
@SuppressWarnings( {"UnusedDeclaration"})
public BasicCollectionPersister(
AbstractPluralAttributeBinding collection,

View File

@ -71,6 +71,28 @@ public class OneToManyPersister extends AbstractCollectionPersister {
private final boolean keyIsNullable;
private final boolean keyIsUpdateable;
@SuppressWarnings( {"UnusedDeclaration"})
public OneToManyPersister(
AbstractPluralAttributeBinding collection,
CollectionRegionAccessStrategy cacheAccessStrategy,
MetadataImplementor metadataImplementor,
SessionFactoryImplementor factory) throws MappingException, CacheException {
super( collection, cacheAccessStrategy, metadataImplementor, factory );
if ( collection.getPluralAttributeElementBinding().getNature() !=
PluralAttributeElementBinding.Nature.ONE_TO_MANY ) {
throw new AssertionError(
String.format( "Unexpected plural attribute nature; expected=(%s), actual=(%s)",
PluralAttributeElementBinding.Nature.ONE_TO_MANY,
collection.getPluralAttributeElementBinding().getNature()
)
);
}
final PluralAttributeKeyBinding keyBinding = collection.getPluralAttributeKeyBinding();
cascadeDeleteEnabled = keyBinding.isCascadeDeleteEnabled() && factory.getDialect().supportsCascadeDelete();
keyIsNullable = keyBinding.isNullable();
keyIsUpdateable = keyBinding.isUpdatable();
}
@Override
protected boolean isRowDeleteEnabled() {
return keyIsUpdateable && keyIsNullable;
@ -85,40 +107,6 @@ public boolean isCascadeDeleteEnabled() {
return cascadeDeleteEnabled;
}
public OneToManyPersister(
Collection collection,
CollectionRegionAccessStrategy cacheAccessStrategy,
Configuration cfg,
SessionFactoryImplementor factory) throws MappingException, CacheException {
super( collection, cacheAccessStrategy, cfg, factory );
cascadeDeleteEnabled = collection.getKey().isCascadeDeleteEnabled() &&
factory.getDialect().supportsCascadeDelete();
keyIsNullable = collection.getKey().isNullable();
keyIsUpdateable = collection.getKey().isUpdateable();
}
@SuppressWarnings( {"UnusedDeclaration"})
public OneToManyPersister(
AbstractPluralAttributeBinding collection,
CollectionRegionAccessStrategy cacheAccessStrategy,
MetadataImplementor metadataImplementor,
SessionFactoryImplementor factory) throws MappingException, CacheException {
super( collection, cacheAccessStrategy, metadataImplementor, factory );
if ( collection.getPluralAttributeElementBinding().getNature() !=
PluralAttributeElementBinding.Nature.ONE_TO_MANY ) {
throw new AssertionError(
String.format( "Unexpected plural attribute nature; expected=(%s), actual=(%s)",
PluralAttributeElementBinding.Nature.ONE_TO_MANY,
collection.getPluralAttributeElementBinding().getNature()
)
);
}
final PluralAttributeKeyBinding keyBinding = collection.getPluralAttributeKeyBinding();
cascadeDeleteEnabled = keyBinding.isCascadeDeleteEnabled() && factory.getDialect().supportsCascadeDelete();
keyIsNullable = keyBinding.isNullable();
keyIsUpdateable = keyBinding.isUpdatable();
}
/**
* Generate the SQL UPDATE that updates all the foreign keys to null

View File

@ -78,7 +78,7 @@ public void integrate(
Configuration configuration,
SessionFactoryImplementor sessionFactory,
SessionFactoryServiceRegistry serviceRegistry) {
doIntegration( configuration.getProperties(), configuration.getJaccPermissionDeclarations(), serviceRegistry );
doIntegration( configuration.getProperties(), null, serviceRegistry );
}
private void doIntegration(
@ -121,6 +121,7 @@ public void integrate(
MetadataImplementor metadata,
SessionFactoryImplementor sessionFactory,
SessionFactoryServiceRegistry serviceRegistry) {
// todo : need to stash the JACC permissions somewhere accessible from here...
doIntegration( sessionFactory.getProperties(), null, serviceRegistry );
}

View File

@ -23,14 +23,18 @@
*/
package org.hibernate.service;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Properties;
import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.jaxb.spi.SourceType;
import org.hibernate.xml.spi.Origin;
import org.hibernate.xml.spi.SourceType;
import org.hibernate.jaxb.spi.cfg.JaxbHibernateConfiguration;
import org.hibernate.internal.util.ValueHolder;
import org.hibernate.internal.util.config.ConfigurationException;
@ -66,7 +70,53 @@ public JaxbHibernateConfiguration loadConfigXmlResource(String cfgXmlResourceNam
if ( stream == null ) {
throw new ConfigurationException( "Could not locate cfg.xml resource [" + cfgXmlResourceName + "]" );
}
return jaxbProcessorHolder.getValue().unmarshal( stream, new Origin( SourceType.RESOURCE, cfgXmlResourceName ) );
return unmarshall( stream, new Origin( SourceType.RESOURCE, cfgXmlResourceName ) );
}
private JaxbHibernateConfiguration unmarshall(InputStream stream, Origin origin) {
try {
return jaxbProcessorHolder.getValue().unmarshal( stream, origin );
}
finally {
try {
stream.close();
}
catch (IOException e) {
log.debug( "Unable to close config input stream : " + e.getMessage() );
}
}
}
public JaxbHibernateConfiguration loadConfigFile(File cfgXmlFile) {
final InputStream stream = toStream( cfgXmlFile );
return unmarshall( stream, new Origin( SourceType.FILE, cfgXmlFile.getAbsolutePath() ) );
}
private InputStream toStream(File file) {
try {
return new FileInputStream( file );
}
catch (FileNotFoundException e) {
throw new ConfigurationException(
"Could not open input stream from File [" + file.getAbsolutePath() + "]"
);
}
}
public JaxbHibernateConfiguration loadConfig(URL configFileUrl) {
final InputStream stream = toStream( configFileUrl );
return unmarshall( stream, new Origin( SourceType.URL, configFileUrl.toExternalForm() ) );
}
private InputStream toStream(URL configFileUrl) {
try {
return configFileUrl.openStream();
}
catch (IOException e) {
throw new ConfigurationException(
"Could not open input stream from config file url [" + configFileUrl.toExternalForm() + "]"
);
}
}
public Properties loadProperties(String resourceName) {
@ -91,4 +141,27 @@ public Properties loadProperties(String resourceName) {
}
}
}
public Properties loadProperties(File propertyFile) {
final InputStream stream = toStream( propertyFile );
try {
Properties properties = new Properties();
properties.load( stream );
return properties;
}
catch (IOException e) {
throw new ConfigurationException( "Unable to apply settings from properties file [" + propertyFile + "]", e );
}
finally {
try {
stream.close();
}
catch (IOException e) {
log.debug(
String.format( "Unable to close properties file [%s] stream", propertyFile ),
e
);
}
}
}
}
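These new File and URL overloads back the matching StandardServiceRegistryBuilder methods that the reworked hbm2ddl tools later in this commit rely on. A minimal bootstrap sketch wiring them together (the file names are placeholders; the registry teardown mirrors the ssr.destroy() calls in those tools):

import java.io.File;

import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;

public class ConfigLoaderUsageSketch {
	public static void main(String[] args) {
		final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build();
		final StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder( bsr );

		// XML configuration, read through ConfigLoader.loadConfigFile(File)
		ssrBuilder.configure( new File( "hibernate.cfg.xml" ) );
		// plain key=value settings, read through ConfigLoader.loadProperties(File)
		ssrBuilder.loadProperties( new File( "hibernate.properties" ) );

		final StandardServiceRegistryImpl ssr = (StandardServiceRegistryImpl) ssrBuilder.build();
		try {
			// obtain services / build metadata here
		}
		finally {
			ssr.destroy();
		}
	}
}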

View File

@ -50,7 +50,7 @@
import javax.xml.validation.SchemaFactory;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.spi.Origin;
import org.hibernate.jaxb.spi.cfg.JaxbHibernateConfiguration;
import org.hibernate.internal.util.config.ConfigurationException;
import org.hibernate.metamodel.spi.source.MappingException;

View File

@ -25,7 +25,6 @@
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@ -41,15 +40,15 @@
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.internal.FormatStyle;
@ -60,9 +59,12 @@
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.metamodel.MetadataBuilder;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.jboss.logging.Logger;
/**
@ -112,26 +114,26 @@ public boolean doDrop() {
private String delimiter;
private boolean haltOnError = false;
public SchemaExport(ServiceRegistry serviceRegistry, Configuration configuration) {
this.connectionHelper = new SuppliedConnectionProviderConnectionHelper(
serviceRegistry.getService( ConnectionProvider.class )
);
this.sqlStatementLogger = serviceRegistry.getService( JdbcServices.class ).getSqlStatementLogger();
this.formatter = ( sqlStatementLogger.isFormat() ? FormatStyle.DDL : FormatStyle.NONE ).getFormatter();
this.sqlExceptionHelper = serviceRegistry.getService( JdbcServices.class ).getSqlExceptionHelper();
this.classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
this.importFiles = ConfigurationHelper.getString(
AvailableSettings.HBM2DDL_IMPORT_FILES,
configuration.getProperties(),
DEFAULT_IMPORT_FILE
);
final Dialect dialect = serviceRegistry.getService( JdbcServices.class ).getDialect();
this.dropSQL = configuration.generateDropSchemaScript( dialect );
this.createSQL = configuration.generateSchemaCreationScript( dialect );
}
// public SchemaExport(ServiceRegistry serviceRegistry, Configuration configuration) {
// this.connectionHelper = new SuppliedConnectionProviderConnectionHelper(
// serviceRegistry.getService( ConnectionProvider.class )
// );
// this.sqlStatementLogger = serviceRegistry.getService( JdbcServices.class ).getSqlStatementLogger();
// this.formatter = ( sqlStatementLogger.isFormat() ? FormatStyle.DDL : FormatStyle.NONE ).getFormatter();
// this.sqlExceptionHelper = serviceRegistry.getService( JdbcServices.class ).getSqlExceptionHelper();
//
// this.classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
//
// this.importFiles = ConfigurationHelper.getString(
// AvailableSettings.HBM2DDL_IMPORT_FILES,
// configuration.getProperties(),
// DEFAULT_IMPORT_FILE
// );
//
// final Dialect dialect = serviceRegistry.getService( JdbcServices.class ).getDialect();
// this.dropSQL = configuration.generateDropSchemaScript( dialect );
// this.createSQL = configuration.generateSchemaCreationScript( dialect );
// }
public SchemaExport(MetadataImplementor metadata, Connection connection){
ServiceRegistry serviceRegistry = metadata.getServiceRegistry();
@ -194,79 +196,79 @@ public SchemaExport(MetadataImplementor metadata) {
this(metadata, null);
}
/**
* Create a schema exporter for the given Configuration
*
* @param configuration The configuration from which to build a schema export.
*
* @throws HibernateException Indicates problem preparing for schema export.
*/
public SchemaExport(Configuration configuration) {
this( configuration, configuration.getProperties() );
}
// /**
// * Create a schema exporter for the given Configuration
// *
// * @param configuration The configuration from which to build a schema export.
// *
// * @throws HibernateException Indicates problem preparing for schema export.
// */
// public SchemaExport(Configuration configuration) {
// this( configuration, configuration.getProperties() );
// }
/**
* Create a schema exporter for the given Configuration, with the given
* database connection properties.
*
* @param configuration The configuration from which to build a schema export.
* @param properties The properties from which to configure connectivity etc.
*
* @throws HibernateException Indicates problem preparing for schema export.
* @deprecated properties may be specified via the Configuration object
*/
@Deprecated
public SchemaExport(Configuration configuration, Properties properties) throws HibernateException {
final Dialect dialect = Dialect.getDialect( properties );
// /**
// * Create a schema exporter for the given Configuration, with the given
// * database connection properties.
// *
// * @param configuration The configuration from which to build a schema export.
// * @param properties The properties from which to configure connectivity etc.
// *
// * @throws HibernateException Indicates problem preparing for schema export.
// * @deprecated properties may be specified via the Configuration object
// */
// @Deprecated
// public SchemaExport(Configuration configuration, Properties properties) throws HibernateException {
// final Dialect dialect = Dialect.getDialect( properties );
//
// Properties props = new Properties();
// props.putAll( dialect.getDefaultProperties() );
// props.putAll( properties );
// this.connectionHelper = new ManagedProviderConnectionHelper( props );
//
// this.sqlStatementLogger = new SqlStatementLogger( false, true );
// this.formatter = FormatStyle.DDL.getFormatter();
// this.sqlExceptionHelper = new SqlExceptionHelper();
//
// this.classLoaderService = new ClassLoaderServiceImpl();
//
// this.importFiles = ConfigurationHelper.getString(
// AvailableSettings.HBM2DDL_IMPORT_FILES,
// properties,
// DEFAULT_IMPORT_FILE
// );
//
// this.dropSQL = configuration.generateDropSchemaScript( dialect );
// this.createSQL = configuration.generateSchemaCreationScript( dialect );
// }
Properties props = new Properties();
props.putAll( dialect.getDefaultProperties() );
props.putAll( properties );
this.connectionHelper = new ManagedProviderConnectionHelper( props );
this.sqlStatementLogger = new SqlStatementLogger( false, true );
this.formatter = FormatStyle.DDL.getFormatter();
this.sqlExceptionHelper = new SqlExceptionHelper();
this.classLoaderService = new ClassLoaderServiceImpl();
this.importFiles = ConfigurationHelper.getString(
AvailableSettings.HBM2DDL_IMPORT_FILES,
properties,
DEFAULT_IMPORT_FILE
);
this.dropSQL = configuration.generateDropSchemaScript( dialect );
this.createSQL = configuration.generateSchemaCreationScript( dialect );
}
/**
* Create a schema exporter for the given Configuration, using the supplied connection for connectivity.
*
* @param configuration The configuration to use.
* @param connection The JDBC connection to use.
*
* @throws HibernateException Indicates problem preparing for schema export.
*/
public SchemaExport(Configuration configuration, Connection connection) throws HibernateException {
this.connectionHelper = new SuppliedConnectionHelper( connection );
this.sqlStatementLogger = new SqlStatementLogger( false, true );
this.formatter = FormatStyle.DDL.getFormatter();
this.sqlExceptionHelper = new SqlExceptionHelper();
this.classLoaderService = new ClassLoaderServiceImpl();
this.importFiles = ConfigurationHelper.getString(
AvailableSettings.HBM2DDL_IMPORT_FILES,
configuration.getProperties(),
DEFAULT_IMPORT_FILE
);
final Dialect dialect = Dialect.getDialect( configuration.getProperties() );
this.dropSQL = configuration.generateDropSchemaScript( dialect );
this.createSQL = configuration.generateSchemaCreationScript( dialect );
}
// /**
// * Create a schema exporter for the given Configuration, using the supplied connection for connectivity.
// *
// * @param configuration The configuration to use.
// * @param connection The JDBC connection to use.
// *
// * @throws HibernateException Indicates problem preparing for schema export.
// */
// public SchemaExport(Configuration configuration, Connection connection) throws HibernateException {
// this.connectionHelper = new SuppliedConnectionHelper( connection );
//
// this.sqlStatementLogger = new SqlStatementLogger( false, true );
// this.formatter = FormatStyle.DDL.getFormatter();
// this.sqlExceptionHelper = new SqlExceptionHelper();
//
// this.classLoaderService = new ClassLoaderServiceImpl();
//
// this.importFiles = ConfigurationHelper.getString(
// AvailableSettings.HBM2DDL_IMPORT_FILES,
// configuration.getProperties(),
// DEFAULT_IMPORT_FILE
// );
//
// final Dialect dialect = Dialect.getDialect( configuration.getProperties() );
// this.dropSQL = configuration.generateDropSchemaScript( dialect );
// this.createSQL = configuration.generateSchemaCreationScript( dialect );
// }
public SchemaExport(
ConnectionHelper connectionHelper,
@ -581,9 +583,13 @@ private static StandardServiceRegistryImpl createServiceRegistry(Properties prop
public static void main(String[] args) {
try {
final Configuration cfg = new Configuration();
final StandardServiceRegistryImpl serviceRegistry = createServiceRegistry( cfg.getProperties() );
final ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build();
final ClassLoaderService classLoaderService = bsr.getService( ClassLoaderService.class );
final MetadataSources metadataSources = new MetadataSources( bsr );
final StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder( bsr );
NamingStrategy namingStrategy = null;
boolean script = true;
boolean drop = false;
@ -592,7 +598,6 @@ public static void main(String[] args) {
boolean export = true;
String outFile = null;
String importFile = DEFAULT_IMPORT_FILE;
String propFile = null;
boolean format = false;
String delim = null;
@ -620,7 +625,7 @@ else if ( args[i].startsWith( "--import=" ) ) {
importFile = args[i].substring( 9 );
}
else if ( args[i].startsWith( "--properties=" ) ) {
propFile = args[i].substring( 13 );
ssrBuilder.loadProperties( new File( args[i].substring( 13 ) ) );
}
else if ( args[i].equals( "--format" ) ) {
format = true;
@ -629,51 +634,48 @@ else if ( args[i].startsWith( "--delimiter=" ) ) {
delim = args[i].substring( 12 );
}
else if ( args[i].startsWith( "--config=" ) ) {
cfg.configure( args[i].substring( 9 ) );
ssrBuilder.configure( args[i].substring( 9 ) );
}
else if ( args[i].startsWith( "--naming=" ) ) {
cfg.setNamingStrategy(
( NamingStrategy ) classLoaderService.classForName( args[i].substring( 9 ) )
.newInstance()
);
namingStrategy = (NamingStrategy) classLoaderService.classForName( args[i].substring( 9 ) ).newInstance();
}
}
else {
String filename = args[i];
if ( filename.endsWith( ".jar" ) ) {
cfg.addJar( new File( filename ) );
metadataSources.addJar( new File( filename ) );
}
else {
cfg.addFile( filename );
metadataSources.addFile( filename );
}
}
}
if ( propFile != null ) {
Properties props = new Properties();
props.putAll( cfg.getProperties() );
props.load( new FileInputStream( propFile ) );
cfg.setProperties( props );
}
if ( importFile != null ) {
cfg.setProperty( AvailableSettings.HBM2DDL_IMPORT_FILES, importFile );
ssrBuilder.applySetting( AvailableSettings.HBM2DDL_IMPORT_FILES, importFile );
}
final StandardServiceRegistryImpl ssr = (StandardServiceRegistryImpl) ssrBuilder.build();
final MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder( ssr );
if ( namingStrategy != null ) {
metadataBuilder.with( namingStrategy );
}
final MetadataImplementor metadata = (MetadataImplementor) metadataBuilder.build();
try {
SchemaExport se = new SchemaExport( serviceRegistry, cfg )
SchemaExport se = new SchemaExport( metadata )
.setHaltOnError( halt )
.setOutputFile( outFile )
.setDelimiter( delim )
.setImportSqlCommandExtractor( serviceRegistry.getService( ImportSqlCommandExtractor.class ) );
.setImportSqlCommandExtractor( ssr.getService( ImportSqlCommandExtractor.class ) );
if ( format ) {
se.setFormat( true );
}
se.execute( script, export, drop, create );
}
finally {
serviceRegistry.destroy();
ssr.destroy();
}
}
catch ( Exception e ) {
@ -690,13 +692,5 @@ else if ( args[i].startsWith( "--naming=" ) ) {
public List getExceptions() {
return exceptions;
}
public String[] getCreateSqlScripts() {
return createSQL;
}
public String[] getDropSqlScripts() {
return dropSQL;
}
}
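With the Configuration-based constructors commented out, the rewritten main() above becomes the template for driving SchemaExport programmatically: build a MetadataImplementor from MetadataSources plus a StandardServiceRegistry, then hand it to the metadata-based SchemaExport constructor. A condensed sketch of that flow (the mapping file, properties file and output path are placeholders, and the import locations reflect the 4.x layout as assumed):

import java.io.File;

import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor;
import org.hibernate.tool.hbm2ddl.SchemaExport;

public class SchemaExportSketch {
	public static void main(String[] args) {
		final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build();
		final MetadataSources metadataSources = new MetadataSources( bsr );
		metadataSources.addFile( "Order.hbm.xml" );  // placeholder mapping file

		final StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder( bsr );
		ssrBuilder.loadProperties( new File( "hibernate.properties" ) );

		final StandardServiceRegistryImpl ssr = (StandardServiceRegistryImpl) ssrBuilder.build();
		try {
			final MetadataImplementor metadata =
					(MetadataImplementor) metadataSources.getMetadataBuilder( ssr ).build();
			new SchemaExport( metadata )
					.setOutputFile( "schema.sql" )
					.setDelimiter( ";" )
					.setImportSqlCommandExtractor( ssr.getService( ImportSqlCommandExtractor.class ) )
					.execute( true, false, false, true );  // print script, skip database export, create statements only
		}
		finally {
			ssr.destroy();
		}
	}
}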

View File

@ -25,19 +25,22 @@
package org.hibernate.tool.hbm2ddl;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Configuration;
import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.metamodel.MetadataBuilder;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
@ -167,7 +170,7 @@ public void setOutput(File outputFile) {
@Override
public void execute() throws BuildException {
try {
getSchemaExport( getConfiguration() ).execute(!quiet, !text, drop, create);
buildSchemaExport().execute( !quiet, !text, drop, create );
}
catch (HibernateException e) {
throw new BuildException("Schema text failed: " + e.getMessage(), e);
@ -183,66 +186,64 @@ public void execute() throws BuildException {
}
}
private SchemaExport buildSchemaExport() throws Exception {
final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build();
final MetadataSources metadataSources = new MetadataSources( bsr );
final StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder( bsr );
if ( configurationFile != null ) {
ssrBuilder.configure( configurationFile );
}
if ( propertiesFile != null ) {
ssrBuilder.loadProperties( propertiesFile );
}
ssrBuilder.applySettings( getProject().getProperties() );
for ( String fileName : getFiles() ) {
if ( fileName.endsWith(".jar") ) {
metadataSources.addJar( new File( fileName ) );
}
else {
metadataSources.addFile( fileName );
}
}
final StandardServiceRegistryImpl ssr = (StandardServiceRegistryImpl) ssrBuilder.build();
final MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder( ssr );
if ( namingStrategy != null ) {
final ClassLoaderService classLoaderService = bsr.getService( ClassLoaderService.class );
final NamingStrategy namingStrategyInstance = (NamingStrategy) classLoaderService.classForName( namingStrategy ).newInstance();
metadataBuilder.with( namingStrategyInstance );
}
return new SchemaExport( (MetadataImplementor) metadataBuilder.build() )
.setHaltOnError( haltOnError )
.setOutputFile( outputFile.getPath() )
.setDelimiter( delimiter );
}
private String[] getFiles() {
List files = new LinkedList();
for ( Iterator i = fileSets.iterator(); i.hasNext(); ) {
FileSet fs = (FileSet) i.next();
List<String> fileNames = new LinkedList<String>();
for ( Object fileSet : fileSets ) {
FileSet fs = (FileSet) fileSet;
DirectoryScanner ds = fs.getDirectoryScanner( getProject() );
String[] dsFiles = ds.getIncludedFiles();
for (int j = 0; j < dsFiles.length; j++) {
File f = new File(dsFiles[j]);
for ( int j = 0; j < dsFiles.length; j++ ) {
File f = new File( dsFiles[j] );
if ( !f.isFile() ) {
f = new File( ds.getBasedir(), dsFiles[j] );
}
files.add( f.getAbsolutePath() );
fileNames.add( f.getAbsolutePath() );
}
}
return ArrayHelper.toStringArray(files);
}
private Configuration getConfiguration() throws Exception {
Configuration cfg = new Configuration();
if (namingStrategy!=null) {
cfg.setNamingStrategy(
(NamingStrategy) ReflectHelper.classForName(namingStrategy).newInstance()
);
}
if (configurationFile != null) {
cfg.configure( configurationFile );
}
String[] files = getFiles();
for (int i = 0; i < files.length; i++) {
String filename = files[i];
if ( filename.endsWith(".jar") ) {
cfg.addJar( new File(filename) );
}
else {
cfg.addFile(filename);
}
}
return cfg;
}
private SchemaExport getSchemaExport(Configuration cfg) throws HibernateException, IOException {
Properties properties = new Properties();
properties.putAll( cfg.getProperties() );
if (propertiesFile == null) {
properties.putAll( getProject().getProperties() );
}
else {
properties.load( new FileInputStream(propertiesFile) );
}
cfg.setProperties(properties);
return new SchemaExport(cfg)
.setHaltOnError(haltOnError)
.setOutputFile( outputFile.getPath() )
.setDelimiter(delimiter);
return ArrayHelper.toStringArray( fileNames );
}
public void setNamingStrategy(String namingStrategy) {

View File

@ -1,297 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.tool.hbm2ddl;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.Writer;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.JDBCException;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.internal.FormatStyle;
import org.hibernate.engine.jdbc.internal.Formatter;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jdbc.spi.SqlExceptionHelper;
import org.hibernate.engine.jdbc.spi.SqlStatementLogger;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.service.ServiceRegistry;
import org.jboss.logging.Logger;
/**
* A commandline tool to update a database schema. May also be called from inside an application.
*
* @author Christoph Sturm
* @author Steve Ebersole
*/
public class SchemaUpdate {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, SchemaUpdate.class.getName());
private final Configuration configuration;
private final ConnectionHelper connectionHelper;
private final SqlStatementLogger sqlStatementLogger;
private final SqlExceptionHelper sqlExceptionHelper;
private final Dialect dialect;
private final List<Exception> exceptions = new ArrayList<Exception>();
private Formatter formatter;
private boolean haltOnError;
private boolean format = true;
private String outputFile;
private String delimiter;
public SchemaUpdate(Configuration cfg) throws HibernateException {
this( cfg, cfg.getProperties() );
}
public SchemaUpdate(Configuration configuration, Properties properties) throws HibernateException {
this.configuration = configuration;
this.dialect = Dialect.getDialect( properties );
Properties props = new Properties();
props.putAll( dialect.getDefaultProperties() );
props.putAll( properties );
this.connectionHelper = new ManagedProviderConnectionHelper( props );
this.sqlExceptionHelper = new SqlExceptionHelper();
this.sqlStatementLogger = new SqlStatementLogger( false, true );
this.formatter = FormatStyle.DDL.getFormatter();
}
public SchemaUpdate(ServiceRegistry serviceRegistry, Configuration cfg) throws HibernateException {
this.configuration = cfg;
final JdbcServices jdbcServices = serviceRegistry.getService( JdbcServices.class );
this.dialect = jdbcServices.getDialect();
this.connectionHelper = new SuppliedConnectionProviderConnectionHelper( jdbcServices.getConnectionProvider() );
this.sqlExceptionHelper = new SqlExceptionHelper();
this.sqlStatementLogger = jdbcServices.getSqlStatementLogger();
this.formatter = ( sqlStatementLogger.isFormat() ? FormatStyle.DDL : FormatStyle.NONE ).getFormatter();
}
private static StandardServiceRegistryImpl createServiceRegistry(Properties properties) {
Environment.verifyProperties( properties );
ConfigurationHelper.resolvePlaceHolders( properties );
return (StandardServiceRegistryImpl) new StandardServiceRegistryBuilder().applySettings( properties ).build();
}
public static void main(String[] args) {
try {
Configuration cfg = new Configuration();
boolean script = true;
// If true then execute db updates, otherwise just generate and display updates
boolean doUpdate = true;
String propFile = null;
for ( int i = 0; i < args.length; i++ ) {
if ( args[i].startsWith( "--" ) ) {
if ( args[i].equals( "--quiet" ) ) {
script = false;
}
else if ( args[i].startsWith( "--properties=" ) ) {
propFile = args[i].substring( 13 );
}
else if ( args[i].startsWith( "--config=" ) ) {
cfg.configure( args[i].substring( 9 ) );
}
else if ( args[i].startsWith( "--text" ) ) {
doUpdate = false;
}
else if ( args[i].startsWith( "--naming=" ) ) {
cfg.setNamingStrategy(
( NamingStrategy ) ReflectHelper.classForName( args[i].substring( 9 ) ).newInstance()
);
}
}
else {
cfg.addFile( args[i] );
}
}
if ( propFile != null ) {
Properties props = new Properties();
props.putAll( cfg.getProperties() );
props.load( new FileInputStream( propFile ) );
cfg.setProperties( props );
}
StandardServiceRegistryImpl serviceRegistry = createServiceRegistry( cfg.getProperties() );
try {
new SchemaUpdate( serviceRegistry, cfg ).execute( script, doUpdate );
}
finally {
serviceRegistry.destroy();
}
}
catch ( Exception e ) {
LOG.unableToRunSchemaUpdate(e);
e.printStackTrace();
}
}
/**
* Execute the schema updates
*
* @param script print all DDL to the console
*/
public void execute(boolean script, boolean doUpdate) {
execute( Target.interpret( script, doUpdate ) );
}
public void execute(Target target) {
LOG.runningHbm2ddlSchemaUpdate();
Connection connection = null;
Statement stmt = null;
Writer outputFileWriter = null;
exceptions.clear();
try {
DatabaseMetadata meta;
try {
LOG.fetchingDatabaseMetadata();
connectionHelper.prepare( true );
connection = connectionHelper.getConnection();
meta = new DatabaseMetadata( connection, dialect, configuration );
stmt = connection.createStatement();
}
catch ( SQLException sqle ) {
exceptions.add( sqle );
LOG.unableToGetDatabaseMetadata(sqle);
throw sqle;
}
LOG.updatingSchema();
if ( outputFile != null ) {
LOG.writingGeneratedSchemaToFile( outputFile );
outputFileWriter = new FileWriter( outputFile );
}
List<SchemaUpdateScript> scripts = configuration.generateSchemaUpdateScriptList( dialect, meta );
for ( SchemaUpdateScript script : scripts ) {
String formatted = formatter.format( script.getScript() );
try {
if ( delimiter != null ) {
formatted += delimiter;
}
if ( target.doScript() ) {
System.out.println( formatted );
}
if ( outputFile != null ) {
outputFileWriter.write( formatted + "\n" );
}
if ( target.doExport() ) {
LOG.debug( script.getScript() );
stmt.executeUpdate( formatted );
}
}
catch ( SQLException e ) {
if (!script.isQuiet()) {
if ( haltOnError ) {
throw new JDBCException( "Error during DDL export", e );
}
exceptions.add( e );
LOG.unsuccessful(script.getScript());
LOG.error(e.getMessage());
}
}
}
LOG.schemaUpdateComplete();
}
catch ( Exception e ) {
exceptions.add( e );
LOG.unableToCompleteSchemaUpdate(e);
}
finally {
try {
if ( stmt != null ) {
stmt.close();
}
connectionHelper.release();
}
catch ( Exception e ) {
exceptions.add( e );
LOG.unableToCloseConnection(e);
}
try {
if( outputFileWriter != null ) {
outputFileWriter.close();
}
}
catch(Exception e) {
exceptions.add(e);
LOG.unableToCloseConnection(e);
}
}
}
/**
* Returns a List of all Exceptions which occurred during the export.
*
* @return A List containing the Exceptions that occurred during the export
*/
public List getExceptions() {
return exceptions;
}
public void setHaltOnError(boolean haltOnError) {
this.haltOnError = haltOnError;
}
public void setFormat(boolean format) {
this.formatter = ( format ? FormatStyle.DDL : FormatStyle.NONE ).getFormatter();
}
public void setOutputFile(String outputFile) {
this.outputFile = outputFile;
}
public void setDelimiter(String delimiter) {
this.delimiter = delimiter;
}
}

View File

@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008, 2014, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,24 +20,38 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.tool.hbm2ddl;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Configuration;
import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.metamodel.MetadataBuilder;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.DatabaseInformationBuilder;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.SchemaMigrator;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
@ -62,7 +76,6 @@
* &lt;/schemaupdate&gt;
* </pre>
*
* @see SchemaUpdate
* @author Rong C Ou, Gavin King
*/
public class SchemaUpdateTask extends MatchingTask {
@ -129,8 +142,7 @@ public void execute() throws BuildException {
try {
log("Running Hibernate Core SchemaUpdate.");
log("This is an Ant task supporting only mapping files, if you want to use annotations see http://tools.hibernate.org.");
Configuration cfg = getConfiguration();
getSchemaUpdate(cfg).execute(!quiet, !text);
doIt();
}
catch (HibernateException e) {
throw new BuildException("Schema text failed: " + e.getMessage(), e);
@ -146,6 +158,116 @@ public void execute() throws BuildException {
}
}
private void doIt() throws Exception {
final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build();
final MetadataSources metadataSources = new MetadataSources( bsr );
final StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder( bsr );
if ( configurationFile != null ) {
ssrBuilder.configure( configurationFile );
}
if ( propertiesFile != null ) {
ssrBuilder.loadProperties( propertiesFile );
}
ssrBuilder.applySettings( getProject().getProperties() );
for ( String fileName : getFiles() ) {
if ( fileName.endsWith(".jar") ) {
metadataSources.addJar( new File( fileName ) );
}
else {
metadataSources.addFile( fileName );
}
}
final StandardServiceRegistryImpl ssr = (StandardServiceRegistryImpl) ssrBuilder.build();
final MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder( ssr );
if ( namingStrategy != null ) {
final ClassLoaderService classLoaderService = bsr.getService( ClassLoaderService.class );
final NamingStrategy namingStrategyInstance = (NamingStrategy) classLoaderService.classForName( namingStrategy ).newInstance();
metadataBuilder.with( namingStrategyInstance );
}
final MetadataImplementor metadata = (MetadataImplementor) metadataBuilder.build();
final JdbcEnvironment jdbcEnvironment = ssr.getService( JdbcEnvironment.class );
final ConnectionProvider connectionProvider = ssr.getService( ConnectionProvider.class );
final JdbcConnectionAccess connectionAccess = new ConnectionProviderJdbcConnectionAccess( connectionProvider );
final DatabaseInformation existing = new DatabaseInformationBuilder( jdbcEnvironment, connectionAccess ).build();
final org.hibernate.tool.schema.spi.Target target = new org.hibernate.tool.schema.spi.Target() {
private Statement stmnt;
@Override
public boolean acceptsImportScriptActions() {
return false;
}
@Override
public void prepare() {
try {
stmnt = connectionAccess.obtainConnection().createStatement();
}
catch (SQLException e) {
throw new HibernateException( "Could not build JDBC Statement", e );
}
}
@Override
public void accept(String action) {
try {
stmnt.execute( action );
}
catch (SQLException e) {
throw new HibernateException( "Could not execute command via JDBC", e );
}
}
@Override
public void release() {
try {
stmnt.close();
}
catch (SQLException e) {
throw new HibernateException( "Could not release JDBC Statement", e );
}
}
};
final SchemaMigrator schemaMigrator = ssr.getService( SchemaManagementTool.class ).getSchemaMigrator( Collections.emptyMap() );
schemaMigrator.doMigration(
metadata.getDatabase(),
existing,
true,
Collections.singletonList( target )
);
}
private static class ConnectionProviderJdbcConnectionAccess implements JdbcConnectionAccess {
private final ConnectionProvider connectionProvider;
public ConnectionProviderJdbcConnectionAccess(ConnectionProvider connectionProvider) {
this.connectionProvider = connectionProvider;
}
@Override
public Connection obtainConnection() throws SQLException {
return connectionProvider.getConnection();
}
@Override
public void releaseConnection(Connection connection) throws SQLException {
connectionProvider.closeConnection( connection );
}
@Override
public boolean supportsAggressiveRelease() {
return connectionProvider.supportsAggressiveRelease();
}
}
private String[] getFiles() {
List files = new LinkedList();
@ -168,47 +290,6 @@ private String[] getFiles() {
return ArrayHelper.toStringArray( files );
}
private Configuration getConfiguration() throws Exception {
Configuration cfg = new Configuration();
if (namingStrategy!=null) {
cfg.setNamingStrategy(
(NamingStrategy) ReflectHelper.classForName( namingStrategy ).newInstance()
);
}
if (configurationFile!=null) {
cfg.configure( configurationFile );
}
String[] files = getFiles();
for (int i = 0; i < files.length; i++) {
String filename = files[i];
if ( filename.endsWith(".jar") ) {
cfg.addJar( new File(filename) );
}
else {
cfg.addFile(filename);
}
}
return cfg;
}
private SchemaUpdate getSchemaUpdate(Configuration cfg) throws HibernateException, IOException {
Properties properties = new Properties();
properties.putAll( cfg.getProperties() );
if (propertiesFile == null) {
properties.putAll( getProject().getProperties() );
}
else {
properties.load( new FileInputStream(propertiesFile) );
}
cfg.setProperties(properties);
SchemaUpdate su = new SchemaUpdate(cfg);
su.setOutputFile( outputFile.getPath() );
su.setDelimiter(delimiter);
su.setHaltOnError(haltOnError);
return su;
}
public void setNamingStrategy(String namingStrategy) {
this.namingStrategy = namingStrategy;
}
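The anonymous org.hibernate.tool.schema.spi.Target above feeds each migration command straight into a JDBC Statement. The same four-method contract (acceptsImportScriptActions, prepare, accept, release) can just as well capture the commands for later inspection; a minimal sketch using only the methods shown in the diff (the class name is hypothetical):

import java.util.ArrayList;
import java.util.List;

import org.hibernate.tool.schema.spi.Target;

// Hypothetical Target that records the generated commands instead of executing them.
public class ScriptCollectingTarget implements Target {
	private final List<String> commands = new ArrayList<String>();

	@Override
	public boolean acceptsImportScriptActions() {
		// import scripts only make sense when actually exporting to a database
		return false;
	}

	@Override
	public void prepare() {
		commands.clear();
	}

	@Override
	public void accept(String action) {
		commands.add( action );
	}

	@Override
	public void release() {
		// nothing to close; callers read getCommands() afterwards
	}

	public List<String> getCommands() {
		return commands;
	}
}

Passing such a target to schemaMigrator.doMigration(...) in place of the JDBC-backed one would yield the update script without touching the database.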

View File

@ -1,172 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008-2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.tool.hbm2ddl;
import java.io.FileInputStream;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.service.ServiceRegistry;
import org.jboss.logging.Logger;
/**
* A commandline tool to validate a database schema. May also be called from
* inside an application.
*
* @author Christoph Sturm
*/
public class SchemaValidator {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, SchemaValidator.class.getName());
private ConnectionHelper connectionHelper;
private Configuration configuration;
private Dialect dialect;
public SchemaValidator(Configuration cfg) throws HibernateException {
this( cfg, cfg.getProperties() );
}
public SchemaValidator(Configuration cfg, Properties connectionProperties) throws HibernateException {
this.configuration = cfg;
dialect = Dialect.getDialect( connectionProperties );
Properties props = new Properties();
props.putAll( dialect.getDefaultProperties() );
props.putAll( connectionProperties );
connectionHelper = new ManagedProviderConnectionHelper( props );
}
public SchemaValidator(ServiceRegistry serviceRegistry, Configuration cfg ) throws HibernateException {
this.configuration = cfg;
final JdbcServices jdbcServices = serviceRegistry.getService( JdbcServices.class );
this.dialect = jdbcServices.getDialect();
this.connectionHelper = new SuppliedConnectionProviderConnectionHelper( jdbcServices.getConnectionProvider() );
}
private static StandardServiceRegistryImpl createServiceRegistry(Properties properties) {
Environment.verifyProperties( properties );
ConfigurationHelper.resolvePlaceHolders( properties );
return (StandardServiceRegistryImpl) new StandardServiceRegistryBuilder().applySettings( properties ).build();
}
public static void main(String[] args) {
try {
Configuration cfg = new Configuration();
String propFile = null;
for ( int i = 0; i < args.length; i++ ) {
if ( args[i].startsWith( "--" ) ) {
if ( args[i].startsWith( "--properties=" ) ) {
propFile = args[i].substring( 13 );
}
else if ( args[i].startsWith( "--config=" ) ) {
cfg.configure( args[i].substring( 9 ) );
}
else if ( args[i].startsWith( "--naming=" ) ) {
cfg.setNamingStrategy(
( NamingStrategy ) ReflectHelper.classForName( args[i].substring( 9 ) ).newInstance()
);
}
}
else {
cfg.addFile( args[i] );
}
}
if ( propFile != null ) {
Properties props = new Properties();
props.putAll( cfg.getProperties() );
props.load( new FileInputStream( propFile ) );
cfg.setProperties( props );
}
StandardServiceRegistryImpl serviceRegistry = createServiceRegistry( cfg.getProperties() );
try {
new SchemaValidator( serviceRegistry, cfg ).validate();
}
finally {
serviceRegistry.destroy();
}
}
catch ( Exception e ) {
LOG.unableToRunSchemaUpdate(e);
e.printStackTrace();
}
}
/**
* Perform the validations.
*/
public void validate() {
LOG.runningSchemaValidator();
Connection connection = null;
try {
DatabaseMetadata meta;
try {
LOG.fetchingDatabaseMetadata();
connectionHelper.prepare( false );
connection = connectionHelper.getConnection();
meta = new DatabaseMetadata( connection, dialect, configuration, false );
}
catch ( SQLException sqle ) {
LOG.unableToGetDatabaseMetadata(sqle);
throw sqle;
}
configuration.validateSchema( dialect, meta );
}
catch ( SQLException e ) {
LOG.unableToCompleteSchemaValidation(e);
}
finally {
try {
connectionHelper.release();
}
catch ( Exception e ) {
LOG.unableToCloseConnection(e);
}
}
}
}

View File

@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* Copyright (c) 2008, 2014, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@ -20,24 +20,37 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*
*/
package org.hibernate.tool.hbm2ddl;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Configuration;
import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.metamodel.MetadataBuilder;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.DatabaseInformationBuilder;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
@ -61,7 +74,6 @@
* &lt;/schemaupdate&gt;
* </pre>
*
* @see SchemaValidator
* @author Gavin King
*/
public class SchemaValidatorTask extends MatchingTask {
@ -102,8 +114,7 @@ public void setConfig(File configurationFile) {
@Override
public void execute() throws BuildException {
try {
Configuration cfg = getConfiguration();
getSchemaValidator(cfg).validate();
doIt();
}
catch (HibernateException e) {
throw new BuildException("Schema text failed: " + e.getMessage(), e);
@ -119,6 +130,112 @@ public void execute() throws BuildException {
}
}
private void doIt() throws Exception {
final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build();
final MetadataSources metadataSources = new MetadataSources( bsr );
final StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder( bsr );
if ( configurationFile != null ) {
ssrBuilder.configure( configurationFile );
}
if ( propertiesFile != null ) {
ssrBuilder.loadProperties( propertiesFile );
}
ssrBuilder.applySettings( getProject().getProperties() );
for ( String fileName : getFiles() ) {
if ( fileName.endsWith(".jar") ) {
metadataSources.addJar( new File( fileName ) );
}
else {
metadataSources.addFile( fileName );
}
}
final StandardServiceRegistryImpl ssr = (StandardServiceRegistryImpl) ssrBuilder.build();
final MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder( ssr );
if ( namingStrategy != null ) {
final ClassLoaderService classLoaderService = bsr.getService( ClassLoaderService.class );
final NamingStrategy namingStrategyInstance = (NamingStrategy) classLoaderService.classForName( namingStrategy ).newInstance();
metadataBuilder.with( namingStrategyInstance );
}
final MetadataImplementor metadata = (MetadataImplementor) metadataBuilder.build();
final JdbcEnvironment jdbcEnvironment = ssr.getService( JdbcEnvironment.class );
final ConnectionProvider connectionProvider = ssr.getService( ConnectionProvider.class );
final JdbcConnectionAccess connectionAccess = new ConnectionProviderJdbcConnectionAccess( connectionProvider );
final DatabaseInformation existing = new DatabaseInformationBuilder( jdbcEnvironment, connectionAccess ).build();
final org.hibernate.tool.schema.spi.Target target = new org.hibernate.tool.schema.spi.Target() {
private Statement stmnt;
@Override
public boolean acceptsImportScriptActions() {
return false;
}
@Override
public void prepare() {
try {
stmnt = connectionAccess.obtainConnection().createStatement();
}
catch (SQLException e) {
throw new HibernateException( "Could not build JDBC Statement", e );
}
}
@Override
public void accept(String action) {
try {
stmnt.execute( action );
}
catch (SQLException e) {
throw new HibernateException( "Could not execute command via JDBC", e );
}
}
@Override
public void release() {
try {
stmnt.close();
}
catch (SQLException e) {
throw new HibernateException( "Could not release JDBC Statement", e );
}
}
};
final org.hibernate.tool.schema.spi.SchemaValidator validator = ssr.getService( SchemaManagementTool.class )
.getSchemaValidator( Collections.emptyMap() );
validator.doValidation( metadata.getDatabase(), existing );
}
private static class ConnectionProviderJdbcConnectionAccess implements JdbcConnectionAccess {
private final ConnectionProvider connectionProvider;
public ConnectionProviderJdbcConnectionAccess(ConnectionProvider connectionProvider) {
this.connectionProvider = connectionProvider;
}
@Override
public Connection obtainConnection() throws SQLException {
return connectionProvider.getConnection();
}
@Override
public void releaseConnection(Connection connection) throws SQLException {
connectionProvider.closeConnection( connection );
}
@Override
public boolean supportsAggressiveRelease() {
return connectionProvider.supportsAggressiveRelease();
}
}
private String[] getFiles() {
List files = new LinkedList();
@ -141,43 +258,6 @@ private String[] getFiles() {
return ArrayHelper.toStringArray( files );
}
private Configuration getConfiguration() throws Exception {
Configuration cfg = new Configuration();
if (namingStrategy!=null) {
cfg.setNamingStrategy(
(NamingStrategy) ReflectHelper.classForName(namingStrategy).newInstance()
);
}
if (configurationFile!=null) {
cfg.configure( configurationFile );
}
String[] files = getFiles();
for (int i = 0; i < files.length; i++) {
String filename = files[i];
if ( filename.endsWith(".jar") ) {
cfg.addJar( new File(filename) );
}
else {
cfg.addFile(filename);
}
}
return cfg;
}
private SchemaValidator getSchemaValidator(Configuration cfg) throws HibernateException, IOException {
Properties properties = new Properties();
properties.putAll( cfg.getProperties() );
if (propertiesFile == null) {
properties.putAll( getProject().getProperties() );
}
else {
properties.load( new FileInputStream(propertiesFile) );
}
cfg.setProperties(properties);
return new SchemaValidator(cfg);
}
public void setNamingStrategy(String namingStrategy) {
this.namingStrategy = namingStrategy;
}
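The rewritten execute() path above now bootstraps through the registry and metamodel APIs instead of Configuration. For orientation, a minimal standalone sketch of that flow, assuming the APIs exactly as they appear in this diff; the config file, mapping file, and class names are placeholders:

import java.io.File;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collections;

import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.tool.schema.extract.spi.DatabaseInformation;
import org.hibernate.tool.schema.extract.spi.DatabaseInformationBuilder;
import org.hibernate.tool.schema.spi.SchemaManagementTool;

public class SchemaValidationSketch {
	public static void main(String[] args) throws Exception {
		// Bootstrap registry backs both the service registry and the metadata sources.
		final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build();

		// Settings come from a config file; the name here is a placeholder.
		final StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder( bsr );
		ssrBuilder.configure( new File( "hibernate.cfg.xml" ) );
		final StandardServiceRegistryImpl ssr = (StandardServiceRegistryImpl) ssrBuilder.build();

		// Mapping sources replace Configuration#addFile/#addJar; the file name is a placeholder.
		final MetadataSources sources = new MetadataSources( bsr );
		sources.addFile( "Mappings.hbm.xml" );
		final MetadataImplementor metadata = (MetadataImplementor) sources.getMetadataBuilder( ssr ).build();

		// Snapshot of the existing database, built over a JdbcConnectionAccess wrapper
		// equivalent to the ConnectionProviderJdbcConnectionAccess helper added in this task.
		final JdbcEnvironment jdbcEnvironment = ssr.getService( JdbcEnvironment.class );
		final ConnectionProvider connectionProvider = ssr.getService( ConnectionProvider.class );
		final JdbcConnectionAccess connectionAccess = new JdbcConnectionAccess() {
			@Override
			public Connection obtainConnection() throws SQLException {
				return connectionProvider.getConnection();
			}

			@Override
			public void releaseConnection(Connection connection) throws SQLException {
				connectionProvider.closeConnection( connection );
			}

			@Override
			public boolean supportsAggressiveRelease() {
				return connectionProvider.supportsAggressiveRelease();
			}
		};
		final DatabaseInformation existing = new DatabaseInformationBuilder( jdbcEnvironment, connectionAccess ).build();

		// Validation is now obtained from the SchemaManagementTool service
		// instead of new SchemaValidator( cfg ).
		ssr.getService( SchemaManagementTool.class )
				.getSchemaValidator( Collections.emptyMap() )
				.doValidation( metadata.getDatabase(), existing );

		StandardServiceRegistryBuilder.destroy( ssr );
	}
}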

View File

@ -21,7 +21,7 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jaxb.internal;
package org.hibernate.xml.internal.jaxb;
import java.io.InputStream;
import javax.xml.bind.JAXBContext;
@ -39,42 +39,34 @@
import org.jboss.logging.Logger;
import org.hibernate.internal.util.xml.BufferedXMLEventReader;
import org.hibernate.internal.util.xml.LocalXmlResourceResolver;
import org.hibernate.jaxb.spi.JaxbRoot;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.internal.stax.BufferedXMLEventReader;
import org.hibernate.xml.internal.stax.LocalXmlResourceResolver;
import org.hibernate.xml.spi.BindResult;
import org.hibernate.xml.spi.Origin;
import org.hibernate.metamodel.spi.source.MappingException;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.xml.spi.XmlBinder;
/**
* @author Strong Liu <stliu@hibernate.org>
*/
abstract class AbstractJaxbProcessor {
protected static final Logger log = Logger.getLogger( AbstractJaxbProcessor.class );
// public static final String VALIDATE_XML_SETTING = "hibernate.xml.validate";
abstract class AbstractXmlBinder implements XmlBinder {
protected static final Logger log = Logger.getLogger( AbstractXmlBinder.class );
protected final ServiceRegistry serviceRegistry;
protected final boolean validateXml;
public AbstractJaxbProcessor(ServiceRegistry serviceRegistry) {
public AbstractXmlBinder(ServiceRegistry serviceRegistry) {
this( serviceRegistry, true );
// this(
// serviceRegistry,
// serviceRegistry.getService( ConfigurationService.class ).getSetting(
// VALIDATE_XML_SETTING,
// StandardConverters.BOOLEAN,
// true
// )
// );
}
public AbstractJaxbProcessor(ServiceRegistry serviceRegistry, boolean validateXml) {
public AbstractXmlBinder(ServiceRegistry serviceRegistry, boolean validateXml) {
this.serviceRegistry = serviceRegistry;
this.validateXml = validateXml;
}
public JaxbRoot unmarshal(InputStream stream, Origin origin) {
@Override
public BindResult bind(InputStream stream, Origin origin) {
try {
BufferedXMLEventReader staxReader = new BufferedXMLEventReader(staxFactory().createXMLEventReader( stream ), 100);
try {
@ -110,7 +102,7 @@ private XMLInputFactory buildStaxFactory() {
}
@SuppressWarnings( { "unchecked" })
private JaxbRoot unmarshal(XMLEventReader staxEventReader, final Origin origin) {
private BindResult unmarshal(XMLEventReader staxEventReader, final Origin origin) {
XMLEvent event;
try {
event = staxEventReader.peek();
@ -138,17 +130,14 @@ private JaxbRoot unmarshal(XMLEventReader staxEventReader, final Origin origin)
unmarshaller.setSchema( schema );
unmarshaller.setEventHandler( handler );
final Object target = unmarshaller.unmarshal( staxEventReader );
return new JaxbRoot( target, origin );
return new BindResult( target, origin );
}
catch ( JAXBException e ) {
StringBuilder builder = new StringBuilder();
builder.append( "Unable to perform unmarshalling at line number " );
builder.append( handler.getLineNumber() );
builder.append( " and column " );
builder.append( handler.getColumnNumber() );
builder.append( ". Message: " );
builder.append( handler.getMessage() );
throw new MappingException( builder.toString(), e, origin );
throw new MappingException(
"Unable to perform unmarshalling at line number " + handler.getLineNumber()
+ " and column " + handler.getColumnNumber()
+ ". Message: " + handler.getMessage(), e, origin
);
}
}
protected abstract JAXBContext getJaxbContext(XMLEvent event) throws JAXBException;

View File

@ -21,7 +21,7 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jaxb.internal;
package org.hibernate.xml.internal.jaxb;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
@ -31,25 +31,25 @@
import org.jboss.logging.Logger;
import org.hibernate.internal.util.xml.MappingReader;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.internal.stax.LocalSchemaLocator;
import org.hibernate.xml.spi.Origin;
import org.hibernate.jaxb.spi.cfg.JaxbHibernateConfiguration;
import org.hibernate.service.ServiceRegistry;
/**
* @author Steve Ebersole
*/
public class JaxbConfigurationProcessor extends AbstractJaxbProcessor {
private static final Logger log = Logger.getLogger( JaxbConfigurationProcessor.class );
public class ConfigurationXmlBinder extends AbstractXmlBinder {
private static final Logger log = Logger.getLogger( ConfigurationXmlBinder.class );
public static final String HIBERNATE_CONFIGURATION_URI = "http://www.hibernate.org/xsd/hibernate-configuration";
public JaxbConfigurationProcessor(ServiceRegistry serviceRegistry) {
public ConfigurationXmlBinder(ServiceRegistry serviceRegistry) {
this( serviceRegistry, true );
}
public JaxbConfigurationProcessor(ServiceRegistry serviceRegistry, boolean validateXml) {
public ConfigurationXmlBinder(ServiceRegistry serviceRegistry, boolean validateXml) {
super(serviceRegistry, validateXml);
}
@ -71,7 +71,7 @@ protected JAXBContext getJaxbContext(XMLEvent event) throws JAXBException{
@Override
protected Schema getSchema(XMLEvent event, Origin origin) throws JAXBException {
if ( schema == null ) {
schema = MappingReader.resolveLocalSchema( "org/hibernate/hibernate-configuration-4.0.xsd" );
schema = LocalSchemaLocator.resolveLocalSchema( "org/hibernate/hibernate-configuration-4.0.xsd" );
}
return schema;
}

View File

@ -21,7 +21,7 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jaxb.internal;
package org.hibernate.xml.internal.jaxb;
import java.util.ArrayList;
import java.util.Iterator;
@ -36,7 +36,7 @@
import javax.xml.stream.events.XMLEvent;
import javax.xml.stream.util.EventReaderDelegate;
import org.hibernate.internal.util.xml.LocalXmlResourceResolver;
import org.hibernate.xml.internal.stax.LocalXmlResourceResolver;
/**
*

View File

@ -21,7 +21,7 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jaxb.internal;
package org.hibernate.xml.internal.jaxb;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
@ -37,11 +37,11 @@
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.xml.LocalXmlResourceResolver;
import org.hibernate.internal.util.xml.MappingReader;
import org.hibernate.xml.internal.stax.LocalXmlResourceResolver;
import org.hibernate.internal.util.xml.OriginImpl;
import org.hibernate.jaxb.spi.JaxbRoot;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.internal.stax.SupportedOrmXsdVersion;
import org.hibernate.xml.spi.BindResult;
import org.hibernate.xml.spi.Origin;
import org.hibernate.jaxb.spi.hbm.JaxbHibernateMapping;
import org.hibernate.jaxb.spi.orm.JaxbEntityMappings;
import org.hibernate.metamodel.spi.source.MappingException;
@ -53,15 +53,15 @@
* @author Steve Ebersole
* @author Hardy Ferentschik
*/
public class JaxbMappingProcessor extends AbstractJaxbProcessor{
private static final Logger log = Logger.getLogger( JaxbMappingProcessor.class );
public class MappingXmlBinder extends AbstractXmlBinder {
private static final Logger log = Logger.getLogger( MappingXmlBinder.class );
public static final String HIBERNATE_MAPPING_URI = "http://www.hibernate.org/xsd/hibernate-mapping";
public JaxbMappingProcessor(ServiceRegistry serviceRegistry) {
public MappingXmlBinder(ServiceRegistry serviceRegistry) {
this( serviceRegistry, true );
}
public JaxbMappingProcessor(ServiceRegistry serviceRegistry, boolean validateXml) {
public MappingXmlBinder(ServiceRegistry serviceRegistry, boolean validateXml) {
super(serviceRegistry, validateXml);
}
@ -88,7 +88,7 @@ protected Schema getSchema(XMLEvent event, Origin origin) throws JAXBException {
validationSchema = validateXml ? resolveSupportedOrmXsd( explicitVersion, origin ) : null;
}
else {
validationSchema = validateXml ? MappingReader.SupportedOrmXsdVersion.HBM_4_0.getSchema() : null;
validationSchema = validateXml ? SupportedOrmXsdVersion.HBM_4_0.getSchema() : null;
}
return validationSchema;
}
@ -123,7 +123,7 @@ protected XMLEventReader wrapReader(XMLEventReader staxEventReader, XMLEvent eve
@SuppressWarnings( { "unchecked" })
public JaxbRoot unmarshal(Document document, Origin origin) {
public BindResult unmarshal(Document document, Origin origin) {
Element rootElement = document.getDocumentElement();
if ( rootElement == null ) {
throw new MappingException( "No root element found", origin );
@ -138,7 +138,7 @@ public JaxbRoot unmarshal(Document document, Origin origin) {
jaxbTarget = JaxbEntityMappings.class;
}
else {
validationSchema = validateXml ? MappingReader.SupportedOrmXsdVersion.HBM_4_0.getSchema() : null;
validationSchema = validateXml ? SupportedOrmXsdVersion.HBM_4_0.getSchema() : null;
jaxbTarget = JaxbHibernateMapping.class;
}
@ -153,13 +153,13 @@ public JaxbRoot unmarshal(Document document, Origin origin) {
throw new MappingException( "Unable to perform unmarshalling", e, origin );
}
return new JaxbRoot( target, origin );
return new BindResult( target, origin );
}
private Schema resolveSupportedOrmXsd(String explicitVersion, Origin origin) {
if ( StringHelper.isEmpty( explicitVersion ) ) {
return MappingReader.SupportedOrmXsdVersion.ORM_2_1.getSchema();
return SupportedOrmXsdVersion.ORM_2_1.getSchema();
}
// Here we always use JPA 2.1 schema to do the validation, since the {@link LegacyJPAEventReader} already
@ -172,13 +172,13 @@ private Schema resolveSupportedOrmXsd(String explicitVersion, Origin origin) {
// However, still check for the validity of the version by calling #parse. If someone explicitly uses a value
// that doesn't exist, we still need to throw the exception.
@SuppressWarnings("unused")
MappingReader.SupportedOrmXsdVersion version =
MappingReader.SupportedOrmXsdVersion.parse(
SupportedOrmXsdVersion version =
SupportedOrmXsdVersion.parse(
explicitVersion,
new OriginImpl( origin.getType().name(), origin.getName() )
);
// return version.getSchema();
return MappingReader.SupportedOrmXsdVersion.ORM_2_1.getSchema();
return SupportedOrmXsdVersion.ORM_2_1.getSchema();
}
}

View File

@ -21,7 +21,7 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jaxb.internal;
package org.hibernate.xml.internal.jaxb;
import java.util.ArrayList;
import java.util.Iterator;
@ -35,8 +35,6 @@
import javax.xml.stream.events.XMLEvent;
import javax.xml.stream.util.EventReaderDelegate;
import org.hibernate.internal.util.xml.LocalXmlResourceResolver;
/**
* Used to wrap a StAX {@link XMLEventReader} in order to introduce namespaces into the underlying document. This
* is intended for temporary migration feature to allow legacy HBM mapping documents (DTD-based) to continue to

View File

@ -21,7 +21,7 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
package org.hibernate.xml.internal.stax;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLStreamConstants;
@ -67,9 +67,6 @@ public final XMLEvent nextEvent() throws XMLStreamException {
return this.previousEvent;
}
/* (non-Javadoc)
* @see java.util.Iterator#next()
*/
@Override
public final Object next() {
try {
@ -80,9 +77,6 @@ public final Object next() {
}
}
/* (non-Javadoc)
* @see javax.xml.stream.XMLEventReader#getElementText()
*/
@Override
public final String getElementText() throws XMLStreamException {
XMLEvent event = this.previousEvent;
@ -125,9 +119,6 @@ public final String getElementText() throws XMLStreamException {
return text.toString();
}
/* (non-Javadoc)
* @see javax.xml.stream.XMLEventReader#nextTag()
*/
@Override
public final XMLEvent nextTag() throws XMLStreamException {
XMLEvent event = this.nextEvent();

View File

@ -21,7 +21,7 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
package org.hibernate.xml.internal.stax;
import java.util.ArrayList;
import java.util.LinkedList;
@ -67,9 +67,6 @@ public List<XMLEvent> getBuffer() {
return new ArrayList<XMLEvent>(this.eventBuffer);
}
/* (non-Javadoc)
* @see org.jasig.portal.xml.stream.BaseXMLEventReader#internalNextEvent()
*/
@Override
protected XMLEvent internalNextEvent() throws XMLStreamException {
//If there is an iterator to read from reset was called, use the iterator

View File

@ -21,7 +21,7 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
package org.hibernate.xml.internal.stax;
import java.util.Deque;
import java.util.LinkedList;

View File

@ -0,0 +1,91 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2014, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.xml.internal.stax;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import javax.xml.XMLConstants;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
public class LocalSchemaLocator {
private static final Logger log = Logger.getLogger( LocalSchemaLocator.class );
/**
* Disallow direct instantiation
*/
private LocalSchemaLocator() {
}
/**
* Given the resource name of a schema, locate its URL reference via ClassLoader lookup.
*
* @param schemaName The resource name of the schema to locate
*
* @return The URL of the located schema resource
*/
public static URL resolveLocalSchemaUrl(String schemaName) {
URL url = LocalSchemaLocator.class.getClassLoader().getResource( schemaName );
if ( url == null ) {
throw new XmlInfrastructureException( "Unable to locate schema [" + schemaName + "] via classpath" );
}
return url;
}
public static Schema resolveLocalSchema(String schemaName){
return resolveLocalSchema( resolveLocalSchemaUrl( schemaName ) );
}
public static Schema resolveLocalSchema(URL schemaUrl) {
try {
InputStream schemaStream = schemaUrl.openStream();
try {
StreamSource source = new StreamSource( schemaStream );
SchemaFactory schemaFactory = SchemaFactory.newInstance( XMLConstants.W3C_XML_SCHEMA_NS_URI );
return schemaFactory.newSchema(source);
}
catch ( Exception e ) {
throw new XmlInfrastructureException( "Unable to load schema [" + schemaUrl.toExternalForm() + "]", e );
}
finally {
try {
schemaStream.close();
}
catch ( IOException e ) {
log.debugf( "Problem closing schema stream - %s", e.toString() );
}
}
}
catch ( IOException e ) {
throw new XmlInfrastructureException( "Stream error handling schema url [" + schemaUrl.toExternalForm() + "]" );
}
}
}

View File

@ -21,26 +21,22 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
package org.hibernate.xml.internal.stax;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import javax.xml.stream.XMLStreamException;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.ConfigHelper;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
*/
public class LocalXmlResourceResolver implements javax.xml.stream.XMLResolver {
private static final CoreMessageLogger log = Logger.getMessageLogger(
CoreMessageLogger.class,
MappingReader.class.getName()
);
private static final CoreMessageLogger log = CoreLogging.messageLogger( LocalXmlResourceResolver.class );
public static final LocalXmlResourceResolver INSTANCE = new LocalXmlResourceResolver();
@ -65,10 +61,10 @@ public Object resolveEntity(String publicID, String systemID, String baseURI, St
if ( namespace != null ) {
log.debugf( "Interpreting namespace : %s", namespace );
if ( INITIAL_JPA_ORM_NS.equals( namespace ) ) {
return openUrlStream( MappingReader.SupportedOrmXsdVersion.ORM_2_0.getSchemaUrl() );
return openUrlStream( SupportedOrmXsdVersion.ORM_2_0.getSchemaUrl() );
}
else if ( SECOND_JPA_ORM_NS.equals( namespace ) ) {
return openUrlStream( MappingReader.SupportedOrmXsdVersion.ORM_2_1.getSchemaUrl() );
return openUrlStream( SupportedOrmXsdVersion.ORM_2_1.getSchemaUrl() );
}
}

View File

@ -0,0 +1,79 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2014, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.xml.internal.stax;
import java.net.URL;
import javax.xml.validation.Schema;
import org.hibernate.internal.util.xml.Origin;
/**
* @author Steve Ebersole
*/
public enum SupportedOrmXsdVersion {
ORM_1_0( "org/hibernate/jpa/orm_1_0.xsd" ),
ORM_2_0( "org/hibernate/jpa/orm_2_0.xsd" ),
ORM_2_1( "org/hibernate/jpa/orm_2_1.xsd" ),
HBM_4_0( "org/hibernate/hibernate-mapping-4.0.xsd" );
private final String schemaResourceName;
SupportedOrmXsdVersion(String schemaResourceName) {
this.schemaResourceName = schemaResourceName;
}
public static SupportedOrmXsdVersion parse(String name, Origin origin) {
if ( "1.0".equals( name ) ) {
return ORM_1_0;
}
else if ( "2.0".equals( name ) ) {
return ORM_2_0;
}
else if ( "2.1".equals( name ) ) {
return ORM_2_1;
}
else if ( "4.0".equals( name ) ) {
return HBM_4_0;
}
throw new UnsupportedOrmXsdVersionException( name, origin );
}
private URL schemaUrl;
public URL getSchemaUrl() {
if ( schemaUrl == null ) {
schemaUrl = LocalSchemaLocator.resolveLocalSchemaUrl( schemaResourceName );
}
return schemaUrl;
}
private Schema schema;
public Schema getSchema() {
if ( schema == null ) {
schema = LocalSchemaLocator.resolveLocalSchema( getSchemaUrl() );
}
return schema;
}
}
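SupportedOrmXsdVersion centralizes the XSD lookups that previously lived on MappingReader. A short sketch of resolving a schema through it, assuming the APIs as shown in this diff; the origin values and class name are illustrative only:

import javax.xml.validation.Schema;

import org.hibernate.internal.util.xml.OriginImpl;
import org.hibernate.xml.internal.stax.SupportedOrmXsdVersion;

public class OrmXsdLookupSketch {
	public static void main(String[] args) {
		// Map an explicit orm.xml version attribute to the bundled XSD; the origin is only
		// used for error reporting and its values here are illustrative.
		SupportedOrmXsdVersion version = SupportedOrmXsdVersion.parse(
				"2.1",
				new OriginImpl( "RESOURCE", "META-INF/orm.xml" )
		);

		// Lazily resolved and cached via LocalSchemaLocator.
		Schema schema = version.getSchema();
		System.out.println( "Resolved " + version + " -> " + schema );
	}
}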

View File

@ -21,9 +21,10 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
package org.hibernate.xml.internal.stax;
import org.hibernate.HibernateException;
import org.hibernate.internal.util.xml.Origin;
/**
* @author Steve Ebersole

View File

@ -21,15 +21,11 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
package org.hibernate.xml.internal.stax;
import javax.xml.stream.XMLStreamConstants;
/**
*
*
*
*
* Note, copied from the uPortal project by permission of author. See
* https://github.com/Jasig/uPortal/blob/master/uportal-war/src/main/java/org/jasig/portal/xml/stream/XMLStreamConstantsUtils.java
*

View File

@ -21,7 +21,7 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.internal.util.xml;
package org.hibernate.xml.internal.stax;
import org.hibernate.HibernateException;

View File

@ -0,0 +1,28 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2014, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
/**
* Contains basic support for Java XML Processing (JAXP) via Streaming API for XML (StAX)
*/
package org.hibernate.xml.internal.stax;

View File

@ -0,0 +1,28 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2014, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
/**
* Contains basic support for consuming XML, mainly for reading configuration and mapping XML.
*/
package org.hibernate.xml;

View File

@ -21,21 +21,21 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jaxb.spi;
package org.hibernate.xml.spi;
import java.io.Serializable;
/**
* Holds information about a JAXB-unmarshalled XML document.
* Return object for the result of performing JAXB binding.
*
* @author Hardy Ferentschik
* @author Steve Ebersole
*/
public class JaxbRoot<T> implements Serializable {
public class BindResult<T> implements Serializable {
private final T root;
private final Origin origin;
public JaxbRoot(T root, Origin origin) {
public BindResult(T root, Origin origin) {
this.root = root;
this.origin = origin;
}

View File

@ -21,7 +21,7 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jaxb.spi;
package org.hibernate.xml.spi;
import java.io.Serializable;

View File

@ -22,7 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jaxb.spi;
package org.hibernate.xml.spi;
/**
* From what type of source did we obtain the data

View File

@ -1,7 +1,7 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* Copyright (c) 2014, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
@ -21,24 +21,13 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.jpa.metamodel.internal.legacy;
package org.hibernate.xml.spi;
import java.io.Serializable;
import javax.persistence.metamodel.Type;
import java.io.InputStream;
/**
* Defines commonality for the JPA {@link javax.persistence.metamodel.Type} hierarchy of interfaces.
*
* @author Steve Ebersole
*/
public abstract class AbstractType<X> implements Type<X>, Serializable {
private final Class<X> javaType;
public AbstractType(Class<X> javaType) {
this.javaType = javaType;
}
public Class<X> getJavaType() {
return javaType;
}
public interface XmlBinder {
public BindResult bind(InputStream stream, Origin origin);
}
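The relocated binding SPI reduces to a single bind() call returning a BindResult. A sketch of driving it through MappingXmlBinder, modeled on the adapted test later in this diff; SourceType.RESOURCE is assumed for classpath lookups (the test itself passes SourceType.FILE), and the helper class name and resource name are placeholders:

import java.io.InputStream;

import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.jaxb.spi.orm.JaxbEntityMappings;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.xml.internal.jaxb.MappingXmlBinder;
import org.hibernate.xml.spi.BindResult;
import org.hibernate.xml.spi.Origin;
import org.hibernate.xml.spi.SourceType;
import org.hibernate.xml.spi.XmlBinder;

public class OrmXmlBindSketch {
	// The caller supplies the ServiceRegistry; the resource name is a placeholder.
	public static JaxbEntityMappings readOrmXml(ServiceRegistry serviceRegistry, String resourceName) {
		final ClassLoaderService cls = serviceRegistry.getService( ClassLoaderService.class );
		final InputStream stream = cls.locateResourceStream( resourceName );

		// MappingXmlBinder handles orm.xml/hbm.xml documents; ConfigurationXmlBinder handles cfg.xml.
		final XmlBinder binder = new MappingXmlBinder( serviceRegistry );
		final BindResult result = binder.bind( stream, new Origin( SourceType.RESOURCE, resourceName ) );

		// For an orm.xml document the bound root is JaxbEntityMappings.
		return (JaxbEntityMappings) result.getRoot();
	}
}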

View File

@ -23,8 +23,6 @@
*/
package org.hibernate.id;
import static org.junit.Assert.assertEquals;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@ -32,26 +30,31 @@
import java.util.Properties;
import org.hibernate.Session;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
import org.hibernate.testing.env.TestingDatabaseInfo;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.EJB3NamingStrategy;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.cfg.ObjectNameNormalizer;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.SessionImpl;
import org.hibernate.jdbc.Work;
import org.hibernate.mapping.SimpleAuxiliaryDatabaseObject;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.testing.ServiceRegistryBuilder;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.metamodel.spi.relational.Database;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
* I went back to 3.3 source and grabbed the code/logic as it existed back then and crafted this
* unit test so that we can make sure the values keep being generated in the expected manner
@ -62,15 +65,14 @@
public class SequenceHiLoGeneratorNoIncrementTest extends BaseUnitTestCase {
private static final String TEST_SEQUENCE = "test_sequence";
private Configuration cfg;
private ServiceRegistry serviceRegistry;
private StandardServiceRegistry ssr;
private SessionFactoryImplementor sessionFactory;
private SequenceHiLoGenerator generator;
private SessionImplementor session;
@Before
public void setUp() throws Exception {
Properties properties = new Properties();
properties.setProperty( AvailableSettings.HBM2DDL_AUTO, "create-drop" );
properties.setProperty( SequenceGenerator.SEQUENCE, TEST_SEQUENCE );
properties.setProperty( SequenceHiLoGenerator.MAX_LO, "0" ); // JPA allocationSize of 1
properties.put(
@ -83,50 +85,51 @@ protected boolean isUseQuotedIdentifiersGlobally() {
@Override
protected NamingStrategy getNamingStrategy() {
return cfg.getNamingStrategy();
return EJB3NamingStrategy.INSTANCE;
}
}
);
Dialect dialect = TestingDatabaseInfo.DIALECT;
BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build();
StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder( bsr );
ssrBuilder.applySettings( properties );
ssr = ssrBuilder.build();
MetadataImplementor metadata = (MetadataImplementor) new MetadataSources( bsr ).buildMetadata( ssr );
Database database = metadata.getDatabase();
generator = new SequenceHiLoGenerator();
generator.configure( StandardBasicTypes.LONG, properties, dialect, new ClassLoaderServiceImpl() );
cfg = TestingDatabaseInfo.buildBaseConfiguration()
.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
cfg.addAuxiliaryDatabaseObject(
new SimpleAuxiliaryDatabaseObject(
generator.sqlCreateStrings( dialect )[0],
generator.sqlDropStrings( dialect )[0]
)
generator.configure(
StandardBasicTypes.LONG,
properties,
database.getJdbcEnvironment().getDialect(),
ssr.getService( ClassLoaderService.class )
);
generator.registerExportables( database );
serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( cfg.getProperties() );
sessionFactory = (SessionFactoryImplementor) cfg.buildSessionFactory( serviceRegistry );
sessionFactory = (SessionFactoryImplementor) metadata.buildSessionFactory();
}
@After
public void tearDown() throws Exception {
if(session != null && !session.isClosed()) {
((Session)session).close();
}
if ( sessionFactory != null ) {
sessionFactory.close();
}
if ( serviceRegistry != null ) {
ServiceRegistryBuilder.destroy( serviceRegistry );
if ( ssr != null ) {
StandardServiceRegistryBuilder.destroy( ssr );
}
}
@Test
public void testHiLoAlgorithm() {
session = (SessionImpl) sessionFactory.openSession();
((Session)session).beginTransaction();
SessionImpl session = (SessionImpl) sessionFactory.openSession();
session.beginTransaction();
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// initially sequence should be uninitialized
assertEquals( 0L, extractSequenceValue( (session) ) );
assertEquals( 0L, extractSequenceValue( session ) );
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// historically the hilo generators skipped the initial block of values;
@ -156,8 +159,8 @@ public void testHiLoAlgorithm() {
assertEquals( 5L, generatedValue.longValue() );
assertEquals( 5L, extractSequenceValue( (session) ) );
((Session)session).getTransaction().commit();
((Session)session).close();
session.getTransaction().commit();
session.close();
}
private long extractSequenceValue(final SessionImplementor session) {

View File

@ -23,8 +23,6 @@
*/
package org.hibernate.id;
import static org.junit.Assert.assertEquals;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@ -32,26 +30,31 @@
import java.util.Properties;
import org.hibernate.Session;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
import org.hibernate.testing.env.TestingDatabaseInfo;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.boot.registry.BootstrapServiceRegistry;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.EJB3NamingStrategy;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.cfg.ObjectNameNormalizer;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.SessionImpl;
import org.hibernate.jdbc.Work;
import org.hibernate.mapping.SimpleAuxiliaryDatabaseObject;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.testing.ServiceRegistryBuilder;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.metamodel.spi.relational.Database;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
* I went back to 3.3 source and grabbed the code/logic as it existed back then and crafted this
* unit test so that we can make sure the values keep being generated in the expected manner
@ -62,38 +65,50 @@
public class SequenceHiLoGeneratorTest extends BaseUnitTestCase {
private static final String TEST_SEQUENCE = "test_sequence";
private Configuration cfg;
private ServiceRegistry serviceRegistry;
private StandardServiceRegistry ssr;
private SessionFactoryImplementor sessionFactory;
private SequenceHiLoGenerator generator;
@Before
public void setUp() throws Exception {
Properties properties = new Properties();
properties.setProperty( AvailableSettings.HBM2DDL_AUTO, "create-drop" );
properties.setProperty( SequenceGenerator.SEQUENCE, TEST_SEQUENCE );
properties.setProperty( SequenceHiLoGenerator.MAX_LO, "3" );
properties.put( PersistentIdentifierGenerator.IDENTIFIER_NORMALIZER, new ObjectNameNormalizer() {
@Override
protected boolean isUseQuotedIdentifiersGlobally() {
return false;
}
properties.put(
PersistentIdentifierGenerator.IDENTIFIER_NORMALIZER,
new ObjectNameNormalizer() {
@Override
protected boolean isUseQuotedIdentifiersGlobally() {
return false;
}
@Override
protected NamingStrategy getNamingStrategy() {
return cfg.getNamingStrategy();
}
} );
@Override
protected NamingStrategy getNamingStrategy() {
return EJB3NamingStrategy.INSTANCE;
}
}
);
Dialect dialect = TestingDatabaseInfo.DIALECT;
BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder().build();
StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder( bsr );
ssrBuilder.applySettings( properties );
ssr = ssrBuilder.build();
MetadataImplementor metadata = (MetadataImplementor) new MetadataSources( bsr ).buildMetadata( ssr );
Database database = metadata.getDatabase();
generator = new SequenceHiLoGenerator();
generator.configure( StandardBasicTypes.LONG, properties, dialect, new ClassLoaderServiceImpl() );
generator.configure(
StandardBasicTypes.LONG,
properties,
database.getJdbcEnvironment().getDialect(),
ssr.getService( ClassLoaderService.class )
);
generator.registerExportables( database );
cfg = TestingDatabaseInfo.buildBaseConfiguration().setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
cfg.addAuxiliaryDatabaseObject( new SimpleAuxiliaryDatabaseObject( generator.sqlCreateStrings( dialect )[0],
generator.sqlDropStrings( dialect )[0] ) );
serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( cfg.getProperties() );
sessionFactory = (SessionFactoryImplementor) cfg.buildSessionFactory( serviceRegistry );
sessionFactory = (SessionFactoryImplementor) metadata.buildSessionFactory();
}
@After
@ -101,8 +116,8 @@ public void tearDown() throws Exception {
if ( sessionFactory != null ) {
sessionFactory.close();
}
if ( serviceRegistry != null ) {
ServiceRegistryBuilder.destroy( serviceRegistry );
if ( ssr != null ) {
StandardServiceRegistryBuilder.destroy( ssr );
}
}

View File

@ -33,7 +33,7 @@
import org.junit.Test;
import org.hibernate.jaxb.spi.SourceType;
import org.hibernate.xml.spi.SourceType;
import org.hibernate.metamodel.spi.binding.BagBinding;
import org.hibernate.metamodel.spi.binding.EntityBinding;
import org.hibernate.metamodel.spi.relational.Column;

View File

@ -27,7 +27,17 @@
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBException;
import org.hibernate.HibernateException;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.jaxb.spi.orm.JaxbEntityMappings;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.xml.internal.jaxb.MappingXmlBinder;
import org.hibernate.xml.spi.BindResult;
import org.hibernate.xml.spi.Origin;
import org.hibernate.xml.spi.SourceType;
import org.hibernate.testing.ServiceRegistryBuilder;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationValue;
@ -36,17 +46,6 @@
import org.jboss.jandex.Index;
import org.jboss.jandex.Indexer;
import org.hibernate.AnnotationException;
import org.hibernate.HibernateException;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.jaxb.internal.JaxbMappingProcessor;
import org.hibernate.jaxb.spi.JaxbRoot;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.jaxb.spi.SourceType;
import org.hibernate.jaxb.spi.orm.JaxbEntityMappings;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.testing.ServiceRegistryBuilder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
@ -74,12 +73,12 @@ protected EntityMappingsMocker getEntityMappingsMocker(String... mappingFiles) {
ClassLoaderService classLoaderService = getServiceRegistry().getService( ClassLoaderService.class );
List<JaxbEntityMappings> xmlEntityMappingsList = new ArrayList<JaxbEntityMappings>();
for ( String fileName : mappingFiles ) {
JaxbMappingProcessor processor = new JaxbMappingProcessor( getServiceRegistry() );
JaxbRoot jaxbRoot = processor.unmarshal(
MappingXmlBinder processor = new MappingXmlBinder( getServiceRegistry() );
BindResult bindResult = processor.bind(
classLoaderService.locateResourceStream( packagePrefix + fileName ),
new Origin( SourceType.FILE, packagePrefix + fileName )
);
JaxbEntityMappings entityMappings = (JaxbEntityMappings)jaxbRoot.getRoot();
JaxbEntityMappings entityMappings = (JaxbEntityMappings) bindResult.getRoot();
xmlEntityMappingsList.add( entityMappings );

View File

@ -38,8 +38,8 @@
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.jaxb.spi.JaxbRoot;
import org.hibernate.jaxb.spi.Origin;
import org.hibernate.xml.spi.BindResult;
import org.hibernate.xml.spi.Origin;
/**
* @author Hardy Ferentschik
@ -50,7 +50,7 @@ public class XmlHelper {
private XmlHelper() {
}
public static <T> JaxbRoot<T> unmarshallXml(String fileName, String schemaName, Class<T> clazz, ClassLoaderService classLoaderService)
public static <T> BindResult<T> unmarshallXml(String fileName, String schemaName, Class<T> clazz, ClassLoaderService classLoaderService)
throws JAXBException {
Schema schema = getMappingSchema( schemaName, classLoaderService );
InputStream in = classLoaderService.locateResourceStream( fileName );
@ -60,7 +60,7 @@ public static <T> JaxbRoot<T> unmarshallXml(String fileName, String schemaName,
StreamSource stream = new StreamSource( in );
JAXBElement<T> elem = unmarshaller.unmarshal( stream, clazz );
Origin origin = new Origin( null, fileName );
return new JaxbRoot<T>( elem.getValue(), origin );
return new BindResult<T>( elem.getValue(), origin );
}
private static Schema getMappingSchema(String schemaVersion, ClassLoaderService classLoaderService) {

View File

@ -1,164 +0,0 @@
//$Id$
package org.hibernate.test.annotations;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.hibernate.HibernateException;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.testing.ServiceRegistryBuilder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* @author Emmanuel Bernard
*/
public class ConfigurationTest {
private ServiceRegistry serviceRegistry;
@Before
public void setUp() {
serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( Environment.getProperties() );
}
@After
public void tearDown() {
if ( serviceRegistry != null ) {
ServiceRegistryBuilder.destroy( serviceRegistry );
}
}
@Test
public void testDeclarativeMix() throws Exception {
Configuration cfg = new Configuration();
cfg.configure( "org/hibernate/test/annotations/hibernate.cfg.xml" );
cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
SessionFactory sf = cfg.buildSessionFactory( serviceRegistry );
assertNotNull( sf );
Session s = sf.openSession();
Transaction tx = s.beginTransaction();
Query q = s.createQuery( "from Boat" );
assertEquals( 0, q.list().size() );
q = s.createQuery( "from Plane" );
assertEquals( 0, q.list().size() );
tx.commit();
s.close();
sf.close();
}
@Test
public void testIgnoringHbm() throws Exception {
Configuration cfg = new Configuration();
cfg.configure( "org/hibernate/test/annotations/hibernate.cfg.xml" );
cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
cfg.setProperty( Configuration.ARTEFACT_PROCESSING_ORDER, "class" );
SessionFactory sf = cfg.buildSessionFactory( serviceRegistry );
assertNotNull( sf );
Session s = sf.openSession();
Transaction tx = s.beginTransaction();
Query q;
try {
s.createQuery( "from Boat" ).list();
fail( "Boat should not be mapped" );
}
catch (HibernateException e) {
//all good
}
q = s.createQuery( "from Plane" );
assertEquals( 0, q.list().size() );
tx.commit();
s.close();
sf.close();
}
@Test
public void testPrecedenceHbm() throws Exception {
Configuration cfg = new Configuration();
cfg.configure( "org/hibernate/test/annotations/hibernate.cfg.xml" );
cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
cfg.addAnnotatedClass( Boat.class );
SessionFactory sf = cfg.buildSessionFactory( serviceRegistry );
assertNotNull( sf );
Session s = sf.openSession();
s.getTransaction().begin();
Boat boat = new Boat();
boat.setSize( 12 );
boat.setWeight( 34 );
s.persist( boat );
s.getTransaction().commit();
s.clear();
Transaction tx = s.beginTransaction();
boat = (Boat) s.get( Boat.class, boat.getId() );
assertTrue( "Annotation has precedence", 34 != boat.getWeight() );
s.delete( boat );
//s.getTransaction().commit();
tx.commit();
s.close();
sf.close();
}
@Test
public void testPrecedenceAnnotation() throws Exception {
Configuration cfg = new Configuration();
cfg.configure( "org/hibernate/test/annotations/hibernate.cfg.xml" );
cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
cfg.setProperty( Configuration.ARTEFACT_PROCESSING_ORDER, "class, hbm" );
cfg.addAnnotatedClass( Boat.class );
SessionFactory sf = cfg.buildSessionFactory( serviceRegistry );
assertNotNull( sf );
Session s = sf.openSession();
s.getTransaction().begin();
Boat boat = new Boat();
boat.setSize( 12 );
boat.setWeight( 34 );
s.persist( boat );
s.getTransaction().commit();
s.clear();
Transaction tx = s.beginTransaction();
boat = (Boat) s.get( Boat.class, boat.getId() );
assertTrue( "Annotation has precedence", 34 == boat.getWeight() );
s.delete( boat );
tx.commit();
s.close();
sf.close();
}
@Test
public void testHbmWithSubclassExtends() throws Exception {
Configuration cfg = new Configuration();
cfg.configure( "org/hibernate/test/annotations/hibernate.cfg.xml" );
cfg.addClass( Ferry.class );
cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
SessionFactory sf = cfg.buildSessionFactory( serviceRegistry );
assertNotNull( sf );
Session s = sf.openSession();
Transaction tx = s.beginTransaction();
Query q = s.createQuery( "from Ferry" );
assertEquals( 0, q.list().size() );
q = s.createQuery( "from Plane" );
assertEquals( 0, q.list().size() );
tx.commit();
s.close();
sf.close();
}
@Test
public void testAnnReferencesHbm() throws Exception {
Configuration cfg = new Configuration();
cfg.configure( "org/hibernate/test/annotations/hibernate.cfg.xml" );
cfg.addAnnotatedClass( Port.class );
cfg.setProperty( Environment.HBM2DDL_AUTO, "create-drop" );
SessionFactory sf = cfg.buildSessionFactory( serviceRegistry );
assertNotNull( sf );
Session s = sf.openSession();
Transaction tx = s.beginTransaction();
Query q = s.createQuery( "from Boat" );
assertEquals( 0, q.list().size() );
q = s.createQuery( "from Port" );
assertEquals( 0, q.list().size() );
tx.commit();
s.close();
sf.close();
}
}

View File

@ -30,22 +30,21 @@
import java.util.List;
import java.util.TimeZone;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.hibernate.HibernateException;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.StaleStateException;
import org.hibernate.Transaction;
import org.hibernate.dialect.Oracle10gDialect;
import org.hibernate.test.util.SchemaUtil;
import org.hibernate.testing.SkipForDialect;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.testing.SkipForDialect;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@ -60,15 +59,10 @@ public class EntityTest extends BaseCoreFunctionalTestCase {
@Test
public void testLoad() throws Exception {
//put an object in DB
if ( isMetadataUsed() ) {
assertEquals(
"Flight",
metadata().getEntityBinding( Flight.class.getName() ).getPrimaryTableName()
);
}
else {
assertEquals( "Flight", configuration().getClassMapping( Flight.class.getName() ).getTable().getName() );
}
assertEquals(
"Flight",
metadata().getEntityBinding( Flight.class.getName() ).getPrimaryTableName()
);
Session s = openSession();
Transaction tx = s.beginTransaction();
@ -323,16 +317,11 @@ public void testFieldAccess() throws Exception {
@Test
public void testEntityName() throws Exception {
if ( isMetadataUsed() ) {
assertEquals(
"Corporation",
metadata().getEntityBinding( Company.class.getName() ).getPrimaryTableName()
);
assertEquals(
"Corporation",
metadata().getEntityBinding( Company.class.getName() ).getPrimaryTableName()
);
}
else {
assertEquals( "Corporation", configuration().getClassMapping( Company.class.getName() ).getTable().getName() );
}
Session s = openSession();
Transaction tx = s.beginTransaction();
Company comp = new Company();
@ -454,12 +443,7 @@ public void runCreateSchema() {
}
private SchemaExport schemaExport() {
if ( isMetadataUsed() ) {
return new SchemaExport( metadata() );
}
else {
return new SchemaExport( serviceRegistry(), configuration() );
}
return new SchemaExport( metadata() );
}
@After

View File

@ -38,6 +38,8 @@
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.ServiceRegistryBuilder;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.hibernate.tuple.entity.EntityTuplizer;
import static org.junit.Assert.assertTrue;
@ -50,7 +52,7 @@
* @author Hardy Ferentschik
*/
@SuppressWarnings({ "deprecation" })
public class AccessMappingTest {
public class AccessMappingTest extends BaseUnitTestCase {
private ServiceRegistry serviceRegistry;
@Before

View File

@ -45,6 +45,8 @@
public class BeanValidationAutoTest extends BaseCoreFunctionalTestCase {
@Test
public void testListeners() {
fail( "HARDY : needs the changes in BeanValidationIntegrator" );
CupHolder ch = new CupHolder();
ch.setRadius( new BigDecimal( "12" ) );
Session s = openSession();

View File

@ -26,17 +26,14 @@
import java.math.BigDecimal;
import javax.validation.ConstraintViolationException;
import org.junit.Test;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.metamodel.spi.binding.EntityBinding;
import org.hibernate.metamodel.spi.binding.SingularAttributeBinding;
import org.hibernate.test.util.SchemaUtil;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@ -65,21 +62,14 @@ public void testListeners() {
@Test
public void testDDLDisabled() {
final boolean isNullable;
if ( isMetadataUsed() ) {
final EntityBinding entityBinding = metadata().getEntityBinding( Address.class.getName() );
final SingularAttributeBinding attributeBinding =
(SingularAttributeBinding) entityBinding.locateAttributeBinding( "country" );
final EntityBinding entityBinding = metadata().getEntityBinding( Address.class.getName() );
final SingularAttributeBinding attributeBinding =
(SingularAttributeBinding) entityBinding.locateAttributeBinding( "country" );
final org.hibernate.metamodel.spi.relational.Column column =
(org.hibernate.metamodel.spi.relational.Column) attributeBinding.getValues().get( 0 );
isNullable = column.isNullable();
}
else {
PersistentClass classMapping = configuration().getClassMapping( Address.class.getName() );
Column countryColumn = (Column) classMapping.getProperty( "country" ).getColumnIterator().next();
isNullable = countryColumn.isNullable();
}
final org.hibernate.metamodel.spi.relational.Column column =
(org.hibernate.metamodel.spi.relational.Column) attributeBinding.getValues().get( 0 );
isNullable = column.isNullable();
assertTrue( "DDL constraints are applied", isNullable );
}

View File

@ -44,10 +44,11 @@
* @author Emmanuel Bernard
*/
@FailureExpectedWithNewMetamodel
public class BeanValidationGroupsTest extends BaseCoreFunctionalTestCase {
@Test
public void testListeners() {
fail( "HARDY : needs the changes in BeanValidationIntegrator" );
CupHolder ch = new CupHolder();
ch.setRadius( new BigDecimal( "12" ) );
Session s = openSession();

View File

@ -48,6 +48,8 @@
public class BeanValidationProvidedFactoryTest extends BaseCoreFunctionalTestCase {
@Test
public void testListeners() {
fail( "HARDY : needs the changes in BeanValidationIntegrator" );
CupHolder ch = new CupHolder();
ch.setRadius( new BigDecimal( "12" ) );
Session s = openSession();

View File

@ -23,19 +23,14 @@
*/
package org.hibernate.test.annotations.beanvalidation;
import org.junit.Test;
import org.hibernate.cfg.Configuration;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
import org.hibernate.metamodel.spi.binding.EntityBinding;
import org.hibernate.metamodel.spi.binding.SingularAttributeBinding;
import org.hibernate.metamodel.spi.relational.PrimaryKey;
import org.hibernate.test.util.SchemaUtil;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.TestForIssue;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import org.hibernate.test.util.SchemaUtil;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@ -51,108 +46,53 @@ public class DDLTest extends BaseCoreFunctionalTestCase {
@Test
@FailureExpectedWithNewMetamodel
public void testBasicDDL() {
if ( isMetadataUsed() ) {
org.hibernate.metamodel.spi.relational.Column stateColumn = SchemaUtil.getColumn( Address.class, "state", metadata() );
assertEquals( stateColumn.getSize().getLength(), 3 );
org.hibernate.metamodel.spi.relational.Column zipColumn = SchemaUtil.getColumn( Address.class, "zip", metadata() );
assertEquals( zipColumn.getSize().getLength(), 5 );
assertFalse( zipColumn.isNullable() );
}
else {
PersistentClass classMapping = configuration().getClassMapping( Address.class.getName() );
Column stateColumn = (Column) classMapping.getProperty( "state" ).getColumnIterator().next();
assertEquals( stateColumn.getLength(), 3 );
Column zipColumn = (Column) classMapping.getProperty( "zip" ).getColumnIterator().next();
assertEquals( zipColumn.getLength(), 5 );
assertFalse( zipColumn.isNullable() );
}
org.hibernate.metamodel.spi.relational.Column stateColumn = SchemaUtil.getColumn( Address.class, "state", metadata() );
assertEquals( stateColumn.getSize().getLength(), 3 );
org.hibernate.metamodel.spi.relational.Column zipColumn = SchemaUtil.getColumn( Address.class, "zip", metadata() );
assertEquals( zipColumn.getSize().getLength(), 5 );
assertFalse( zipColumn.isNullable() );
}
@Test
@FailureExpectedWithNewMetamodel
public void testApplyOnIdColumn() throws Exception {
if ( isMetadataUsed() ) {
PrimaryKey id = SchemaUtil.getPrimaryKey( Tv.class, metadata() );
assertEquals( "Validator annotation not applied on ids", 2,
id.getColumns().get( 0 ).getSize().getLength() );
}
else {
PersistentClass classMapping = configuration().getClassMapping( Tv.class.getName() );
Column serialColumn = (Column) classMapping.getIdentifierProperty().getColumnIterator().next();
assertEquals( "Validator annotation not applied on ids", 2, serialColumn.getLength() );
}
PrimaryKey id = SchemaUtil.getPrimaryKey( Tv.class, metadata() );
assertEquals( "Validator annotation not applied on ids", 2, id.getColumns().get( 0 ).getSize().getLength() );
}
@Test
@TestForIssue( jiraKey = "HHH-5281" )
@FailureExpectedWithNewMetamodel
public void testLengthConstraint() throws Exception {
if ( isMetadataUsed() ) {
org.hibernate.metamodel.spi.relational.Column column = SchemaUtil.getColumn( Tv.class, "model", metadata() );
assertEquals( column.getSize().getLength(), 5 );
}
else {
PersistentClass classMapping = configuration().getClassMapping( Tv.class.getName() );
Column modelColumn = (Column) classMapping.getProperty( "model" ).getColumnIterator().next();
assertEquals( modelColumn.getLength(), 5 );
}
org.hibernate.metamodel.spi.relational.Column column = SchemaUtil.getColumn( Tv.class, "model", metadata() );
assertEquals( column.getSize().getLength(), 5 );
}
@Test
@FailureExpectedWithNewMetamodel
public void testApplyOnManyToOne() throws Exception {
if ( isMetadataUsed() ) {
org.hibernate.metamodel.spi.relational.Column column = SchemaUtil.getColumn( TvOwner.class, "tv_serial", metadata() );
assertEquals( "Validator annotations not applied on associations", false, column.isNullable() );
}
else {
PersistentClass classMapping = configuration().getClassMapping( TvOwner.class.getName() );
Column serialColumn = (Column) classMapping.getProperty( "tv" ).getColumnIterator().next();
assertEquals( "Validator annotations not applied on associations", false, serialColumn.isNullable() );
}
org.hibernate.metamodel.spi.relational.Column column = SchemaUtil.getColumn( TvOwner.class, "tv_serial", metadata() );
assertEquals( "Validator annotations not applied on associations", false, column.isNullable() );
}
@Test
public void testSingleTableAvoidNotNull() throws Exception {
if ( isMetadataUsed() ) {
org.hibernate.metamodel.spi.relational.Column column = SchemaUtil.getColumn( Rock.class, "bit", metadata() );
assertTrue( "Notnull should not be applied on single tables", column.isNullable() );
}
else {
PersistentClass classMapping = configuration().getClassMapping( Rock.class.getName() );
Column serialColumn = (Column) classMapping.getProperty( "bit" ).getColumnIterator().next();
assertTrue( "Notnull should not be applied on single tables", serialColumn.isNullable() );
}
org.hibernate.metamodel.spi.relational.Column column = SchemaUtil.getColumn( Rock.class, "bit", metadata() );
assertTrue( "Notnull should not be applied on single tables", column.isNullable() );
}
@Test
@FailureExpectedWithNewMetamodel
public void testNotNullOnlyAppliedIfEmbeddedIsNotNullItself() throws Exception {
if ( isMetadataUsed() ) {
org.hibernate.metamodel.spi.relational.Column column = SchemaUtil.getColumn( Tv.class, "frequency", metadata() );
assertEquals(
"Validator annotations are applied on tuner as it is @NotNull", false, column.isNullable()
);
org.hibernate.metamodel.spi.relational.Column column = SchemaUtil.getColumn( Tv.class, "frequency", metadata() );
assertEquals(
"Validator annotations are applied on tuner as it is @NotNull", false, column.isNullable()
);
column = SchemaUtil.getColumn( Tv.class, "`time`", metadata() );
assertEquals(
"Validator annotations were not applied on recorder", true, column.isNullable()
);
}
else {
PersistentClass classMapping = configuration().getClassMapping( Tv.class.getName() );
Property property = classMapping.getProperty( "tuner.frequency" );
Column serialColumn = (Column) property.getColumnIterator().next();
assertEquals(
"Validator annotations are applied on tuner as it is @NotNull", false, serialColumn.isNullable()
);
property = classMapping.getProperty( "recorder.time" );
serialColumn = (Column) property.getColumnIterator().next();
assertEquals(
"Validator annotations are applied on tuner as it is @NotNull", true, serialColumn.isNullable()
);
}
column = SchemaUtil.getColumn( Tv.class, "`time`", metadata() );
assertEquals(
"Validator annotations were not applied on recorder", true, column.isNullable()
);
}
@Override

Some files were not shown because too many files have changed in this diff