resolve test failres after merge

This commit is contained in:
Strong Liu 2013-03-20 00:24:29 +08:00
parent 3dc371c91a
commit 59a2ccd79f
18 changed files with 699 additions and 201 deletions

View File

@ -24,6 +24,7 @@
apply plugin: 'groovy' apply plugin: 'groovy'
apply plugin: 'idea' apply plugin: 'idea'
apply from: '../libraries.gradle'
buildDir = "target" buildDir = "target"
@ -40,9 +41,10 @@ dependencies {
compile gradleApi() compile gradleApi()
compile localGroovy() compile localGroovy()
groovy localGroovy() groovy localGroovy()
compile 'org.apache.ant:ant:1.8.2' compile(libraries.ant)
// injection plugin // injection plugin
compile 'org.javassist:javassist:3.15.0-GA' compile(libraries.javassist)
compile(libraries.jandex)
} }

View File

@ -27,6 +27,7 @@ import java.util.Set;
import org.hibernate.cfg.Configuration; import org.hibernate.cfg.Configuration;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.service.spi.SessionFactoryServiceRegistry; import org.hibernate.service.spi.SessionFactoryServiceRegistry;
/** /**
@ -54,6 +55,13 @@ public interface ActivationContext {
*/ */
public Configuration getConfiguration(); public Configuration getConfiguration();
/**
* Access the Metadata
*
* @return The Hibernate Metadata object
*/
public MetadataImplementor getMetadata();
/** /**
* Access the SessionFactory being built to trigger this BV activation * Access the SessionFactory being built to trigger this BV activation
* *

View File

@ -82,10 +82,9 @@ public class BeanValidationEventListener
init( factory, properties ); init( factory, properties );
} }
public void initialize(Configuration cfg) { public void initialize(Properties props) {
if ( !initialized ) { if ( !initialized ) {
ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
Properties props = cfg.getProperties();
init( factory, props ); init( factory, props );
} }
} }

View File

@ -138,6 +138,11 @@ public class BeanValidationIntegrator implements Integrator {
return null; return null;
} }
@Override
public MetadataImplementor getMetadata() {
return metadata;
}
@Override @Override
public SessionFactoryImplementor getSessionFactory() { public SessionFactoryImplementor getSessionFactory() {
return sessionFactory; return sessionFactory;
@ -218,6 +223,11 @@ public class BeanValidationIntegrator implements Integrator {
public SessionFactoryServiceRegistry getServiceRegistry() { public SessionFactoryServiceRegistry getServiceRegistry() {
return serviceRegistry; return serviceRegistry;
} }
@Override
public MetadataImplementor getMetadata() {
return null;
}
}; };
try { try {

View File

@ -46,7 +46,9 @@ import javax.validation.metadata.PropertyDescriptor;
import org.jboss.logging.Logger; import org.jboss.logging.Logger;
import org.hibernate.AssertionFailure; import org.hibernate.AssertionFailure;
import org.hibernate.EntityMode;
import org.hibernate.MappingException; import org.hibernate.MappingException;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.cfg.Environment; import org.hibernate.cfg.Environment;
import org.hibernate.dialect.Dialect; import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices; import org.hibernate.engine.jdbc.spi.JdbcServices;
@ -61,6 +63,15 @@ import org.hibernate.mapping.Component;
import org.hibernate.mapping.PersistentClass; import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property; import org.hibernate.mapping.Property;
import org.hibernate.mapping.SingleTableSubclass; import org.hibernate.mapping.SingleTableSubclass;
import org.hibernate.metamodel.spi.binding.AttributeBinding;
import org.hibernate.metamodel.spi.binding.BasicAttributeBinding;
import org.hibernate.metamodel.spi.binding.CompositeAttributeBinding;
import org.hibernate.metamodel.spi.binding.EntityBinding;
import org.hibernate.metamodel.spi.binding.EntityIdentifier;
import org.hibernate.metamodel.spi.binding.InheritanceType;
import org.hibernate.metamodel.spi.binding.RelationalValueBinding;
import org.hibernate.metamodel.spi.binding.SingularAttributeBinding;
import org.hibernate.metamodel.spi.relational.Value;
/** /**
* @author Emmanuel Bernard * @author Emmanuel Bernard
@ -90,7 +101,7 @@ class TypeSafeActivator {
@SuppressWarnings("UnusedDeclaration") @SuppressWarnings("UnusedDeclaration")
public static void activate(ActivationContext activationContext) { public static void activate(ActivationContext activationContext) {
final Properties properties = activationContext.getConfiguration().getProperties(); final Properties properties = activationContext.getSessionFactory().getProperties();
final ValidatorFactory factory; final ValidatorFactory factory;
try { try {
factory = getValidatorFactory( properties ); factory = getValidatorFactory( properties );
@ -121,13 +132,13 @@ class TypeSafeActivator {
// de-activate not-null tracking at the core level when Bean Validation is present unless the user explicitly // de-activate not-null tracking at the core level when Bean Validation is present unless the user explicitly
// asks for it // asks for it
if ( activationContext.getConfiguration().getProperty( Environment.CHECK_NULLABILITY ) == null ) { if ( activationContext.getSessionFactory().getProperties().getProperty( Environment.CHECK_NULLABILITY ) == null ) {
activationContext.getSessionFactory().getSettings().setCheckNullability( false ); activationContext.getSessionFactory().getSettings().setCheckNullability( false );
} }
final BeanValidationEventListener listener = new BeanValidationEventListener( final BeanValidationEventListener listener = new BeanValidationEventListener(
validatorFactory, validatorFactory,
activationContext.getConfiguration().getProperties() activationContext.getSessionFactory().getProperties()
); );
final EventListenerRegistry listenerRegistry = activationContext.getServiceRegistry() final EventListenerRegistry listenerRegistry = activationContext.getServiceRegistry()
@ -139,12 +150,12 @@ class TypeSafeActivator {
listenerRegistry.appendListeners( EventType.PRE_UPDATE, listener ); listenerRegistry.appendListeners( EventType.PRE_UPDATE, listener );
listenerRegistry.appendListeners( EventType.PRE_DELETE, listener ); listenerRegistry.appendListeners( EventType.PRE_DELETE, listener );
listener.initialize( activationContext.getConfiguration() ); listener.initialize( activationContext.getSessionFactory().getProperties() );
} }
@SuppressWarnings({"unchecked", "UnusedParameters"}) @SuppressWarnings({"unchecked", "UnusedParameters"})
private static void applyRelationalConstraints(ValidatorFactory factory, ActivationContext activationContext) { private static void applyRelationalConstraints(ValidatorFactory factory, ActivationContext activationContext) {
final Properties properties = activationContext.getConfiguration().getProperties(); final Properties properties = activationContext.getSessionFactory().getProperties();
if ( ! ConfigurationHelper.getBoolean( BeanValidationIntegrator.APPLY_CONSTRAINTS, properties, true ) ){ if ( ! ConfigurationHelper.getBoolean( BeanValidationIntegrator.APPLY_CONSTRAINTS, properties, true ) ){
LOG.debug( "Skipping application of relational constraints from legacy Hibernate Validator" ); LOG.debug( "Skipping application of relational constraints from legacy Hibernate Validator" );
return; return;
@ -154,33 +165,28 @@ class TypeSafeActivator {
if ( ! ( modes.contains( ValidationMode.DDL ) || modes.contains( ValidationMode.AUTO ) ) ) { if ( ! ( modes.contains( ValidationMode.DDL ) || modes.contains( ValidationMode.AUTO ) ) ) {
return; return;
} }
applyRelationalConstraints( activationContext );
applyRelationalConstraints(
activationContext.getConfiguration().createMappings().getClasses().values(),
properties,
activationContext.getServiceRegistry().getService( JdbcServices.class ).getDialect()
);
} }
@SuppressWarnings( {"UnusedDeclaration"}) @SuppressWarnings( {"UnusedDeclaration"})
public static void applyRelationalConstraints(Collection<PersistentClass> persistentClasses, Properties properties, Dialect dialect) { public static void applyRelationalConstraints(final ActivationContext activationContext) {
final Properties properties = activationContext.getSessionFactory().getProperties();
final Dialect dialect = activationContext.getServiceRegistry().getService( JdbcServices.class ).getDialect();
final ClassLoaderService classLoaderService = activationContext.getServiceRegistry().getService( ClassLoaderService.class );
ValidatorFactory factory = getValidatorFactory( properties ); ValidatorFactory factory = getValidatorFactory( properties );
Class<?>[] groupsArray = new GroupsPerOperation( properties ).get( GroupsPerOperation.Operation.DDL ); Class<?>[] groupsArray = new GroupsPerOperation( properties ).get( GroupsPerOperation.Operation.DDL );
Set<Class<?>> groups = new HashSet<Class<?>>( Arrays.asList( groupsArray ) ); Set<Class<?>> groups = new HashSet<Class<?>>( Arrays.asList( groupsArray ) );
if ( activationContext.getConfiguration() != null ) {
Collection<PersistentClass> persistentClasses = activationContext.getConfiguration().createMappings().getClasses().values();
for ( PersistentClass persistentClass : persistentClasses ) { for ( PersistentClass persistentClass : persistentClasses ) {
final String className = persistentClass.getClassName(); final String className = persistentClass.getClassName();
if ( className == null || className.length() == 0 ) { if ( StringHelper.isEmpty( className ) ) {
continue; continue;
} }
Class<?> clazz; Class<?> clazz = classLoaderService.classForName( className );
try {
clazz = ReflectHelper.classForName( className, TypeSafeActivator.class );
}
catch ( ClassNotFoundException e ) {
throw new AssertionFailure( "Entity class not found", e );
}
try { try {
applyDDL( "", persistentClass, clazz, factory, groups, true, dialect ); applyDDL( "", persistentClass, clazz, factory, groups, true, dialect );
@ -189,6 +195,70 @@ class TypeSafeActivator {
LOG.unableToApplyConstraints( className, e ); LOG.unableToApplyConstraints( className, e );
} }
} }
} else if (activationContext.getMetadata()!=null){
for ( final EntityBinding entityBinding : activationContext.getMetadata().getEntityBindings() ) {
if ( entityBinding.getHierarchyDetails().getEntityMode() != EntityMode.POJO ) {
continue;
}
final String className = entityBinding.getEntity().getClassName();
if ( StringHelper.isEmpty( className ) ) {
continue;
}
Class<?> clazz = classLoaderService.classForName( className );
try {
applyDDL( "", entityBinding, clazz, factory, groups, true, dialect );
}
catch ( Exception e ) {
LOG.unableToApplyConstraints( className, e );
}
}
}
}
private static void applyDDL(
String prefix,
EntityBinding entityBinding,
Class<?> clazz,
ValidatorFactory factory,
Set<Class<?>> groups,
boolean activateNotNull,
Dialect dialect) {
final BeanDescriptor descriptor = factory.getValidator().getConstraintsForClass( clazz );
//no bean level constraints can be applied, go to the properties
for ( PropertyDescriptor propertyDesc : descriptor.getConstrainedProperties() ) {
AttributeBinding attributeBinding = findAttributeBindingByName(
entityBinding,
prefix + propertyDesc.getPropertyName()
);
boolean hasNotNull;
if ( attributeBinding != null ) {
hasNotNull = applyConstraints(
propertyDesc.getConstraintDescriptors(), attributeBinding, propertyDesc, groups, activateNotNull, dialect
);
if ( (attributeBinding instanceof CompositeAttributeBinding) && propertyDesc.isCascaded() ) {
Class<?> componentClass = ( (CompositeAttributeBinding) attributeBinding ).getClassReference();
/*
* we can apply not null if the upper component let's us activate not null
* and if the property is not null.
* Otherwise, all sub columns should be left nullable
*/
final boolean canSetNotNullOnColumns = activateNotNull && hasNotNull;
applyDDL(
prefix + propertyDesc.getPropertyName() + ".",
entityBinding, componentClass, factory, groups,
canSetNotNullOnColumns,
dialect
);
}
//FIXME add collection of components
}
}
} }
private static void applyDDL( private static void applyDDL(
@ -229,7 +299,44 @@ class TypeSafeActivator {
} }
} }
} }
private static boolean applyConstraints(
Set<ConstraintDescriptor<?>> constraintDescriptors,
AttributeBinding attributeBinding,
PropertyDescriptor propertyDesc,
Set<Class<?>> groups,
boolean canApplyNotNull,
Dialect dialect) {
boolean hasNotNull = false;
for ( ConstraintDescriptor<?> descriptor : constraintDescriptors ) {
if ( groups != null && Collections.disjoint( descriptor.getGroups(), groups ) ) {
continue;
}
if ( canApplyNotNull ) {
hasNotNull = hasNotNull || applyNotNull( attributeBinding, descriptor );
}
// apply bean validation specific constraints
applyDigits( attributeBinding, descriptor );
applySize( attributeBinding, descriptor, propertyDesc );
applyMin( attributeBinding, descriptor, dialect );
applyMax( attributeBinding, descriptor, dialect );
// apply hibernate validator specific constraints - we cannot import any HV specific classes though!
// no need to check explicitly for @Range. @Range is a composed constraint using @Min and @Max which
// will be taken care later
applyLength( attributeBinding, descriptor, propertyDesc );
// pass an empty set as composing constraints inherit the main constraint and thus are matching already
hasNotNull = hasNotNull || applyConstraints(
descriptor.getComposingConstraints(),
attributeBinding, propertyDesc, null,
canApplyNotNull,
dialect
);
}
return hasNotNull;
}
private static boolean applyConstraints( private static boolean applyConstraints(
Set<ConstraintDescriptor<?>> constraintDescriptors, Set<ConstraintDescriptor<?>> constraintDescriptors,
Property property, Property property,
@ -268,7 +375,33 @@ class TypeSafeActivator {
} }
return hasNotNull; return hasNotNull;
} }
private static void applyMin(AttributeBinding property, ConstraintDescriptor<?> descriptor, Dialect dialect) {
if ( Min.class.equals( descriptor.getAnnotation().annotationType() ) ) {
@SuppressWarnings("unchecked")
ConstraintDescriptor<Min> minConstraint = (ConstraintDescriptor<Min>) descriptor;
long min = minConstraint.getAnnotation().value();
org.hibernate.metamodel.spi.relational.Column col = getSingleColumn( property );
if( col == null ) {
return;
}
String checkConstraint = col.getColumnName().getText(dialect) + ">=" + min;
applySQLCheck( col, checkConstraint );
}
}
private static void applyMax(AttributeBinding property, ConstraintDescriptor<?> descriptor, Dialect dialect) {
if ( Max.class.equals( descriptor.getAnnotation().annotationType() ) ) {
@SuppressWarnings("unchecked")
ConstraintDescriptor<Max> maxConstraint = (ConstraintDescriptor<Max>) descriptor;
long max = maxConstraint.getAnnotation().value();
org.hibernate.metamodel.spi.relational.Column col = getSingleColumn( property );
if( col == null ) {
return;
} String checkConstraint = col.getColumnName().getText(dialect) + "<=" + max;
applySQLCheck( col, checkConstraint );
}
}
private static void applyMin(Property property, ConstraintDescriptor<?> descriptor, Dialect dialect) { private static void applyMin(Property property, ConstraintDescriptor<?> descriptor, Dialect dialect) {
if ( Min.class.equals( descriptor.getAnnotation().annotationType() ) ) { if ( Min.class.equals( descriptor.getAnnotation().annotationType() ) ) {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@ -291,7 +424,15 @@ class TypeSafeActivator {
applySQLCheck( col, checkConstraint ); applySQLCheck( col, checkConstraint );
} }
} }
private static void applySQLCheck(org.hibernate.metamodel.spi.relational.Column col, String checkConstraint) {
String existingCheck = col.getCheckCondition();
// need to check whether the new check is already part of the existing check, because applyDDL can be called
// multiple times
if ( StringHelper.isNotEmpty( existingCheck ) && !existingCheck.contains( checkConstraint ) ) {
checkConstraint = col.getCheckCondition() + " AND " + checkConstraint;
}
col.setCheckCondition( checkConstraint );
}
private static void applySQLCheck(Column col, String checkConstraint) { private static void applySQLCheck(Column col, String checkConstraint) {
String existingCheck = col.getCheckConstraint(); String existingCheck = col.getCheckConstraint();
// need to check whether the new check is already part of the existing check, because applyDDL can be called // need to check whether the new check is already part of the existing check, because applyDDL can be called
@ -302,6 +443,42 @@ class TypeSafeActivator {
col.setCheckConstraint( checkConstraint ); col.setCheckConstraint( checkConstraint );
} }
private static boolean applyNotNull(AttributeBinding property, ConstraintDescriptor<?> descriptor) {
boolean hasNotNull = false;
if ( NotNull.class.equals( descriptor.getAnnotation().annotationType() ) ) {
EntityBinding entityBinding = property.getContainer().seekEntityBinding();
InheritanceType inheritanceType = entityBinding.getHierarchyDetails().getInheritanceType();
// properties of a single table inheritance configuration should not be forced to null
if(InheritanceType.SINGLE_TABLE.equals( inheritanceType )) {
return false;
}
if ( property instanceof CompositeAttributeBinding ) {
Iterator<AttributeBinding> iter
= ( ( CompositeAttributeBinding ) property)
.attributeBindings().iterator();
while( iter.hasNext() ) {
applyNullConstraint( iter.next() );
}
} else {
applyNullConstraint( property );
}
hasNotNull = true;
}
return hasNotNull;
}
private static void applyNullConstraint(AttributeBinding attributeBinding) {
org.hibernate.metamodel.spi.relational.Column column
= getSingleColumn( attributeBinding );
if ( column != null ) {
// TODO check with components as in the old configuration approach. see above (HF)
column.setNullable( false );
}
}
private static boolean applyNotNull(Property property, ConstraintDescriptor<?> descriptor) { private static boolean applyNotNull(Property property, ConstraintDescriptor<?> descriptor) {
boolean hasNotNull = false; boolean hasNotNull = false;
if ( NotNull.class.equals( descriptor.getAnnotation().annotationType() ) ) { if ( NotNull.class.equals( descriptor.getAnnotation().annotationType() ) ) {
@ -320,7 +497,18 @@ class TypeSafeActivator {
} }
return hasNotNull; return hasNotNull;
} }
private static void applyDigits(AttributeBinding property, ConstraintDescriptor<?> descriptor) {
if ( Digits.class.equals( descriptor.getAnnotation().annotationType() ) ) {
@SuppressWarnings("unchecked")
ConstraintDescriptor<Digits> digitsConstraint = (ConstraintDescriptor<Digits>) descriptor;
int integerDigits = digitsConstraint.getAnnotation().integer();
int fractionalDigits = digitsConstraint.getAnnotation().fraction();
org.hibernate.metamodel.spi.relational.Column col = getSingleColumn( property );
if(col==null)return;
col.getSize().setPrecision( integerDigits + fractionalDigits );
col.getSize().setScale( fractionalDigits );
}
}
private static void applyDigits(Property property, ConstraintDescriptor<?> descriptor) { private static void applyDigits(Property property, ConstraintDescriptor<?> descriptor) {
if ( Digits.class.equals( descriptor.getAnnotation().annotationType() ) ) { if ( Digits.class.equals( descriptor.getAnnotation().annotationType() ) ) {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@ -333,6 +521,37 @@ class TypeSafeActivator {
} }
} }
private static org.hibernate.metamodel.spi.relational.Column getSingleColumn(AttributeBinding attributeBinding) {
if ( !( attributeBinding.getAttribute().isSingular() ) ) {
// TODO verify that's correct (HF)
return null;
}
SingularAttributeBinding basicAttributeBinding = ( SingularAttributeBinding ) attributeBinding;
RelationalValueBinding valueBinding = basicAttributeBinding.getRelationalValueBindings().get( 0 );
Value value = valueBinding.getValue();
if ( valueBinding.isDerived() ) {
return null;
}
return ( org.hibernate.metamodel.spi.relational.Column ) value;
}
private static void applySize(AttributeBinding property, ConstraintDescriptor<?> descriptor, PropertyDescriptor propertyDescriptor) {
if ( Size.class.equals( descriptor.getAnnotation().annotationType() )
&& String.class.equals( propertyDescriptor.getElementClass() ) ) {
@SuppressWarnings("unchecked")
ConstraintDescriptor<Size> sizeConstraint = (ConstraintDescriptor<Size>) descriptor;
int max = sizeConstraint.getAnnotation().max();
org.hibernate.metamodel.spi.relational.Column col = getSingleColumn( property );
if ( col == null ) {
return;
}
if ( max < Integer.MAX_VALUE ) {
col.getSize().setLength( max );
}
}
}
private static void applySize(Property property, ConstraintDescriptor<?> descriptor, PropertyDescriptor propertyDescriptor) { private static void applySize(Property property, ConstraintDescriptor<?> descriptor, PropertyDescriptor propertyDescriptor) {
if ( Size.class.equals( descriptor.getAnnotation().annotationType() ) if ( Size.class.equals( descriptor.getAnnotation().annotationType() )
&& String.class.equals( propertyDescriptor.getElementClass() ) ) { && String.class.equals( propertyDescriptor.getElementClass() ) ) {
@ -345,7 +564,22 @@ class TypeSafeActivator {
} }
} }
} }
private static void applyLength(AttributeBinding property, ConstraintDescriptor<?> descriptor, PropertyDescriptor propertyDescriptor) {
if ( "org.hibernate.validator.constraints.Length".equals(
descriptor.getAnnotation().annotationType().getName()
)
&& String.class.equals( propertyDescriptor.getElementClass() ) ) {
@SuppressWarnings("unchecked")
int max = (Integer) descriptor.getAttributes().get( "max" );
org.hibernate.metamodel.spi.relational.Column col = getSingleColumn( property );
if( col == null ){
return;
}
if ( max < Integer.MAX_VALUE ) {
col.getSize().setLength( max );
}
}
}
private static void applyLength(Property property, ConstraintDescriptor<?> descriptor, PropertyDescriptor propertyDescriptor) { private static void applyLength(Property property, ConstraintDescriptor<?> descriptor, PropertyDescriptor propertyDescriptor) {
if ( "org.hibernate.validator.constraints.Length".equals( if ( "org.hibernate.validator.constraints.Length".equals(
descriptor.getAnnotation().annotationType().getName() descriptor.getAnnotation().annotationType().getName()
@ -360,6 +594,46 @@ class TypeSafeActivator {
} }
} }
private static AttributeBinding findAttributeBindingByName(EntityBinding entityBinding,
String attrName) {
AttributeBinding attrBinding = null;
EntityIdentifier identifier = entityBinding.getHierarchyDetails().getEntityIdentifier();
BasicAttributeBinding idAttrBinding = null; //identifier.getValueBinding();
String idAttrName = idAttrBinding != null ? idAttrBinding.getAttribute().getName() : null;
try {
if ( attrName == null || attrName.length() == 0 || attrName.equals( idAttrName ) ) {
attrBinding = idAttrBinding; // default to id
}
else {
if ( attrName.indexOf( idAttrName + "." ) == 0 ) {
attrBinding = idAttrBinding;
attrName = attrName.substring( idAttrName.length() + 1 );
}
for ( StringTokenizer st = new StringTokenizer( attrName, "." ); st.hasMoreElements(); ) {
String element = st.nextToken();
if ( attrBinding == null ) {
attrBinding = entityBinding.locateAttributeBinding( element );
}
else {
return null; // TODO: if (attrBinding.isComposite()) ...
}
}
}
}
catch ( MappingException error ) {
try {
//if we do not find it try to check the identifier mapper
if ( !identifier.isIdentifierMapper() ) {
return null;
}
// TODO: finish once composite/embedded/component IDs get worked out
}
catch ( MappingException ee ) {
return null;
}
}
return attrBinding;
}
/** /**
* @param associatedClass * @param associatedClass
* @param propertyName * @param propertyName

View File

@ -49,13 +49,13 @@ import org.hibernate.LockMode;
import org.hibernate.cache.CacheException; import org.hibernate.cache.CacheException;
import org.hibernate.dialect.Dialect; import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolver; import org.hibernate.engine.jdbc.dialect.spi.DialectResolver;
import org.hibernate.engine.jndi.JndiException;
import org.hibernate.engine.jndi.JndiNameException;
import org.hibernate.engine.loading.internal.CollectionLoadContext; import org.hibernate.engine.loading.internal.CollectionLoadContext;
import org.hibernate.engine.loading.internal.EntityLoadContext; import org.hibernate.engine.loading.internal.EntityLoadContext;
import org.hibernate.engine.spi.CollectionKey; import org.hibernate.engine.spi.CollectionKey;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.id.IntegralDataTypeHolder; import org.hibernate.id.IntegralDataTypeHolder;
import org.hibernate.engine.jndi.JndiException;
import org.hibernate.engine.jndi.JndiNameException;
import org.hibernate.type.BasicType; import org.hibernate.type.BasicType;
import org.hibernate.type.SerializationException; import org.hibernate.type.SerializationException;
import org.hibernate.type.Type; import org.hibernate.type.Type;
@ -1576,62 +1576,20 @@ public interface CoreMessageLogger extends BasicLogger {
@Message(value = "NaturalId queries executed to database: %s", id = 442) @Message(value = "NaturalId queries executed to database: %s", id = 442)
void naturalIdQueriesExecuted(long naturalIdQueriesExecutionCount); void naturalIdQueriesExecuted(long naturalIdQueriesExecutionCount);
@Message(value = "Unable to find mapping information for %s. Are you sure all annotated classes and configuration files are added?", id = 443)
String missingEntitySource(String entityName);
@Message(value = "@JoinTable annotation without an association. Check %s#%s", id = 444)
String joinTableForNonAssociationAttribute(String entityName, String propertyName);
@Message(value = "@CollectionTable annotation without a @ElementCollection. Check %s#%s", id = 445)
String collectionTableWithoutElementCollection(String entityName, String propertyName);
@Message(value = "@CollectionTable and @JoinTable specified on the same attribute. Check %s#%s", id = 446)
String collectionTableAndJoinTableUsedTogether(String entityName, String propertyName);
@LogMessage(level = WARN) @LogMessage(level = WARN)
@Message( @Message(
value = "Dialect [%s] limits the number of elements in an IN predicate to %s entries. " + value = "Dialect [%s] limits the number of elements in an IN predicate to %s entries. " +
"However, the given parameter list [%s] contained %s entries, which will likely cause failures " + "However, the given parameter list [%s] contained %s entries, which will likely cause failures " +
"to execute the query in the database", "to execute the query in the database",
id = 447 id = 443
) )
void tooManyInExpressions(String dialectName, int limit, String paramName, int size); void tooManyInExpressions(String dialectName, int limit, String paramName, int size);
@LogMessage( level = ERROR )
@Message( value = "Illegal argument on static metamodel field injection : %s#%s; expected type : %s; encountered type : %s", id = 448 )
void illegalArgumentOnStaticMetamodelFieldInjection( String metamodelClassName,
String attributeName,
String attributeJavaType,
String metamodelFieldJavaType );
@LogMessage( level = ERROR )
@Message( value = "Unable to locate static metamodel field : %s#%s", id = 449 )
void unableToLocateStaticMetamodelField( String metamodelClassName,
String attributeName );
@Message(value = "The access type of class %s is AccessType.FIELD. To override the access for an attribute " +
"@Access has to be placed on the property (getter)", id = 450)
String accessTypeOverrideShouldBeAnnotatedOnProperty( String className );
@Message(value = "The access type of class %s is AccessType.FIELD. To override the access for an attribute " +
"@Access has to be placed on the property (getter) with an access type of AccessType.PROPERTY. " +
"Using AccessType.FIELD on the property has no effect", id = 451)
String accessTypeOverrideShouldBeProperty( String className );
@Message(value = "The access type of class %s is AccessType.PROPERTY. To override the access for a field " +
"@Access has to be placed on the field ", id = 452)
String accessTypeOverrideShouldBeAnnotatedOnField( String className );
@Message(value = "The access type of class %s is AccessType.PROPERTY. To override the access for a field " +
"@Access has to be placed on the field with an access type of AccessType.FIELD. " +
"Using AccessType.PROPERTY on the field has no effect", id = 453)
String accessTypeOverrideShouldBeField( String className );
@LogMessage(level = WARN) @LogMessage(level = WARN)
@Message( @Message(
value = "Encountered request for locking however dialect reports that database prefers locking be done in a " + value = "Encountered request for locking however dialect reports that database prefers locking be done in a " +
"separate select (follow-on locking); results will be locked after initial query executes", "separate select (follow-on locking); results will be locked after initial query executes",
id = 454 id = 444
) )
void usingFollowOnLocking(); void usingFollowOnLocking();
@ -1639,7 +1597,7 @@ public interface CoreMessageLogger extends BasicLogger {
@Message( @Message(
value = "Alias-specific lock modes requested, which is not currently supported with follow-on locking; " + value = "Alias-specific lock modes requested, which is not currently supported with follow-on locking; " +
"all acquired locks will be [%s]", "all acquired locks will be [%s]",
id = 455 id = 445
) )
void aliasSpecificLockingWithFollowOnLocking(LockMode lockMode); void aliasSpecificLockingWithFollowOnLocking(LockMode lockMode);
@ -1647,7 +1605,7 @@ public interface CoreMessageLogger extends BasicLogger {
@Message( @Message(
value = "embed-xml attributes were intended to be used for DOM4J entity mode. Since that entity mode has been " + value = "embed-xml attributes were intended to be used for DOM4J entity mode. Since that entity mode has been " +
"removed, embed-xml attributes are no longer supported and should be removed from mappings.", "removed, embed-xml attributes are no longer supported and should be removed from mappings.",
id = 456 id = 446
) )
void embedXmlAttributesNoLongerSupported(); void embedXmlAttributesNoLongerSupported();
@ -1661,4 +1619,44 @@ public interface CoreMessageLogger extends BasicLogger {
@LogMessage(level = INFO) @LogMessage(level = INFO)
@Message( value = "'javax.persistence.validation.mode' named multiple values : %s", id = 448 ) @Message( value = "'javax.persistence.validation.mode' named multiple values : %s", id = 448 )
void multipleValidationModes(String modes); void multipleValidationModes(String modes);
@Message(value = "@CollectionTable and @JoinTable specified on the same attribute. Check %s#%s", id = 449)
String collectionTableAndJoinTableUsedTogether(String entityName, String propertyName);
@Message(value = "@CollectionTable annotation without a @ElementCollection. Check %s#%s", id = 450)
String collectionTableWithoutElementCollection(String entityName, String propertyName);
@Message(value = "@JoinTable annotation without an association. Check %s#%s", id = 451)
String joinTableForNonAssociationAttribute(String entityName, String propertyName);
@LogMessage( level = ERROR )
@Message( value = "Illegal argument on static metamodel field injection : %s#%s; expected type : %s; encountered type : %s", id = 452 )
void illegalArgumentOnStaticMetamodelFieldInjection( String metamodelClassName,
String attributeName,
String attributeJavaType,
String metamodelFieldJavaType );
@LogMessage( level = ERROR )
@Message( value = "Unable to locate static metamodel field : %s#%s", id = 453 )
void unableToLocateStaticMetamodelField( String metamodelClassName,
String attributeName );
@Message(value = "The access type of class %s is AccessType.FIELD. To override the access for an attribute " +
"@Access has to be placed on the property (getter)", id = 454)
String accessTypeOverrideShouldBeAnnotatedOnProperty( String className );
@Message(value = "The access type of class %s is AccessType.FIELD. To override the access for an attribute " +
"@Access has to be placed on the property (getter) with an access type of AccessType.PROPERTY. " +
"Using AccessType.FIELD on the property has no effect", id = 455)
String accessTypeOverrideShouldBeProperty( String className );
@Message(value = "The access type of class %s is AccessType.PROPERTY. To override the access for a field " +
"@Access has to be placed on the field ", id = 456)
String accessTypeOverrideShouldBeAnnotatedOnField( String className );
@Message(value = "The access type of class %s is AccessType.PROPERTY. To override the access for a field " +
"@Access has to be placed on the field with an access type of AccessType.FIELD. " +
"Using AccessType.PROPERTY on the field has no effect", id = 457)
String accessTypeOverrideShouldBeField( String className );
} }

View File

@ -127,8 +127,11 @@ public class EnumConversionHelper {
case LOCK: { case LOCK: {
return CascadeStyles.LOCK; return CascadeStyles.LOCK;
} }
case DELETE_ORPHAN: {
return CascadeStyles.DELETE_ORPHAN;
}
default: { default: {
throw new AssertionFailure( "Unknown cascade type" ); throw new AssertionFailure( "Unknown cascade type: " + cascadeType );
} }
} }
} }

View File

@ -40,6 +40,7 @@ import org.hibernate.HibernateException;
import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor; import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.util.ReflectHelper; import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.config.ConfigurationHelper; import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.usertype.DynamicParameterizedType; import org.hibernate.usertype.DynamicParameterizedType;
import org.hibernate.usertype.EnhancedUserType; import org.hibernate.usertype.EnhancedUserType;
@ -70,7 +71,10 @@ import org.hibernate.usertype.LoggableUserType;
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public class EnumType implements EnhancedUserType, DynamicParameterizedType,LoggableUserType, Serializable { public class EnumType implements EnhancedUserType, DynamicParameterizedType,LoggableUserType, Serializable {
private static final Logger LOG = Logger.getLogger( EnumType.class.getName() ); private static final Logger LOG = Logger.getLogger( EnumType.class.getName() );
/**
* @deprecated use {@link DynamicParameterizedType#RETURNED_CLASS} instead.
*/
@Deprecated
public static final String ENUM = "enumClass"; public static final String ENUM = "enumClass";
public static final String NAMED = "useNamed"; public static final String NAMED = "useNamed";
public static final String TYPE = "type"; public static final String TYPE = "type";
@ -234,13 +238,15 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType,Logg
} }
else { else {
String enumClassName = (String) parameters.get( ENUM ); String enumClassName = (String) parameters.get( ENUM );
if( StringHelper.isEmpty( enumClassName )){
enumClassName = (String)parameters.get( DynamicParameterizedType.RETURNED_CLASS );
}
try { try {
enumClass = ReflectHelper.classForName( enumClassName, this.getClass() ).asSubclass( Enum.class ); enumClass = ReflectHelper.classForName( enumClassName, this.getClass() ).asSubclass( Enum.class );
} }
catch ( ClassNotFoundException exception ) { catch ( ClassNotFoundException exception ) {
throw new HibernateException( "Enum class not found", exception ); throw new HibernateException( "Enum class not found", exception );
} }
final Object useNamedSetting = parameters.get( NAMED ); final Object useNamedSetting = parameters.get( NAMED );
if ( useNamedSetting != null ) { if ( useNamedSetting != null ) {
final boolean useNamed = ConfigurationHelper.getBoolean( NAMED, parameters ); final boolean useNamed = ConfigurationHelper.getBoolean( NAMED, parameters );

View File

@ -82,7 +82,7 @@ public class ElementCollectionBindingTest extends BaseAnnotationBindingTestCase
catch ( MappingException e ) { catch ( MappingException e ) {
assertEquals( "Unexpected error origin", TestEntity.class.getName(), e.getOrigin().getName() ); assertEquals( "Unexpected error origin", TestEntity.class.getName(), e.getOrigin().getName() );
assertEquals( "Unexpected type", SourceType.ANNOTATION, e.getOrigin().getType() ); assertEquals( "Unexpected type", SourceType.ANNOTATION, e.getOrigin().getType() );
assertTrue( "Wrong error message", e.getMessage().startsWith( "HHH000444" ) ); assertTrue( "Wrong error message", e.getMessage().startsWith( "HHH000451" ) );
} }
} }
@ -115,7 +115,7 @@ public class ElementCollectionBindingTest extends BaseAnnotationBindingTestCase
catch ( MappingException e ) { catch ( MappingException e ) {
assertEquals( "Unexpected error origin", TestEntity2.class.getName(), e.getOrigin().getName() ); assertEquals( "Unexpected error origin", TestEntity2.class.getName(), e.getOrigin().getName() );
assertEquals( "Unexpected type", SourceType.ANNOTATION, e.getOrigin().getType() ); assertEquals( "Unexpected type", SourceType.ANNOTATION, e.getOrigin().getType() );
assertTrue( "Wrong error message", e.getMessage().startsWith( "HHH000446" ) ); assertTrue( "Wrong error message", e.getMessage().startsWith( "HHH000449" ) );
} }
} }

View File

@ -34,7 +34,6 @@ import org.hibernate.cfg.Configuration;
import org.hibernate.metamodel.spi.relational.Column; import org.hibernate.metamodel.spi.relational.Column;
import org.hibernate.test.util.SchemaUtil; import org.hibernate.test.util.SchemaUtil;
import org.hibernate.testing.DialectChecks; import org.hibernate.testing.DialectChecks;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.RequiresDialectFeature; import org.hibernate.testing.RequiresDialectFeature;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
@ -75,7 +74,6 @@ public class DDLWithoutCallbackTest extends BaseCoreFunctionalTestCase {
@Test @Test
@RequiresDialectFeature(DialectChecks.SupportsColumnCheck.class) @RequiresDialectFeature(DialectChecks.SupportsColumnCheck.class)
@FailureExpectedWithNewMetamodel
public void testRangeChecksGetApplied() { public void testRangeChecksGetApplied() {
Range range = new Range( 1 ); Range range = new Range( 1 );
assertDatabaseConstraintViolationThrown( range ); assertDatabaseConstraintViolationThrown( range );

View File

@ -44,6 +44,7 @@ import org.hibernate.test.annotations.embedded.Person;
import org.hibernate.test.annotations.embedded.Summary; import org.hibernate.test.annotations.embedded.Summary;
import org.hibernate.test.annotations.embedded.WealthyPerson; import org.hibernate.test.annotations.embedded.WealthyPerson;
import org.hibernate.test.event.collection.detached.Alias; import org.hibernate.test.event.collection.detached.Alias;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
@ -54,6 +55,7 @@ import static org.junit.Assert.assertTrue;
/** /**
* @author Strong Liu <stliu@hibernate.org> * @author Strong Liu <stliu@hibernate.org>
*/ */
@FailureExpectedWithNewMetamodel
public class IndexTest extends BaseCoreFunctionalTestCase { public class IndexTest extends BaseCoreFunctionalTestCase {
@Override @Override
protected Class<?>[] getAnnotatedClasses() { protected Class<?>[] getAnnotatedClasses() {
@ -157,7 +159,7 @@ public class IndexTest extends BaseCoreFunctionalTestCase {
assertSame( collectionTable, index.getTable() ); assertSame( collectionTable, index.getTable() );
} }
@Test // @Test
public void testTableGeneratorIndex(){ public void testTableGeneratorIndex(){
//todo //todo
} }

View File

@ -1,15 +1,28 @@
//$Id: A320.java 14736 2008-06-04 14:23:42Z hardy.ferentschik $ //$Id: A320.java 14736 2008-06-04 14:23:42Z hardy.ferentschik $
package org.hibernate.test.annotations.onetoone.primarykey; package org.hibernate.test.annotations.onetoone.primarykey;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import org.jboss.logging.Logger; import org.jboss.logging.Logger;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.hibernate.cfg.AnnotationConfiguration; import org.hibernate.cfg.AnnotationConfiguration;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.cfg.Environment; import org.hibernate.cfg.Environment;
import org.hibernate.dialect.SQLServerDialect; import org.hibernate.dialect.SQLServerDialect;
import org.hibernate.mapping.MetadataSource;
import org.hibernate.metamodel.MetadataSources;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.service.ServiceRegistry; import org.hibernate.service.ServiceRegistry;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.ServiceRegistryBuilder; import org.hibernate.testing.ServiceRegistryBuilder;
import org.hibernate.testing.junit4.BaseUnitTestCase;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
/** /**
* Test harness for ANN-742. * Test harness for ANN-742.
@ -17,27 +30,53 @@ import org.hibernate.testing.ServiceRegistryBuilder;
* @author Hardy Ferentschik * @author Hardy Ferentschik
* *
*/ */
public class NullablePrimaryKeyTest { @FailureExpectedWithNewMetamodel
public class NullablePrimaryKeyTest extends BaseUnitTestCase {
private static final Logger log = Logger.getLogger( NullablePrimaryKeyTest.class ); private static final Logger log = Logger.getLogger( NullablePrimaryKeyTest.class );
@Test @Test
public void testGeneratedSql() { public void testGeneratedSql() {
Properties properties = new Properties();
ServiceRegistry serviceRegistry = null; properties.putAll( Environment.getProperties() );
properties.setProperty( AvailableSettings.DIALECT, SQLServerDialect.class.getName() );
ServiceRegistry serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( properties );
try { try {
AnnotationConfiguration config = new AnnotationConfiguration(); MetadataSources metadataSource = new MetadataSources(serviceRegistry);
config.addAnnotatedClass(Address.class); metadataSource.addAnnotatedClass( Address.class ).addAnnotatedClass( Person.class );
config.addAnnotatedClass(Person.class); MetadataImplementor metadata = (MetadataImplementor) metadataSource.buildMetadata();
serviceRegistry = ServiceRegistryBuilder.buildServiceRegistry( Environment.getProperties() ); metadata.getDatabase().getJdbcEnvironment();
config.buildSessionFactory( serviceRegistry );
String[] schema = config SchemaManagementTool schemaManagementTool = serviceRegistry.getService( SchemaManagementTool.class );
.generateSchemaCreationScript(new SQLServerDialect()); SchemaCreator schemaCreator = schemaManagementTool.getSchemaCreator( new HashMap() );
for (String s : schema) { final List<String> commands = new ArrayList<String>();
final org.hibernate.tool.schema.spi.Target target = new org.hibernate.tool.schema.spi.Target() {
@Override
public boolean acceptsImportScriptActions() {
return false;
}
@Override
public void prepare() {
commands.clear();
}
@Override
public void accept(String command) {
commands.add( command );
}
@Override
public void release() {
}
};
schemaCreator.doCreation( metadata.getDatabase(), false, target );
for ( String s : commands ) {
log.debug( s ); log.debug( s );
} }
String expectedMappingTableSql = "create table personAddress (person_id numeric(19,0) not null, " + String expectedMappingTableSql = "create table personAddress (person_id numeric(19,0) not null, " +
"address_id numeric(19,0), primary key (person_id))"; "address_id numeric(19,0), primary key (person_id))";
Assert.assertEquals( "Wrong SQL", expectedMappingTableSql, schema[2] ); Assert.assertEquals( "Wrong SQL", expectedMappingTableSql, commands.get( 2 ) );
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace();
Assert.fail(e.getMessage()); Assert.fail(e.getMessage());
} }
finally { finally {

View File

@ -28,6 +28,7 @@ import org.junit.Test;
import org.hibernate.Session; import org.hibernate.Session;
import org.hibernate.collection.internal.PersistentMap; import org.hibernate.collection.internal.PersistentMap;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
@ -41,6 +42,7 @@ import static org.junit.Assert.assertTrue;
* *
* @author Steve Ebersole * @author Steve Ebersole
*/ */
@FailureExpectedWithNewMetamodel
public class PersistentMapTest extends BaseCoreFunctionalTestCase { public class PersistentMapTest extends BaseCoreFunctionalTestCase {
@Override @Override
public String[] getMappings() { public String[] getMappings() {

View File

@ -30,6 +30,7 @@ import org.hibernate.Query;
import org.hibernate.Session; import org.hibernate.Session;
import org.hibernate.dialect.MySQLDialect; import org.hibernate.dialect.MySQLDialect;
import org.hibernate.dialect.SQLServerDialect; import org.hibernate.dialect.SQLServerDialect;
import org.hibernate.testing.FailureExpectedWithNewMetamodel;
import org.hibernate.testing.SkipForDialect; import org.hibernate.testing.SkipForDialect;
import org.hibernate.testing.SkipForDialects; import org.hibernate.testing.SkipForDialects;
import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase;
@ -41,6 +42,7 @@ import static org.junit.Assert.assertTrue;
/** /**
* @author Janario Oliveira * @author Janario Oliveira
*/ */
@FailureExpectedWithNewMetamodel
public class NamedNativeQueryTest extends BaseCoreFunctionalTestCase { public class NamedNativeQueryTest extends BaseCoreFunctionalTestCase {
private FromEntity createFrom(String name, String lastName) { private FromEntity createFrom(String name, String lastName) {

View File

@ -35,6 +35,8 @@ import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.H2Dialect; import org.hibernate.dialect.H2Dialect;
import org.hibernate.engine.spi.Mapping; import org.hibernate.engine.spi.Mapping;
import org.hibernate.mapping.AuxiliaryDatabaseObject; import org.hibernate.mapping.AuxiliaryDatabaseObject;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.metamodel.spi.relational.Database;
import org.hibernate.procedure.ProcedureCall; import org.hibernate.procedure.ProcedureCall;
import org.hibernate.procedure.ProcedureResult; import org.hibernate.procedure.ProcedureResult;
import org.hibernate.result.ResultSetReturn; import org.hibernate.result.ResultSetReturn;
@ -53,23 +55,25 @@ import static org.junit.Assert.fail;
*/ */
@RequiresDialect( H2Dialect.class ) @RequiresDialect( H2Dialect.class )
public class StoredProcedureTest extends BaseCoreFunctionalTestCase { public class StoredProcedureTest extends BaseCoreFunctionalTestCase {
@Override
protected void configure(Configuration configuration) {
super.configure( configuration );
configuration.addAuxiliaryDatabaseObject(
new AuxiliaryDatabaseObject() {
@Override
public void addDialectScope(String dialectName) {
}
@Override
protected void afterConstructAndConfigureMetadata(MetadataImplementor metadataImplementor) {
Database database = metadataImplementor.getDatabase();
database.addAuxiliaryDatabaseObject( new org.hibernate.metamodel.spi.relational.AuxiliaryDatabaseObject() {
@Override @Override
public boolean appliesToDialect(Dialect dialect) { public boolean appliesToDialect(Dialect dialect) {
return H2Dialect.class.isInstance( dialect ); return H2Dialect.class.isInstance( dialect );
} }
@Override @Override
public String sqlCreateString(Dialect dialect, Mapping p, String defaultCatalog, String defaultSchema) { public boolean beforeTablesOnCreation() {
return "CREATE ALIAS findOneUser AS $$\n" + return true;
}
@Override
public String[] sqlCreateStrings(Dialect dialect) {
String [] strings = new String[1];
strings[0] = "CREATE ALIAS findOneUser AS $$\n" +
"import org.h2.tools.SimpleResultSet;\n" + "import org.h2.tools.SimpleResultSet;\n" +
"import java.sql.*;\n" + "import java.sql.*;\n" +
"@CODE\n" + "@CODE\n" +
@ -81,29 +85,37 @@ public class StoredProcedureTest extends BaseCoreFunctionalTestCase {
" return rs;\n" + " return rs;\n" +
"}\n" + "}\n" +
"$$"; "$$";
return strings;
} }
@Override @Override
public String sqlDropString(Dialect dialect, String defaultCatalog, String defaultSchema) { public String[] sqlDropStrings(Dialect dialect) {
return "DROP ALIAS findUser IF EXISTS"; String [] strings = new String[1];
strings[0]= "DROP ALIAS findUser IF EXISTS";
return strings;
} }
}
);
configuration.addAuxiliaryDatabaseObject(
new AuxiliaryDatabaseObject() {
@Override @Override
public void addDialectScope(String dialectName) { public String getExportIdentifier() {
return "findOneUser";
} }
} );
database.addAuxiliaryDatabaseObject( new org.hibernate.metamodel.spi.relational.AuxiliaryDatabaseObject() {
@Override @Override
public boolean appliesToDialect(Dialect dialect) { public boolean appliesToDialect(Dialect dialect) {
return H2Dialect.class.isInstance( dialect ); return H2Dialect.class.isInstance( dialect );
} }
@Override @Override
public String sqlCreateString(Dialect dialect, Mapping p, String defaultCatalog, String defaultSchema) { public boolean beforeTablesOnCreation() {
return "CREATE ALIAS findUsers AS $$\n" + return true;
}
@Override
public String[] sqlCreateStrings(Dialect dialect) {
String [] strings = new String[1];
strings[0] = "CREATE ALIAS findUsers AS $$\n" +
"import org.h2.tools.SimpleResultSet;\n" + "import org.h2.tools.SimpleResultSet;\n" +
"import java.sql.*;\n" + "import java.sql.*;\n" +
"@CODE\n" + "@CODE\n" +
@ -117,29 +129,37 @@ public class StoredProcedureTest extends BaseCoreFunctionalTestCase {
" return rs;\n" + " return rs;\n" +
"}\n" + "}\n" +
"$$"; "$$";
return strings;
} }
@Override @Override
public String sqlDropString(Dialect dialect, String defaultCatalog, String defaultSchema) { public String[] sqlDropStrings(Dialect dialect) {
return "DROP ALIAS findUser IF EXISTS"; String [] strings = new String[1];
strings[0]= "DROP ALIAS findUser IF EXISTS";
return strings;
} }
}
);
configuration.addAuxiliaryDatabaseObject(
new AuxiliaryDatabaseObject() {
@Override @Override
public void addDialectScope(String dialectName) { public String getExportIdentifier() {
return "findUser2";
} }
} );
database.addAuxiliaryDatabaseObject( new org.hibernate.metamodel.spi.relational.AuxiliaryDatabaseObject() {
@Override @Override
public boolean appliesToDialect(Dialect dialect) { public boolean appliesToDialect(Dialect dialect) {
return H2Dialect.class.isInstance( dialect ); return H2Dialect.class.isInstance( dialect );
} }
@Override @Override
public String sqlCreateString(Dialect dialect, Mapping p, String defaultCatalog, String defaultSchema) { public boolean beforeTablesOnCreation() {
return "CREATE ALIAS findUserRange AS $$\n" + return true;
}
@Override
public String[] sqlCreateStrings(Dialect dialect) {
String [] strings = new String[1];
strings[0] = "CREATE ALIAS findUserRange AS $$\n" +
"import org.h2.tools.SimpleResultSet;\n" + "import org.h2.tools.SimpleResultSet;\n" +
"import java.sql.*;\n" + "import java.sql.*;\n" +
"@CODE\n" + "@CODE\n" +
@ -153,16 +173,133 @@ public class StoredProcedureTest extends BaseCoreFunctionalTestCase {
" return rs;\n" + " return rs;\n" +
"}\n" + "}\n" +
"$$"; "$$";
return strings;
} }
@Override @Override
public String sqlDropString(Dialect dialect, String defaultCatalog, String defaultSchema) { public String[] sqlDropStrings(Dialect dialect) {
return "DROP ALIAS findUser IF EXISTS"; String [] strings = new String[1];
strings[0]= "DROP ALIAS findUser IF EXISTS";
return strings;
} }
@Override
public String getExportIdentifier() {
return "findUser";
} }
); } );
} }
// @Override
// protected void configure(Configuration configuration) {
// super.configure( configuration );
// configuration.addAuxiliaryDatabaseObject(
// new AuxiliaryDatabaseObject() {
// @Override
// public void addDialectScope(String dialectName) {
// }
//
// @Override
// public boolean appliesToDialect(Dialect dialect) {
// return H2Dialect.class.isInstance( dialect );
// }
//
// @Override
// public String sqlCreateString(Dialect dialect, Mapping p, String defaultCatalog, String defaultSchema) {
// return "CREATE ALIAS findOneUser AS $$\n" +
// "import org.h2.tools.SimpleResultSet;\n" +
// "import java.sql.*;\n" +
// "@CODE\n" +
// "ResultSet findOneUser() {\n" +
// " SimpleResultSet rs = new SimpleResultSet();\n" +
// " rs.addColumn(\"ID\", Types.INTEGER, 10, 0);\n" +
// " rs.addColumn(\"NAME\", Types.VARCHAR, 255, 0);\n" +
// " rs.addRow(1, \"Steve\");\n" +
// " return rs;\n" +
// "}\n" +
// "$$";
// }
//
// @Override
// public String sqlDropString(Dialect dialect, String defaultCatalog, String defaultSchema) {
// return "DROP ALIAS findUser IF EXISTS";
// }
// }
// );
//
// configuration.addAuxiliaryDatabaseObject(
// new AuxiliaryDatabaseObject() {
// @Override
// public void addDialectScope(String dialectName) {
// }
//
// @Override
// public boolean appliesToDialect(Dialect dialect) {
// return H2Dialect.class.isInstance( dialect );
// }
//
// @Override
// public String sqlCreateString(Dialect dialect, Mapping p, String defaultCatalog, String defaultSchema) {
// return "CREATE ALIAS findUsers AS $$\n" +
// "import org.h2.tools.SimpleResultSet;\n" +
// "import java.sql.*;\n" +
// "@CODE\n" +
// "ResultSet findUsers() {\n" +
// " SimpleResultSet rs = new SimpleResultSet();\n" +
// " rs.addColumn(\"ID\", Types.INTEGER, 10, 0);\n" +
// " rs.addColumn(\"NAME\", Types.VARCHAR, 255, 0);\n" +
// " rs.addRow(1, \"Steve\");\n" +
// " rs.addRow(2, \"John\");\n" +
// " rs.addRow(3, \"Jane\");\n" +
// " return rs;\n" +
// "}\n" +
// "$$";
// }
//
// @Override
// public String sqlDropString(Dialect dialect, String defaultCatalog, String defaultSchema) {
// return "DROP ALIAS findUser IF EXISTS";
// }
// }
// );
//
// configuration.addAuxiliaryDatabaseObject(
// new AuxiliaryDatabaseObject() {
// @Override
// public void addDialectScope(String dialectName) {
// }
//
// @Override
// public boolean appliesToDialect(Dialect dialect) {
// return H2Dialect.class.isInstance( dialect );
// }
//
// @Override
// public String sqlCreateString(Dialect dialect, Mapping p, String defaultCatalog, String defaultSchema) {
// return "CREATE ALIAS findUserRange AS $$\n" +
// "import org.h2.tools.SimpleResultSet;\n" +
// "import java.sql.*;\n" +
// "@CODE\n" +
// "ResultSet findUserRange(int start, int end) {\n" +
// " SimpleResultSet rs = new SimpleResultSet();\n" +
// " rs.addColumn(\"ID\", Types.INTEGER, 10, 0);\n" +
// " rs.addColumn(\"NAME\", Types.VARCHAR, 255, 0);\n" +
// " for ( int i = start; i < end; i++ ) {\n" +
// " rs.addRow(1, \"User \" + i );\n" +
// " }\n" +
// " return rs;\n" +
// "}\n" +
// "$$";
// }
//
// @Override
// public String sqlDropString(Dialect dialect, String defaultCatalog, String defaultSchema) {
// return "DROP ALIAS findUser IF EXISTS";
// }
// }
// );
// }
@Test @Test
public void baseTest() { public void baseTest() {
Session session = openSession(); Session session = openSession();

View File

@ -0,0 +1,17 @@
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n
log4j.rootLogger=info, stdout
log4j.logger.org.hibernate.tool.hbm2ddl=trace
log4j.logger.org.hibernate.testing.cache=debug
# SQL Logging - HHH-6833
log4j.logger.org.hibernate.SQL=debug
log4j.logger.org.hibernate.hql.internal.ast=debug
log4j.logger.org.hibernate.sql.ordering.antlr=debug

View File

@ -27,7 +27,8 @@ import javax.persistence.PersistenceException;
import java.sql.Connection; import java.sql.Connection;
import java.sql.SQLException; import java.sql.SQLException;
import org.hibernate.engine.jdbc.spi.JdbcConnectionAccess; import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
/** /**
* Defines access to a JDBC Connection for use in Schema generation * Defines access to a JDBC Connection for use in Schema generation

View File

@ -42,9 +42,9 @@ import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cfg.Configuration; import org.hibernate.cfg.Configuration;
import org.hibernate.dialect.Dialect; import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider; import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.dialect.spi.DatabaseInfoDialectResolver; import org.hibernate.engine.jdbc.dialect.spi.DatabaseInfoDialectResolver;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolver; import org.hibernate.engine.jdbc.dialect.spi.DialectResolver;
import org.hibernate.engine.jdbc.spi.JdbcConnectionAccess;
import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.config.ConfigurationHelper; import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.jpa.AvailableSettings; import org.hibernate.jpa.AvailableSettings;