Merge branch 'master' of github.com:hibernate/hibernate-core

commit ed48037692
@@ -32,7 +32,6 @@ import org.hibernate.cache.RegionFactory;
import org.hibernate.engine.jdbc.JdbcSupport;
import org.hibernate.engine.jdbc.batch.internal.BatchBuilder;
import org.hibernate.hql.QueryTranslatorFactory;
import org.hibernate.jdbc.BatcherFactory;
import org.hibernate.jdbc.util.SQLStatementLogger;
import org.hibernate.transaction.TransactionFactory;
import org.hibernate.transaction.TransactionManagerLookup;
@@ -78,7 +77,6 @@ public final class Settings {
    private QueryCacheFactory queryCacheFactory;
    private TransactionFactory transactionFactory;
    private TransactionManagerLookup transactionManagerLookup;
    private BatcherFactory batcherFactory;
    private BatchBuilder batchBuilder;
    private QueryTranslatorFactory queryTranslatorFactory;
    private boolean wrapResultSetsEnabled;
@@ -228,10 +226,6 @@ public final class Settings {
        return flushBeforeCompletionEnabled;
    }

    public BatcherFactory getBatcherFactory() {
        return batcherFactory;
    }

    public BatchBuilder getBatchBuilder() {
        return batchBuilder;
    }
@@ -419,10 +413,6 @@ public final class Settings {
        this.flushBeforeCompletionEnabled = flushBeforeCompletionEnabled;
    }

    void setBatcherFactory(BatcherFactory batcher) {
        this.batcherFactory = batcher;
    }

    void setBatcherBuilder(BatchBuilder batchBuilder) {
        this.batchBuilder = batchBuilder;
    }

@@ -44,9 +44,6 @@ import org.hibernate.engine.jdbc.spi.ExtractedDatabaseMetaData;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.hql.QueryTranslatorFactory;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.jdbc.BatcherFactory;
import org.hibernate.jdbc.BatchingBatcherFactory;
import org.hibernate.jdbc.NonBatchingBatcherFactory;
import org.hibernate.jdbc.util.SQLStatementLogger;
import org.hibernate.transaction.TransactionFactory;
import org.hibernate.transaction.TransactionFactoryFactory;
@@ -115,7 +112,6 @@ public class SettingsFactory implements Serializable {
        boolean jdbcBatchVersionedData = ConfigurationHelper.getBoolean(Environment.BATCH_VERSIONED_DATA, properties, false);
        if (batchSize>0) log.info("JDBC batch updates for versioned data: " + enabledDisabled(jdbcBatchVersionedData) );
        settings.setJdbcBatchVersionedData(jdbcBatchVersionedData);
        settings.setBatcherFactory( createBatcherFactory(properties, batchSize) );
        settings.setBatcherBuilder( createBatchBuilder(properties, batchSize) );

        boolean useScrollableResultSets = ConfigurationHelper.getBoolean(Environment.USE_SCROLLABLE_RESULTSET, properties, meta.supportsScrollableResults());
@@ -350,47 +346,25 @@ public class SettingsFactory implements Serializable {
        }
    }

    protected BatcherFactory createBatcherFactory(Properties properties, int batchSize) {
        String batcherClass = properties.getProperty(Environment.BATCH_STRATEGY);
        BatcherFactory batcherFactory = null;
        if (batcherClass==null) {
            batcherFactory = batchSize == 0
                    ? new NonBatchingBatcherFactory()
                    : new BatchingBatcherFactory( );
        }
        else {
            log.info("Batcher factory: " + batcherClass);
            try {
                batcherFactory = (BatcherFactory) ReflectHelper.classForName(batcherClass).newInstance();
            }
            catch (Exception cnfe) {
                throw new HibernateException("could not instantiate BatcherFactory: " + batcherClass, cnfe);
            }
        }
        batcherFactory.setJdbcBatchSize( batchSize );
        return batcherFactory;
    }

    protected BatchBuilder createBatchBuilder(Properties properties, int batchSize) {
        //FIXME: uncomment to use BatchBuilder
        /*
        String batchBuilderClass = properties.getProperty(Environment.BATCH_STRATEGY);
        BatchBuilder batchBuilder;
        if (batchBuilderClass==null) {
            return batchSize > 0
            batchBuilder = batchSize > 0
                    ? new BatchBuilder( batchSize )
                    : new BatchBuilder();
        }
        else {
            log.info("Batcher factory: " + batchBuilderClass);
            log.info("Batch factory: " + batchBuilderClass);
            try {
                return (BatchBuilder) ReflectHelper.classForName(batchBuilderClass).newInstance();
                batchBuilder = (BatchBuilder) ReflectHelper.classForName(batchBuilderClass).newInstance();
            }
            catch (Exception cnfe) {
                throw new HibernateException("could not instantiate BatchBuilder: " + batchBuilderClass, cnfe);
            }
        }
        */
        return null;
        batchBuilder.setJdbcBatchSize( batchSize );
        return batchBuilder;
    }

    protected TransactionFactory createTransactionFactory(Properties properties) {

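For reference, createBatchBuilder() above is currently a no-op: it returns null and its intended body is commented out. Consolidating those commented-out lines, the enabled version could read roughly as follows. This is only a sketch reconstructed from the comment block, not the committed implementation:

    protected BatchBuilder createBatchBuilder(Properties properties, int batchSize) {
        String batchBuilderClass = properties.getProperty( Environment.BATCH_STRATEGY );
        BatchBuilder batchBuilder;
        if ( batchBuilderClass == null ) {
            // no custom strategy configured: size-aware builder when batching is enabled
            batchBuilder = batchSize > 0 ? new BatchBuilder( batchSize ) : new BatchBuilder();
        }
        else {
            log.info( "Batch factory: " + batchBuilderClass );
            try {
                batchBuilder = (BatchBuilder) ReflectHelper.classForName( batchBuilderClass ).newInstance();
            }
            catch ( Exception e ) {
                throw new HibernateException( "could not instantiate BatchBuilder: " + batchBuilderClass, e );
            }
        }
        batchBuilder.setJdbcBatchSize( batchSize );
        return batchBuilder;
    }
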
@@ -22,8 +22,6 @@
 * Boston, MA 02110-1301 USA
 */

// $Id$

package org.hibernate.cfg.annotations.reflection;

import java.beans.Introspector;
@@ -47,6 +45,7 @@ import javax.persistence.AttributeOverride;
import javax.persistence.AttributeOverrides;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ColumnResult;
import javax.persistence.DiscriminatorColumn;
@@ -77,6 +76,12 @@ import javax.persistence.Lob;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.MapKey;
import javax.persistence.MapKeyClass;
import javax.persistence.MapKeyColumn;
import javax.persistence.MapKeyEnumerated;
import javax.persistence.MapKeyJoinColumn;
import javax.persistence.MapKeyJoinColumns;
import javax.persistence.MapKeyTemporal;
import javax.persistence.MappedSuperclass;
import javax.persistence.NamedNativeQueries;
import javax.persistence.NamedNativeQuery;
@@ -85,6 +90,7 @@ import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.OrderBy;
import javax.persistence.OrderColumn;
import javax.persistence.PostLoad;
import javax.persistence.PostPersist;
import javax.persistence.PostRemove;
@@ -112,6 +118,7 @@ import javax.persistence.ElementCollection;
import org.dom4j.Attribute;
import org.dom4j.Element;
import org.hibernate.AnnotationException;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CollectionOfElements;
import org.hibernate.annotations.Columns;
import org.hibernate.annotations.common.annotationfactory.AnnotationDescriptor;
@@ -205,6 +212,14 @@ public class JPAOverridenAnnotationReader implements AnnotationReader {
        annotationToXml.put( PostRemove.class, "post-remove" );
        annotationToXml.put( PostUpdate.class, "post-update" );
        annotationToXml.put( PostLoad.class, "post-load" );
        annotationToXml.put( CollectionTable.class, "collection-table" );
        annotationToXml.put( MapKeyClass.class, "map-key-class" );
        annotationToXml.put( MapKeyTemporal.class, "map-key-temporal" );
        annotationToXml.put( MapKeyEnumerated.class, "map-key-enumerated" );
        annotationToXml.put( MapKeyColumn.class, "map-key-column" );
        annotationToXml.put( MapKeyJoinColumn.class, "map-key-join-column" );
        annotationToXml.put( MapKeyJoinColumns.class, "map-key-join-column" );
        annotationToXml.put( OrderColumn.class, "order-column" );
    }

    private XMLContext xmlContext;
@@ -658,9 +673,17 @@ public class JPAOverridenAnnotationReader implements AnnotationReader {
            Annotation annotation = getPrimaryKeyJoinColumns( element, defaults );
            addIfNotNull( annotationList, annotation );
            copyBooleanAttribute( ad, element, "optional" );
            copyBooleanAttribute( ad, element, "orphan-removal" );
            copyStringAttribute( ad, element, "mapped-by", false );
            getOrderBy( annotationList, element );
            getMapKey( annotationList, element );
            getMapKeyClass( annotationList, element, defaults );
            getMapKeyColumn(annotationList, element);
            //TODO: support order-column
            //TODO: support map-key-temporal
            //TODO: support map-key-enumerated
            //TODO: support map-key-attribute-override
            //TODO: support map-key-join-column
            annotationList.add( AnnotationFactory.create( ad ) );
            getAccessType( annotationList, element );
        }
@@ -701,18 +724,26 @@ public class JPAOverridenAnnotationReader implements AnnotationReader {
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( Columns.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKeyClass.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKeyTemporal.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKeyEnumerated.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKeyColumn.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKeyJoinColumn.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKeyJoinColumns.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( OrderColumn.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( Cascade.class );
            addIfNotNull( annotationList, annotation );
        }
        else if ( isJavaAnnotationPresent( ElementCollection.class ) ) { //JPA2
            annotation = overridesDefaultsInJoinTable( getJavaAnnotation( ElementCollection.class ), defaults );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( JoinColumn.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( JoinColumns.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( PrimaryKeyJoinColumn.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( PrimaryKeyJoinColumns.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKey.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( OrderBy.class );
@@ -733,7 +764,21 @@ public class JPAOverridenAnnotationReader implements AnnotationReader {
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( Column.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( Columns.class );
            annotation = getJavaAnnotation( OrderColumn.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKeyClass.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKeyTemporal.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKeyEnumerated.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKeyColumn.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKeyJoinColumn.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( MapKeyJoinColumns.class );
            addIfNotNull( annotationList, annotation );
            annotation = getJavaAnnotation( CollectionTable.class );
            addIfNotNull( annotationList, annotation );
        }
        else if ( isJavaAnnotationPresent( CollectionOfElements.class ) ) { //legacy Hibernate
@@ -798,13 +843,32 @@ public class JPAOverridenAnnotationReader implements AnnotationReader {
        if ( "element-collection".equals( element.getName() ) ) {
            AnnotationDescriptor ad = new AnnotationDescriptor( ElementCollection.class );
            addTargetClass( element, ad, "target-class", defaults );
            getFetchType( ad, element );
            getOrderBy( annotationList, element );
            //TODO: support order-column
            getMapKey( annotationList, element );
            getMapKeyClass( annotationList, element, defaults );
            //TODO: support map-key-temporal
            //TODO: support map-key-enumerated
            //TODO: support map-key-attribute-override
            getMapKeyColumn(annotationList, element);
            //TODO: support map-key-join-column
            Annotation annotation = getColumn(element.element( "column" ), false, element);
            addIfNotNull(annotationList, annotation);
            getTemporal(annotationList, element);
            getEnumerated(annotationList, element);
            getLob(annotationList, element);
            annotation = getAttributeOverrides( element, defaults );
            addIfNotNull( annotationList, annotation );
            annotation = getAssociationOverrides( element, defaults );
            addIfNotNull( annotationList, annotation );
            getCollectionTable(annotationList, element, defaults);
            annotationList.add( AnnotationFactory.create( ad ) );

            getAccessType( annotationList, element );
        }
    }
    }

    private void getOrderBy(List<Annotation> annotationList, Element element) {
        Element subelement = element != null ? element.element( "order-by" ) : null;
        if ( subelement != null ) {
@@ -824,6 +888,73 @@ public class JPAOverridenAnnotationReader implements AnnotationReader {
            annotationList.add( AnnotationFactory.create( ad ) );
        }
    }

    private void getMapKeyColumn(List<Annotation> annotationList, Element element) {
        Element subelement = element != null ? element.element( "map-key-column" ) : null;
        if ( subelement != null ) {
            AnnotationDescriptor ad = new AnnotationDescriptor( MapKeyColumn.class );
            copyStringAttribute( ad, subelement, "name", false );
            copyBooleanAttribute( ad, subelement, "unique" );
            copyBooleanAttribute( ad, subelement, "nullable" );
            copyBooleanAttribute( ad, subelement, "insertable" );
            copyBooleanAttribute( ad, subelement, "updatable" );
            copyStringAttribute( ad, subelement, "column-definition", false );
            copyStringAttribute( ad, subelement, "table", false );
            copyIntegerAttribute( ad, subelement, "length" );
            copyIntegerAttribute( ad, subelement, "precision" );
            copyIntegerAttribute( ad, subelement, "scale" );
            annotationList.add( AnnotationFactory.create( ad ) );
        }
    }

    private void getMapKeyClass(List<Annotation> annotationList, Element element, XMLContext.Default defaults) {
        String nodeName = "map-key-class";
        Element subelement = element != null ? element.element( nodeName ) : null;
        if ( subelement != null ) {
            String mapKeyClassName = subelement.attributeValue( "class" );
            AnnotationDescriptor ad = new AnnotationDescriptor( MapKeyClass.class );
            if ( StringHelper.isNotEmpty( mapKeyClassName ) ) {
                Class clazz;
                try {
                    clazz = ReflectHelper.classForName(
                            XMLContext.buildSafeClassName( mapKeyClassName, defaults ),
                            this.getClass()
                    );
                }
                catch (ClassNotFoundException e) {
                    throw new AnnotationException(
                            "Unable to find " + element.getPath() + " " + nodeName + ": " + mapKeyClassName, e
                    );
                }
                ad.setValue( "value", clazz );
            }
            annotationList.add( AnnotationFactory.create( ad ) );
        }
    }

    private void getCollectionTable(List<Annotation> annotationList, Element element, XMLContext.Default defaults) {
        Element subelement = element != null ? element.element( "collection-table" ) : null;
        if ( subelement != null ) {
            AnnotationDescriptor annotation = new AnnotationDescriptor( CollectionTable.class );
            copyStringAttribute( annotation, subelement, "name", false );
            copyStringAttribute( annotation, subelement, "catalog", false );
            if ( StringHelper.isNotEmpty( defaults.getCatalog() )
                    && StringHelper.isEmpty( (String) annotation.valueOf( "catalog" ) ) ) {
                annotation.setValue( "catalog", defaults.getCatalog() );
            }
            copyStringAttribute( annotation, subelement, "schema", false );
            if ( StringHelper.isNotEmpty( defaults.getSchema() )
                    && StringHelper.isEmpty( (String) annotation.valueOf( "schema" ) ) ) {
                annotation.setValue( "schema", defaults.getSchema() );
            }
            JoinColumn[] joinColumns = getJoinColumns( subelement, false );
            if ( joinColumns.length > 0 ) {
                annotation.setValue( "joinColumns", joinColumns );
            }
            buildUniqueConstraints( annotation, subelement );
            annotationList.add( AnnotationFactory.create( annotation ) );
        }
    }

    private void buildJoinColumns(List<Annotation> annotationList, Element element) {
        JoinColumn[] joinColumns = getJoinColumns( element, false );
@@ -1216,7 +1347,7 @@ public class JPAOverridenAnnotationReader implements AnnotationReader {
    }

    private AssociationOverrides getAssociationOverrides(Element tree, XMLContext.Default defaults) {
        List<AssociationOverride> attributes = (List<AssociationOverride>) buildAssociationOverrides( tree );
        List<AssociationOverride> attributes = buildAssociationOverrides( tree );
        if ( defaults.canUseJavaAnnotations() ) {
            AssociationOverride annotation = getJavaAnnotation( AssociationOverride.class );
            addAssociationOverrideIfNeeded( annotation, attributes );
@@ -1432,7 +1563,7 @@ public class JPAOverridenAnnotationReader implements AnnotationReader {
    }

    private SqlResultSetMappings getSqlResultSetMappings(Element tree, XMLContext.Default defaults) {
        List<SqlResultSetMapping> results = (List<SqlResultSetMapping>) buildSqlResultsetMappings( tree, defaults );
        List<SqlResultSetMapping> results = buildSqlResultsetMappings( tree, defaults );
        if ( defaults.canUseJavaAnnotations() ) {
            SqlResultSetMapping annotation = getJavaAnnotation( SqlResultSetMapping.class );
            addSqlResultsetMappingIfNeeded( annotation, results );

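The new map-key handling above (getMapKeyClass, getMapKeyColumn, getCollectionTable and the extra annotationToXml entries) mirrors the standard JPA 2 annotations for Map-valued attributes, so that orm.xml overrides can be translated into them. Purely as an illustration (the entity and column names below are hypothetical, not taken from this commit), the kind of mapping these overrides correspond to is:

    @Entity
    public class Order {
        @Id
        @GeneratedValue
        private Long id;

        // a map-valued element collection; the XML <map-key-column> and
        // <collection-table> elements parsed above override these annotations
        @ElementCollection
        @CollectionTable( name = "order_note", joinColumns = @JoinColumn( name = "order_id" ) )
        @MapKeyColumn( name = "note_key", length = 40 )
        @Column( name = "note_text" )
        private Map<String, String> notes = new HashMap<String, String>();
    }
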
@@ -23,7 +23,6 @@
 */
package org.hibernate.engine.jdbc.batch.internal;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.LinkedHashMap;
@@ -34,9 +33,8 @@ import org.slf4j.LoggerFactory;

import org.hibernate.engine.jdbc.batch.spi.Batch;
import org.hibernate.engine.jdbc.batch.spi.BatchObserver;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;
import org.hibernate.engine.jdbc.internal.proxy.ProxyBuilder;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
import org.hibernate.engine.jdbc.spi.SQLStatementLogger;

/**
 * Convenience base class for implementors of the Batch interface.
@@ -46,16 +44,21 @@ import org.hibernate.engine.jdbc.internal.proxy.ProxyBuilder;
public abstract class AbstractBatchImpl implements Batch {
    private static final Logger log = LoggerFactory.getLogger( AbstractBatchImpl.class );

    private final SQLStatementLogger statementLogger;
    private final SQLExceptionHelper exceptionHelper;
    private Object key;
    private LogicalConnectionImplementor logicalConnection;
    private Connection connectionProxy;
    private LinkedHashMap<String,PreparedStatement> statements = new LinkedHashMap<String,PreparedStatement>();
    private LinkedHashSet<BatchObserver> observers = new LinkedHashSet<BatchObserver>();

    protected AbstractBatchImpl(Object key, LogicalConnectionImplementor logicalConnection) {
    protected AbstractBatchImpl(Object key,
                                SQLStatementLogger statementLogger,
                                SQLExceptionHelper exceptionHelper) {
        if ( key == null || statementLogger == null || exceptionHelper == null ) {
            throw new IllegalArgumentException( "key, statementLogger, and exceptionHelper must be non-null." );
        }
        this.key = key;
        this.logicalConnection = logicalConnection;
        this.connectionProxy = ProxyBuilder.buildConnection( logicalConnection );
        this.statementLogger = statementLogger;
        this.exceptionHelper = exceptionHelper;
    }

    /**
@@ -67,12 +70,21 @@ public abstract class AbstractBatchImpl implements Batch {
    protected abstract void doExecuteBatch();

    /**
     * Convenience access to the underlying JDBC services.
     * Convenience access to the SQLException helper.
     *
     * @return The underlying SQLException helper.
     */
    protected SQLExceptionHelper getSqlExceptionHelper() {
        return exceptionHelper;
    }

    /**
     * Convenience access to the SQL statement logger.
     *
     * @return The underlying JDBC services.
     */
    protected JdbcServices getJdbcServices() {
        return logicalConnection.getJdbcServices();
    protected SQLStatementLogger getSqlStatementLogger() {
        return statementLogger;
    }

    /**
@@ -101,31 +113,39 @@ public abstract class AbstractBatchImpl implements Batch {
    /**
     * {@inheritDoc}
     */
    public final PreparedStatement getBatchStatement(String sql, boolean callable) {
    public final PreparedStatement getBatchStatement(Object key, String sql) {
        checkConsistentBatchKey( key );
        if ( sql == null ) {
            throw new IllegalArgumentException( "sql must be non-null." );
        }
        PreparedStatement statement = statements.get( sql );
        if ( statement == null ) {
            statement = buildBatchStatement( sql, callable );
            statements.put( sql, statement );
        }
        else {
            log.debug( "reusing batch statement" );
            getJdbcServices().getSqlStatementLogger().logStatement( sql );
        }
        if ( statement != null ) {
            log.debug( "reusing prepared statement" );
            statementLogger.logStatement( sql );
        }
        return statement;
    }

    private PreparedStatement buildBatchStatement(String sql, boolean callable) {
        try {
            if ( callable ) {
                return connectionProxy.prepareCall( sql );
            }
            else {
                return connectionProxy.prepareStatement( sql );
            }
    /**
     * {@inheritDoc}
     */
    // TODO: should this be final???
    @Override
    public void addBatchStatement(Object key, String sql, PreparedStatement preparedStatement) {
        checkConsistentBatchKey( key );
        if ( sql == null ) {
            throw new IllegalArgumentException( "sql must be non-null." );
        }
        catch ( SQLException sqle ) {
            log.error( "sqlexception escaped proxy", sqle );
            throw getJdbcServices().getSqlExceptionHelper().convert( sqle, "could not prepare batch statement", sql );
        if ( statements.put( sql, preparedStatement ) != null ) {
            log.error( "PreparedStatement was already in the batch, [" + sql + "]." );
        }
    }

    protected void checkConsistentBatchKey(Object key) {
        if ( ! this.key.equals( key ) ) {
            throw new IllegalStateException(
                    "specified key ["+ key + "] is different from internal batch key [" + this.key + "]."
            );
        }
    }

@@ -142,7 +162,7 @@ public abstract class AbstractBatchImpl implements Batch {
            doExecuteBatch();
        }
        finally {
            releaseStatements();
            release();
        }
    }
    finally {

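A minimal sketch of what a concrete subclass now looks like under the reworked constructor and helpers. The class name is hypothetical, only members visible in this diff are used, and a real implementation would also queue and execute statements (as the BatchingBatch and NonBatchingBatch changes below do):

    import java.sql.PreparedStatement;
    import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
    import org.hibernate.engine.jdbc.spi.SQLStatementLogger;
    import org.hibernate.jdbc.Expectation;

    public class LoggingOnlyBatch extends AbstractBatchImpl {
        public LoggingOnlyBatch(Object key, SQLStatementLogger statementLogger, SQLExceptionHelper exceptionHelper) {
            // the base class now validates key/logger/helper instead of taking a LogicalConnectionImplementor
            super( key, statementLogger, exceptionHelper );
        }

        public void addToBatch(Object key, String sql, Expectation expectation) {
            checkConsistentBatchKey( key );          // protected helper added in this commit
            getSqlStatementLogger().logStatement( sql );
            // a real subclass would call addBatch()/executeUpdate() on the registered statement here
        }

        protected void doExecuteBatch() {
            // execute and verify the queued statements here
        }

        public void release() {
            // clear any per-statement state (may be optional depending on the base class)
        }
    }
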
@@ -28,6 +28,8 @@ import org.slf4j.LoggerFactory;

import org.hibernate.engine.jdbc.batch.spi.Batch;
import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
import org.hibernate.engine.jdbc.spi.SQLStatementLogger;

/**
 * A builder for {@link Batch} instances.
@@ -46,15 +48,17 @@ public class BatchBuilder {
        this.size = size;
    }

    public void setSize(int size) {
    public void setJdbcBatchSize(int size) {
        this.size = size;
    }

    public Batch buildBatch(Object key, LogicalConnectionImplementor logicalConnection) {
    public Batch buildBatch(Object key,
                            SQLStatementLogger statementLogger,
                            SQLExceptionHelper exceptionHelper) {
        log.trace( "building batch [size={}]", size );
        return size > 1
                ? new BatchingBatch( key, logicalConnection, size )
                : new NonBatchingBatch( key, logicalConnection );
                ? new BatchingBatch( key, statementLogger, exceptionHelper, size )
                : new NonBatchingBatch( key, statementLogger, exceptionHelper );
    }
}

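Callers of the new buildBatch() signature pass the logging and exception-translation services directly instead of a LogicalConnectionImplementor. A sketch of a call site under these assumptions (batchKey is a placeholder for whatever key the caller uses, and jdbcServices stands in for an org.hibernate.engine.jdbc.spi.JdbcServices instance the caller already has):

    BatchBuilder builder = new BatchBuilder( 20 );   // JDBC batch size from configuration
    Batch batch = builder.buildBatch(
            batchKey,
            jdbcServices.getSqlStatementLogger(),
            jdbcServices.getSqlExceptionHelper()
    );
    // size > 1 yields a BatchingBatch, otherwise a NonBatchingBatch
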
@@ -25,18 +25,24 @@ package org.hibernate.engine.jdbc.batch.internal;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
import org.hibernate.engine.jdbc.spi.SQLStatementLogger;
import org.hibernate.jdbc.Expectation;

/**
 * A {@link org.hibernate.engine.jdbc.batch.spi.Batch} implementation which does batching based on a given size. Once the batch size is exceeded, the
 * batch is implicitly executed.
 * A {@link org.hibernate.engine.jdbc.batch.spi.Batch} implementation which does
 * batching based on a given size. Once the batch size is reached for a statement
 * in the batch, the entire batch is implicitly executed.
 *
 * @author Steve Ebersole
 */
@@ -44,33 +50,54 @@ public class BatchingBatch extends AbstractBatchImpl {
    private static final Logger log = LoggerFactory.getLogger( BatchingBatch.class );

    private final int batchSize;
    private Expectation[] expectations;
    private int batchPosition;

    public BatchingBatch(Object key, LogicalConnectionImplementor logicalConnection, int batchSize) {
        super( key, logicalConnection );
    // TODO: A Map is used for expectations so it is possible to track when a batch
    // is full (i.e., when the batch for a particular statement exceeds batchSize)
    // Until HHH-5797 is fixed, there will only be 1 statement in a batch, so it won't
    // be necessary to track expectations by statement.
    private Map<String, List<Expectation>> expectationsBySql;
    private int maxBatchPosition;

    public BatchingBatch(Object key,
                         SQLStatementLogger statementLogger,
                         SQLExceptionHelper exceptionHelper,
                         int batchSize) {
        super( key, statementLogger, exceptionHelper );
        this.batchSize = batchSize;
        this.expectations = new Expectation[ batchSize ];
        this.expectationsBySql = new HashMap<String, List<Expectation>>();
    }

    /**
     * {@inheritDoc}
     */
    public void addToBatch(Expectation expectation) {
        if ( !expectation.canBeBatched() ) {
    public void addToBatch(Object key, String sql, Expectation expectation) {
        checkConsistentBatchKey( key );
        if ( sql == null || expectation == null ) {
            throw new AssertionFailure( "sql or expection was null." );
        }
        if ( ! expectation.canBeBatched() ) {
            throw new HibernateException( "attempting to batch an operation which cannot be batched" );
        }
        for ( Map.Entry<String,PreparedStatement> entry : getStatements().entrySet() ) {
            try {
                entry.getValue().addBatch();
            }
            catch ( SQLException e ) {
                log.error( "sqlexception escaped proxy", e );
                throw getJdbcServices().getSqlExceptionHelper().convert( e, "could not perform addBatch", entry.getKey() );
            }
        final PreparedStatement statement = getStatements().get( sql );
        try {
            statement.addBatch();
        }
        expectations[ batchPosition++ ] = expectation;
        if ( batchPosition == batchSize ) {
        catch ( SQLException e ) {
            log.error( "sqlexception escaped proxy", e );
            throw getSqlExceptionHelper().convert( e, "could not perform addBatch", sql );
        }
        List<Expectation> expectations = expectationsBySql.get( sql );
        if ( expectations == null ) {
            expectations = new ArrayList<Expectation>( batchSize );
            expectationsBySql.put( sql, expectations );
        }
        expectations.add( expectation );
        maxBatchPosition = Math.max( maxBatchPosition, expectations.size() );

        // TODO: When HHH-5797 is fixed the following if-block should probably be moved before
        // adding the batch to the current statement (to detect that we have finished
        // with the previous entity).
        if ( maxBatchPosition == batchSize ) {
            notifyObserversImplicitExecution();
            doExecuteBatch();
        }
@@ -80,46 +107,89 @@ public class BatchingBatch extends AbstractBatchImpl {
     * {@inheritDoc}
     */
    protected void doExecuteBatch() {
        if ( batchPosition == 0 ) {
        if ( maxBatchPosition == 0 ) {
            log.debug( "no batched statements to execute" );
        }
        else {
            if ( log.isDebugEnabled() ) {
                log.debug( "Executing batch size: " + batchPosition );
                log.debug( "Executing {} statements with maximum batch size {} ",
                        getStatements().size(), maxBatchPosition
                );
            }

            try {
                for ( Map.Entry<String,PreparedStatement> entry : getStatements().entrySet() ) {
                    try {
                        final PreparedStatement statement = entry.getValue();
                        checkRowCounts( statement.executeBatch(), statement );
                    }
                    catch ( SQLException e ) {
                        log.error( "sqlexception escaped proxy", e );
                        throw getJdbcServices().getSqlExceptionHelper()
                                .convert( e, "could not perform addBatch", entry.getKey() );
                    }
                }
                executeStatements();
            }
            catch ( RuntimeException re ) {
                log.error( "Exception executing batch [{}]", re.getMessage() );
                throw re;
            }
            finally {
                batchPosition = 0;
                for ( List<Expectation> expectations : expectationsBySql.values() ) {
                    expectations.clear();
                }
                maxBatchPosition = 0;
            }

        }
    }

    private void checkRowCounts(int[] rowCounts, PreparedStatement ps) throws SQLException, HibernateException {
    private void executeStatements() {
        for ( Map.Entry<String,PreparedStatement> entry : getStatements().entrySet() ) {
            final String sql = entry.getKey();
            final PreparedStatement statement = entry.getValue();
            final List<Expectation> expectations = expectationsBySql.get( sql );
            if ( batchSize < expectations.size() ) {
                throw new IllegalStateException(
                        "Number of expectations [" + expectations.size() +
                        "] is greater than batch size [" + batchSize +
                        "] for statement [" + sql +
                        "]"
                );
            }
            if ( expectations.size() > 0 ) {
                if ( log.isDebugEnabled() ) {
                    log.debug( "Executing with batch of size {}: {}", expectations.size(), sql );
                }
                executeStatement( sql, statement, expectations );
                expectations.clear();
            }
            else {
                if ( log.isDebugEnabled() ) {
                    log.debug( "Skipped executing because batch size is 0: ", sql );
                }
            }
        }
    }

    private void executeStatement(String sql, PreparedStatement ps, List<Expectation> expectations) {
        try {
            checkRowCounts( sql, ps.executeBatch(), ps, expectations );
        }
        catch ( SQLException e ) {
            log.error( "sqlexception escaped proxy", e );
            throw getSqlExceptionHelper()
                    .convert( e, "could not execute statement: " + sql );
        }
    }

    private void checkRowCounts(String sql, int[] rowCounts, PreparedStatement ps, List<Expectation> expectations) {
        int numberOfRowCounts = rowCounts.length;
        if ( numberOfRowCounts != batchPosition ) {
        if ( numberOfRowCounts != expectations.size() ) {
            log.warn( "JDBC driver did not return the expected number of row counts" );
        }
        for ( int i = 0; i < numberOfRowCounts; i++ ) {
            expectations[i].verifyOutcome( rowCounts[i], ps, i );
        try {
            for ( int i = 0; i < numberOfRowCounts; i++ ) {
                expectations.get( i ).verifyOutcome( rowCounts[i], ps, i );
            }
        }
        catch ( SQLException e ) {
            log.error( "sqlexception escaped proxy", e );
            throw getSqlExceptionHelper()
                    .convert( e, "row count verification failed for statement: ", sql );
        }
    }

    public void release() {
        expectationsBySql.clear();
        maxBatchPosition = 0;
    }
}

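With this reworking, BatchingBatch keeps one Expectation list per SQL string, maxBatchPosition tracks the longest of those lists, and the whole batch executes implicitly as soon as that length reaches batchSize. A short illustrative walk-through, assuming a batch size of 3 and placeholder identifiers:

    // BatchingBatch constructed with batchSize = 3
    batch.addToBatch( batchKey, insertSql, expectation );  // expectations for insertSql: 1
    batch.addToBatch( batchKey, insertSql, expectation );  // 2
    batch.addToBatch( batchKey, insertSql, expectation );  // 3 == batchSize -> observers notified, doExecuteBatch() runs
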
@@ -25,12 +25,12 @@ package org.hibernate.engine.jdbc.batch.internal;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
import org.hibernate.engine.jdbc.spi.SQLStatementLogger;
import org.hibernate.jdbc.Expectation;

/**
@@ -42,22 +42,26 @@ import org.hibernate.jdbc.Expectation;
public class NonBatchingBatch extends AbstractBatchImpl {
    private static final Logger log = LoggerFactory.getLogger( NonBatchingBatch.class );

    protected NonBatchingBatch(Object key, LogicalConnectionImplementor logicalConnection) {
        super( key, logicalConnection );
    protected NonBatchingBatch(Object key,
                               SQLStatementLogger statementLogger,
                               SQLExceptionHelper exceptionHelper) {
        super( key, statementLogger, exceptionHelper );
    }

    public void addToBatch(Expectation expectation) {
    public void addToBatch(Object key, String sql, Expectation expectation) {
        checkConsistentBatchKey( key );
        if ( sql == null ) {
            throw new IllegalArgumentException( "sql must be non-null." );
        }
        notifyObserversImplicitExecution();
        for ( Map.Entry<String,PreparedStatement> entry : getStatements().entrySet() ) {
            try {
                final PreparedStatement statement = entry.getValue();
                final int rowCount = statement.executeUpdate();
                expectation.verifyOutcome( rowCount, statement, 0 );
            }
            catch ( SQLException e ) {
                log.error( "sqlexception escaped proxy", e );
                throw getJdbcServices().getSqlExceptionHelper().convert( e, "could not execute batch statement", entry.getKey() );
            }
        try {
            final PreparedStatement statement = getStatements().get( sql );
            final int rowCount = statement.executeUpdate();
            expectation.verifyOutcome( rowCount, statement, 0 );
        }
        catch ( SQLException e ) {
            log.error( "sqlexception escaped proxy", e );
            throw getSqlExceptionHelper().convert( e, "could not execute batch statement", sql );
        }
    }

@@ -52,20 +52,30 @@ public interface Batch {
    public void addObserver(BatchObserver observer);

    /**
     * Get a statement which is part of the batch, creating if necessary (and storing for next time).
     * Get a statement which is part of the batch.
     *
     * @param sql The SQL statement.
     * @param callable Is the SQL statement callable?
     * @return The prepared statement instance, representing the SQL statement.
     * @return the prepared statement representing the SQL statement, if the batch contained it;
     *         null, otherwise.
     */
    public PreparedStatement getBatchStatement(String sql, boolean callable);
    public PreparedStatement getBatchStatement(Object key, String sql);

    /**
     * Add a prepared statement to the batch.
     *
     * @param sql The SQL statement.
     */
    public void addBatchStatement(Object key, String sql, PreparedStatement preparedStatement);


    /**
     * Indicates completion of the current part of the batch.
     *
     * @param key
     * @param sql
     * @param expectation The expectation for the part's result.
     */
    public void addToBatch(Expectation expectation);
    public void addToBatch(Object key, String sql, Expectation expectation);

    /**
     * Execute this batch.

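Putting the revised contract together, a caller now drives a Batch roughly like this. This is only a sketch: batchKey, sql, expectation and the connection used to prepare the statement are assumed to come from the surrounding code, as in the ConnectionManagerImpl changes below.

    PreparedStatement ps = batch.getBatchStatement( batchKey, sql );
    if ( ps == null ) {
        ps = connection.prepareStatement( sql );        // prepared through the usual statement-preparation path
        batch.addBatchStatement( batchKey, sql, ps );   // register it under this key/SQL pair
    }
    // ... bind parameters on ps ...
    batch.addToBatch( batchKey, sql, expectation );     // queue one unit of work
    // later:
    batch.execute();                                    // executes and verifies the queued work
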
@@ -30,7 +30,6 @@ import java.io.ObjectOutputStream;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.slf4j.Logger;
@@ -41,12 +40,11 @@ import org.hibernate.ConnectionReleaseMode;
import org.hibernate.HibernateException;
import org.hibernate.Interceptor;
import org.hibernate.ScrollMode;
import org.hibernate.TransactionException;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.jdbc.internal.proxy.ProxyBuilder;
import org.hibernate.engine.jdbc.batch.internal.BatchBuilder;
import org.hibernate.engine.jdbc.batch.spi.Batch;
import org.hibernate.engine.jdbc.spi.ConnectionManager;
import org.hibernate.engine.jdbc.spi.ConnectionObserver;
import org.hibernate.jdbc.Batcher;
import org.hibernate.jdbc.Expectation;

/**
@@ -67,15 +65,13 @@ public class ConnectionManagerImpl implements ConnectionManager {

    // TODO: check if it's ok to change the method names in Callback

    private transient SessionFactoryImplementor factory;
    private transient Connection proxiedConnection;
    private transient Interceptor interceptor;

    private final Callback callback;
    private long transactionTimeout = -1;
    boolean isTransactionTimeoutSet;

    private transient LogicalConnectionImpl logicalConnection;
    private transient StatementPreparer statementPreparer;
    private final transient BatchBuilder batchBuilder;
    private Batch batch;

    /**
     * Constructs a ConnectionManager.
@@ -99,8 +95,7 @@ public class ConnectionManagerImpl implements ConnectionManager {
                suppliedConnection,
                releaseMode,
                factory.getJdbcServices(),
                factory.getStatistics() != null ? factory.getStatisticsImplementor() : null,
                factory.getSettings().getBatcherFactory()
                factory.getStatistics() != null ? factory.getStatisticsImplementor() : null
            )
        );
    }
@@ -114,16 +109,12 @@ public class ConnectionManagerImpl implements ConnectionManager {
            Interceptor interceptor,
            LogicalConnectionImpl logicalConnection
    ) {
        this.factory = factory;
        this.callback = callback;
        this.interceptor = interceptor;
        setupConnection( logicalConnection );
    }

    private void setupConnection(LogicalConnectionImpl logicalConnection) {
        this.logicalConnection = logicalConnection;
        this.logicalConnection.addObserver( callback );
        proxiedConnection = ProxyBuilder.buildConnection( logicalConnection );
        this.statementPreparer = new StatementPreparer( logicalConnection, factory.getSettings() );
        this.batchBuilder = factory.getSettings().getBatchBuilder();
    }

    /**
@@ -271,7 +262,9 @@ public class ConnectionManagerImpl implements ConnectionManager {
                log.debug( "transaction completed on session with on_close connection release mode; be sure to close the session to release JDBC resources!" );
            }
        }
        unsetTransactionTimeout();
        if ( statementPreparer != null ) {
            statementPreparer.unsetTransactionTimeout();
        }
    }

    private boolean isAfterTransactionRelease() {
@@ -282,37 +275,15 @@ public class ConnectionManagerImpl implements ConnectionManager {
        return logicalConnection.getConnectionReleaseMode() == ConnectionReleaseMode.ON_CLOSE;
    }

    public boolean isLogicallyConnected() {
    private boolean isLogicallyConnected() {
        return logicalConnection != null && logicalConnection.isOpen();
    }

    @Override
    public void setTransactionTimeout(int seconds) {
        isTransactionTimeoutSet = true;
        transactionTimeout = System.currentTimeMillis() / 1000 + seconds;
        statementPreparer.setTransactionTimeout( seconds );
    }

    /**
     * Unset the transaction timeout, called after the end of a
     * transaction.
     */
    private void unsetTransactionTimeout() {
        isTransactionTimeoutSet = false;
    }

    private void setStatementTimeout(PreparedStatement preparedStatement) throws SQLException {
        if ( isTransactionTimeoutSet ) {
            int timeout = (int) ( transactionTimeout - ( System.currentTimeMillis() / 1000 ) );
            if ( timeout <= 0) {
                throw new TransactionException("transaction timeout expired");
            }
            else {
                preparedStatement.setQueryTimeout(timeout);
            }
        }
    }


    /**
     * To be called after Session completion. Used to release the JDBC
     * connection.
@@ -337,6 +308,7 @@ public class ConnectionManagerImpl implements ConnectionManager {
        if ( ! isLogicallyConnected() ) {
            throw new IllegalStateException( "cannot manually disconnect because not logically connected." );
        }
        releaseBatch();
        return logicalConnection.manualDisconnect();
    }

@@ -382,10 +354,14 @@ public class ConnectionManagerImpl implements ConnectionManager {
        }
        try {
            log.trace( "performing cleanup" );
            releaseBatch();
            statementPreparer.close();
            Connection c = logicalConnection.close();
            return c;
        }
        finally {
            batch = null;
            statementPreparer = null;
            logicalConnection = null;
        }
    }
@@ -412,105 +388,68 @@ public class ConnectionManagerImpl implements ConnectionManager {
        afterStatement();
    }

    private abstract class StatementPreparer {
        private final String sql;
        StatementPreparer(String sql) {
            this.sql = getSQL( sql );
        }
        public String getSqlToPrepare() {
            return sql;
        }
        abstract PreparedStatement doPrepare() throws SQLException;
        public void afterPrepare(PreparedStatement preparedStatement) throws SQLException {
            setStatementTimeout( preparedStatement );
        }
    }

    /**
     * Get a non-batchable prepared statement to use for inserting / deleting / updating,
     * using JDBC3 getGeneratedKeys ({@link java.sql.Connection#prepareStatement(String, int)}).
     * <p/>
     * If not explicitly closed via {@link java.sql.PreparedStatement#close()}, it will be
     * released when the session is closed or disconnected.
     */
    @Override
    public PreparedStatement prepareStatement(String sql, final int autoGeneratedKeys)
            throws HibernateException {
        if ( autoGeneratedKeys == PreparedStatement.RETURN_GENERATED_KEYS ) {
            checkAutoGeneratedKeysSupportEnabled();
        }
        StatementPreparer statementPreparer = new StatementPreparer( sql ) {
            public PreparedStatement doPrepare() throws SQLException {
                return proxiedConnection.prepareStatement( getSqlToPrepare(), autoGeneratedKeys );
            }
        };
        return prepareStatement( statementPreparer, true );
        executeBatch();
        return statementPreparer.prepareStatement( getSQL( sql ), autoGeneratedKeys );
    }

    /**
     * Get a non-batchable prepared statement to use for inserting / deleting / updating.
     * using JDBC3 getGeneratedKeys ({@link java.sql.Connection#prepareStatement(String, String[])}).
     * <p/>
     * If not explicitly closed via {@link java.sql.PreparedStatement#close()}, it will be
     * released when the session is closed or disconnected.
     */
    @Override
    public PreparedStatement prepareStatement(String sql, final String[] columnNames) {
        checkAutoGeneratedKeysSupportEnabled();
        StatementPreparer statementPreparer = new StatementPreparer( sql ) {
            public PreparedStatement doPrepare() throws SQLException {
                return proxiedConnection.prepareStatement( getSqlToPrepare(), columnNames );
            }
        };
        return prepareStatement( statementPreparer, true );
    }

    private void checkAutoGeneratedKeysSupportEnabled() {
        if ( ! factory.getSettings().isGetGeneratedKeysEnabled() ) {
            throw new AssertionFailure("getGeneratedKeys() support is not enabled");
        }
        executeBatch();
        return statementPreparer.prepareStatement( getSQL( sql ), columnNames );
    }

    /**
     * Get a non-batchable prepared statement to use for selecting. Does not
     * result in execution of the current batch.
     * <p/>
     * If not explicitly closed via {@link java.sql.PreparedStatement#close()},
     * it will be released when the session is closed or disconnected.
     */
    @Override
    public PreparedStatement prepareSelectStatement(String sql) {
        return prepareStatement( sql, false, false );
        return statementPreparer.prepareStatement( getSQL( sql ), false );
    }

    /**
     * Get a non-batchable prepared statement to use for inserting / deleting / updating.
     * <p/>
     * If not explicitly closed via {@link java.sql.PreparedStatement#close()}, it will be
     * released when the session is closed or disconnected.
     */
    @Override
    public PreparedStatement prepareStatement(String sql, final boolean isCallable) {
        return prepareStatement( sql, isCallable, true );
        executeBatch();
        return statementPreparer.prepareStatement( getSQL( sql ), isCallable );
    }

    /**
     * Get a non-batchable callable statement to use for inserting / deleting / updating.
     * <p/>
     * If not explicitly closed via {@link java.sql.PreparedStatement#close()}, it will be
     * released when the session is closed or disconnected.
     */
    @Override
    public CallableStatement prepareCallableStatement(String sql) {
        executeBatch();
        log.trace("preparing callable statement");
        return CallableStatement.class.cast( prepareStatement( sql, true, true ) );
    }

    public PreparedStatement prepareStatement(String sql, final boolean isCallable, boolean forceExecuteBatch) {
        StatementPreparer statementPreparer = new StatementPreparer( sql ) {
            public PreparedStatement doPrepare() throws SQLException {
                return prepareStatementInternal( getSqlToPrepare(), isCallable );
            }
        };
        return prepareStatement( statementPreparer, forceExecuteBatch );
    }

    private PreparedStatement prepareStatementInternal(String sql, boolean isCallable) throws SQLException {
        return isCallable ?
                proxiedConnection.prepareCall( sql ) :
                proxiedConnection.prepareStatement( sql );
    }

    private PreparedStatement prepareScrollableStatementInternal(String sql,
                                                                 ScrollMode scrollMode,
                                                                 boolean isCallable) throws SQLException {
        return isCallable ?
                proxiedConnection.prepareCall(
                        sql, scrollMode.toResultSetType(), ResultSet.CONCUR_READ_ONLY
                ) :
                proxiedConnection.prepareStatement(
                        sql, scrollMode.toResultSetType(), ResultSet.CONCUR_READ_ONLY
                );
        return CallableStatement.class.cast( statementPreparer.prepareStatement( getSQL( sql ), true ) );
    }

    /**
@@ -518,105 +457,97 @@ public class ConnectionManagerImpl implements ConnectionManager {
     * (might be called many times before a single call to <tt>executeBatch()</tt>).
     * After setting parameters, call <tt>addToBatch</tt> - do not execute the
     * statement explicitly.
     * @see org.hibernate.jdbc.Batcher#addToBatch
     * @see org.hibernate.engine.jdbc.batch.spi.Batch#addToBatch
     * <p/>
     * If not explicitly closed via {@link java.sql.PreparedStatement#close()}, it will be
     * released when the session is closed or disconnected.
     */
    public PreparedStatement prepareBatchStatement(String sql, boolean isCallable) {
        String batchUpdateSQL = getSQL( sql );

        PreparedStatement batchUpdate = getBatcher().getStatement( batchUpdateSQL );
        if ( batchUpdate == null ) {
            batchUpdate = prepareStatement( batchUpdateSQL, isCallable, true ); // calls executeBatch()
            getBatcher().setStatement( batchUpdateSQL, batchUpdate );
    @Override
    public PreparedStatement prepareBatchStatement(Object key, String sql, boolean isCallable) {
        if ( key == null ) {
            throw new IllegalArgumentException( "batch key must be non-null." );
        }
        else {
            log.debug( "reusing prepared statement" );
            factory.getJdbcServices().getSqlStatementLogger().logStatement( batchUpdateSQL );
        String actualSQL = getSQL( sql );
        PreparedStatement batchUpdate = null;
        if ( batch != null ) {
            if ( key.equals( batch.getKey() ) ) {
                batchUpdate = batch.getBatchStatement( key, actualSQL );
            }
            else {
                batch.execute();
                batch = null;
            }
        }
        if ( batch == null ) {
            batch = batchBuilder.buildBatch(
                    key,
                    logicalConnection.getJdbcServices().getSqlStatementLogger(),
                    logicalConnection.getJdbcServices().getSqlExceptionHelper()
            );
        }
        if ( batchUpdate == null ) {
            batchUpdate = statementPreparer.prepareStatement( actualSQL, isCallable );
            batch.addBatchStatement( key, actualSQL, batchUpdate );
        }
        return batchUpdate;
    }

    private Batcher getBatcher() {
        return logicalConnection.getBatcher();
    }

    /**
     * Get a prepared statement for use in loading / querying. If not explicitly
     * released by <tt>closeQueryStatement()</tt>, it will be released when the
     * session is closed or disconnected.
     * Get a prepared statement for use in loading / querying. Does not
     * result in execution of the current batch.
     * <p/>
     * If not explicitly closed via {@link java.sql.PreparedStatement#close()},
     * it will be released when the session is closed or disconnected.
     */
    @Override
    public PreparedStatement prepareQueryStatement(
            String sql,
            final boolean isScrollable,
            final ScrollMode scrollMode,
            final boolean isCallable
    ) {
        if ( isScrollable && ! factory.getSettings().isScrollableResultSetsEnabled() ) {
            throw new AssertionFailure("scrollable result sets are not enabled");
        }
        StatementPreparer statementPreparer = new StatementPreparer( sql ) {
            public PreparedStatement doPrepare() throws SQLException {
                PreparedStatement ps =
                        isScrollable ?
                                prepareScrollableStatementInternal( getSqlToPrepare(), scrollMode, isCallable ) :
                                prepareStatementInternal( getSqlToPrepare(), isCallable )
                        ;
                return ps;
            }
            public void afterPrepare(PreparedStatement preparedStatement) throws SQLException {
                super.afterPrepare( preparedStatement );
                setStatementFetchSize( preparedStatement, getSqlToPrepare() );
                logicalConnection.getResourceRegistry().registerLastQuery( preparedStatement );
            }
        };
        return prepareStatement( statementPreparer, false );
    }

    private void setStatementFetchSize(PreparedStatement statement, String sql) throws SQLException {
        if ( factory.getSettings().getJdbcFetchSize() != null ) {
            statement.setFetchSize( factory.getSettings().getJdbcFetchSize() );
        }
    }

    private PreparedStatement prepareStatement(StatementPreparer preparer, boolean forceExecuteBatch) {
        if ( forceExecuteBatch ) {
            executeBatch();
        }
        try {
            PreparedStatement ps = preparer.doPrepare();
            preparer.afterPrepare( ps );
            return ps;
        }
        catch ( SQLException sqle ) {
            log.error( "sqlexception escaped proxy", sqle );
            throw logicalConnection.getJdbcServices().getSqlExceptionHelper().convert(
                    sqle, "could not prepare statement", preparer.getSqlToPrepare()
            );
        }
            final boolean isCallable) {
        PreparedStatement ps = (
                isScrollable ?
                        statementPreparer.prepareScrollableQueryStatement(
                                getSQL( sql ), scrollMode, isCallable
                        ) :
                        statementPreparer.prepareQueryStatement(
                                getSQL( sql ), isCallable
                        )
        );
        logicalConnection.getResourceRegistry().registerLastQuery( ps );
        return ps;
    }

    /**
     * Cancel the current query statement
     */
    @Override
    public void cancelLastQuery() throws HibernateException {
        logicalConnection.getResourceRegistry().cancelLastQuery();
    }

    public void abortBatch(SQLException sqle) {
        getBatcher().abortBatch( sqle );
    }

    public void addToBatch(Expectation expectation ) {
        try {
            getBatcher().addToBatch( expectation );
        }
        catch (SQLException sqle) {
            throw logicalConnection.getJdbcServices().getSqlExceptionHelper().convert(
                    sqle, "could not add to batch statement" );
        }
    @Override
    public void addToBatch(Object batchKey, String sql, Expectation expectation) {
        batch.addToBatch( batchKey, sql, expectation );
    }

    @Override
    public void executeBatch() throws HibernateException {
        getBatcher().executeBatch();
        if ( batch != null ) {
            batch.execute();
            batch.release(); // needed?
        }
    }

    @Override
    public void abortBatch() {
        releaseBatch();
    }

    private void releaseBatch() {
        if ( batch != null ) {
            batch.release();
        }
    }

    private String getSQL(String sql) {
@@ -673,8 +604,7 @@ public class ConnectionManagerImpl implements ConnectionManager {
                ois,
                factory.getJdbcServices(),
                factory.getStatistics() != null ? factory.getStatisticsImplementor() : null,
                connectionReleaseMode,
                factory.getSettings().getBatcherFactory()
                connectionReleaseMode
            )
        );
    }

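From a persister's point of view, the reworked ConnectionManager batching API in this diff is used roughly as follows. The variable names are illustrative only; the key identifies the batch, and switching to a different key (or preparing a non-query statement) flushes the current batch first:

    // prepare (or reuse) the batched statement for this key/SQL pair
    PreparedStatement insert = connectionManager.prepareBatchStatement( batchKey, insertSql, false );
    // ... bind values on insert ...
    connectionManager.addToBatch( batchKey, insertSql, expectation );

    // later, e.g. at flush time:
    connectionManager.executeBatch();   // delegates to Batch.execute() and releases it
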
@@ -23,14 +23,11 @@
 */
package org.hibernate.engine.jdbc.internal;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

@@ -42,8 +39,6 @@ import org.hibernate.engine.jdbc.spi.JdbcWrapper;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
import org.hibernate.engine.jdbc.spi.JdbcResourceRegistry;
import org.hibernate.engine.jdbc.spi.InvalidatableWrapper;
import org.hibernate.jdbc.Batcher;
import org.hibernate.jdbc.BatcherFactory;

/**
 * Standard implementation of the {@link org.hibernate.engine.jdbc.spi.JdbcResourceRegistry} contract
@@ -56,13 +51,11 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
    private final HashMap<Statement,Set<ResultSet>> xref = new HashMap<Statement,Set<ResultSet>>();
    private final Set<ResultSet> unassociatedResultSets = new HashSet<ResultSet>();
    private final SQLExceptionHelper exceptionHelper;
    private final Batcher batcher;

    private Statement lastQuery;

    public JdbcResourceRegistryImpl(SQLExceptionHelper exceptionHelper, BatcherFactory batcherFactory) {
    public JdbcResourceRegistryImpl(SQLExceptionHelper exceptionHelper) {
        this.exceptionHelper = exceptionHelper;
        this.batcher = batcherFactory.createBatcher( exceptionHelper );
    }

    public void register(Statement statement) {
@@ -73,10 +66,6 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
        xref.put( statement, null );
    }

    public Batcher getBatcher() {
        return batcher;
    }

    @SuppressWarnings({ "unchecked" })
    public void registerLastQuery(Statement statement) {
        log.trace( "registering last query statement [{}]", statement );
@@ -183,7 +172,6 @@ public class JdbcResourceRegistryImpl implements JdbcResourceRegistry {
    }

    private void cleanup() {
        batcher.closeStatements();
        for ( Map.Entry<Statement,Set<ResultSet>> entry : xref.entrySet() ) {
            if ( entry.getValue() != null ) {
                for ( ResultSet resultSet : entry.getValue() ) {

@ -41,8 +41,6 @@ import org.hibernate.engine.jdbc.spi.JdbcResourceRegistry;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.jdbc.spi.ConnectionObserver;
import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;
import org.hibernate.jdbc.Batcher;
import org.hibernate.jdbc.BatcherFactory;
import org.hibernate.jdbc.BorrowedConnectionProxy;
import org.hibernate.stat.StatisticsImplementor;

@ -60,7 +58,7 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
private final ConnectionReleaseMode connectionReleaseMode;
private final JdbcServices jdbcServices;
private final StatisticsImplementor statisticsImplementor;
private final JdbcResourceRegistryImpl jdbcResourceRegistry;
private final JdbcResourceRegistry jdbcResourceRegistry;
private final List<ConnectionObserver> observers = new ArrayList<ConnectionObserver>();

private boolean releasesEnabled = true;

@ -72,31 +70,20 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
public LogicalConnectionImpl(Connection userSuppliedConnection,
ConnectionReleaseMode connectionReleaseMode,
JdbcServices jdbcServices,
StatisticsImplementor statisticsImplementor,
BatcherFactory batcherFactory
StatisticsImplementor statisticsImplementor
) {
this.jdbcServices = jdbcServices;
this.statisticsImplementor = statisticsImplementor;
this( connectionReleaseMode,
jdbcServices,
statisticsImplementor,
userSuppliedConnection != null,
false
);
this.physicalConnection = userSuppliedConnection;
this.connectionReleaseMode =
determineConnectionReleaseMode(
jdbcServices, userSuppliedConnection != null, connectionReleaseMode
);
this.jdbcResourceRegistry =
new JdbcResourceRegistryImpl(
getJdbcServices().getSqlExceptionHelper(),
batcherFactory
);

this.isUserSuppliedConnection = ( userSuppliedConnection != null );
this.isClosed = false;
}

// used for deserialization
private LogicalConnectionImpl(ConnectionReleaseMode connectionReleaseMode,
JdbcServices jdbcServices,
StatisticsImplementor statisticsImplementor,
BatcherFactory batcherFactory,
boolean isUserSuppliedConnection,
boolean isClosed) {
this.connectionReleaseMode = determineConnectionReleaseMode(

@ -105,10 +92,7 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
this.jdbcServices = jdbcServices;
this.statisticsImplementor = statisticsImplementor;
this.jdbcResourceRegistry =
new JdbcResourceRegistryImpl(
getJdbcServices().getSqlExceptionHelper(),
batcherFactory
);
new JdbcResourceRegistryImpl( getJdbcServices().getSqlExceptionHelper() );

this.isUserSuppliedConnection = isUserSuppliedConnection;
this.isClosed = isClosed;

@ -230,10 +214,6 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
return connectionReleaseMode;
}

public Batcher getBatcher() {
return jdbcResourceRegistry.getBatcher();
}

public boolean hasBorrowedConnection() {
return borrowedConnection != null;
}

@ -436,14 +416,12 @@ public class LogicalConnectionImpl implements LogicalConnectionImplementor {
public static LogicalConnectionImpl deserialize(ObjectInputStream ois,
JdbcServices jdbcServices,
StatisticsImplementor statisticsImplementor,
ConnectionReleaseMode connectionReleaseMode,
BatcherFactory batcherFactory
ConnectionReleaseMode connectionReleaseMode
) throws IOException {
return new LogicalConnectionImpl(
connectionReleaseMode,
jdbcServices,
statisticsImplementor,
batcherFactory,
ois.readBoolean(),
ois.readBoolean()
);
@ -0,0 +1,276 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.jdbc.internal;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.ScrollMode;
import org.hibernate.TransactionException;
import org.hibernate.cfg.Settings;
import org.hibernate.engine.jdbc.internal.proxy.ProxyBuilder;
import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;

/**
* Prepares statements.
*
* @author Gail Badner
*/
public class StatementPreparer {

private static final Logger log = LoggerFactory.getLogger( StatementPreparer.class );

// TODO: Move JDBC settings into a different object...
private final Settings settings;
private final Connection proxiedConnection;
private final SQLExceptionHelper sqlExceptionHelper;

private long transactionTimeout = -1;
boolean isTransactionTimeoutSet;

/**
* Constructs a StatementPreparer object
* @param logicalConnection - the logical connection
* @param settings - contains settings configured for preparing statements
*/
public StatementPreparer(LogicalConnectionImplementor logicalConnection, Settings settings) {
this.settings = settings;
proxiedConnection = ProxyBuilder.buildConnection( logicalConnection );
sqlExceptionHelper = logicalConnection.getJdbcServices().getSqlExceptionHelper();
}

private abstract class StatementPreparation {
private final String sql;
protected abstract PreparedStatement doPrepare() throws SQLException;
public StatementPreparation(String sql) {
this.sql = sql;
}
public String getSql() {
return sql;
}
public void postProcess(PreparedStatement preparedStatement) throws SQLException {
setStatementTimeout( preparedStatement );
}
public PreparedStatement prepareAndPostProcess() {
try {
PreparedStatement ps = doPrepare();
postProcess( ps );
return ps;
}
catch ( SQLException sqle ) {
log.error( "sqlexception escaped proxy", sqle );
throw sqlExceptionHelper.convert(
sqle, "could not prepare statement", sql
);
}
}
}

private abstract class QueryStatementPreparation extends StatementPreparation {
QueryStatementPreparation(String sql) {
super( sql );
}
public void postProcess(PreparedStatement preparedStatement) throws SQLException {
super.postProcess( preparedStatement );
setStatementFetchSize( preparedStatement );
}
}

public void close() {
try {
proxiedConnection.close();
}
catch (SQLException sqle) {
log.error( "sqlexception escaped proxy", sqle );
throw sqlExceptionHelper.convert( sqle, "could not close connection proxy" );
}
}

/**
* Prepare a statement. If configured, the query timeout is set.
* <p/>
* If not explicitly closed via {@link java.sql.PreparedStatement#close()},
* it will be released when the session is closed or disconnected.
*
* @param sql - the SQL for the statement to be prepared
* @param isCallable - true, if a callable statement is to be prepared
* @return the prepared statement
*/
public PreparedStatement prepareStatement(String sql, final boolean isCallable) {
StatementPreparation statementPreparation = new StatementPreparation( sql ) {
public PreparedStatement doPrepare() throws SQLException {
return isCallable ?
proxiedConnection.prepareCall( getSql() ) :
proxiedConnection.prepareStatement( getSql() );
}
};
return statementPreparation.prepareAndPostProcess();
}

/**
* Get a prepared statement to use for inserting / deleting / updating,
* using JDBC3 getGeneratedKeys ({@link java.sql.Connection#prepareStatement(String, int)}).
* If configured, the query timeout is set.
* <p/>
* If not explicitly closed via {@link java.sql.PreparedStatement#close()},
* it will be released when the session is closed or disconnected.
*
* @param sql - the SQL for the statement to be prepared
* @param autoGeneratedKeys - a flag indicating whether auto-generated
* keys should be returned; one of
* <code>PreparedStatement.RETURN_GENERATED_KEYS</code> or
* <code>Statement.NO_GENERATED_KEYS</code>
* @return the prepared statement
*/
public PreparedStatement prepareStatement(String sql, final int autoGeneratedKeys)
throws HibernateException {
if ( autoGeneratedKeys == PreparedStatement.RETURN_GENERATED_KEYS ) {
checkAutoGeneratedKeysSupportEnabled();
}
StatementPreparation statementPreparation = new StatementPreparation( sql ) {
public PreparedStatement doPrepare() throws SQLException {
return proxiedConnection.prepareStatement( getSql(), autoGeneratedKeys );
}
};
return statementPreparation.prepareAndPostProcess();
}

/**
* Get a prepared statement to use for inserting / deleting / updating,
* using JDBC3 getGeneratedKeys ({@link java.sql.Connection#prepareStatement(String, String[])}).
* If configured, the query timeout is set.
* <p/>
* If not explicitly closed via {@link java.sql.PreparedStatement#close()},
* it will be released when the session is closed or disconnected.
*/
public PreparedStatement prepareStatement(String sql, final String[] columnNames) {
checkAutoGeneratedKeysSupportEnabled();
StatementPreparation preparation = new StatementPreparation( sql ) {
public PreparedStatement doPrepare() throws SQLException {
return proxiedConnection.prepareStatement( getSql(), columnNames );
}
};
return preparation.prepareAndPostProcess();
}

private void checkAutoGeneratedKeysSupportEnabled() {
if ( ! settings.isGetGeneratedKeysEnabled() ) {
throw new AssertionFailure("getGeneratedKeys() support is not enabled");
}
}

/**
* Get a prepared statement for use in loading / querying.
* If configured, the query timeout and statement fetch size are set.
* <p/>
* If not explicitly closed via {@link java.sql.PreparedStatement#close()},
* it will be released when the session is closed or disconnected.
*/
public PreparedStatement prepareQueryStatement(
String sql,
final boolean isCallable
) {
StatementPreparation prep = new QueryStatementPreparation( sql ) {
public PreparedStatement doPrepare() throws SQLException {
return isCallable ?
proxiedConnection.prepareCall( getSql() ) :
proxiedConnection.prepareStatement( getSql() );
}
};
return prep.prepareAndPostProcess();
}

/**
* Get a scrollable prepared statement for use in loading / querying.
* If configured, the query timeout and statement fetch size are set.
* <p/>
* If not explicitly closed via {@link java.sql.PreparedStatement#close()},
* it will be released when the session is closed or disconnected.
*/
public PreparedStatement prepareScrollableQueryStatement(
String sql,
final ScrollMode scrollMode,
final boolean isCallable
) {
if ( ! settings.isScrollableResultSetsEnabled() ) {
throw new AssertionFailure("scrollable result sets are not enabled");
}
StatementPreparation prep = new QueryStatementPreparation( sql ) {
public PreparedStatement doPrepare() throws SQLException {
return isCallable ?
proxiedConnection.prepareCall(
getSql(), scrollMode.toResultSetType(), ResultSet.CONCUR_READ_ONLY
) :
proxiedConnection.prepareStatement(
getSql(), scrollMode.toResultSetType(), ResultSet.CONCUR_READ_ONLY
);
}
};
return prep.prepareAndPostProcess();
}

/**
* Sets the transaction timeout.
* @param seconds - number of seconds until the transaction times out.
*/
public void setTransactionTimeout(int seconds) {
isTransactionTimeoutSet = true;
transactionTimeout = System.currentTimeMillis() / 1000 + seconds;
}

/**
* Unset the transaction timeout, called after the end of a
* transaction.
*/
public void unsetTransactionTimeout() {
isTransactionTimeoutSet = false;
}

private void setStatementTimeout(PreparedStatement preparedStatement) throws SQLException {
if ( isTransactionTimeoutSet ) {
int timeout = (int) ( transactionTimeout - ( System.currentTimeMillis() / 1000 ) );
if ( timeout <= 0) {
throw new TransactionException("transaction timeout expired");
}
else {
preparedStatement.setQueryTimeout(timeout);
}
}
}

private void setStatementFetchSize(PreparedStatement statement) throws SQLException {
if ( settings.getJdbcFetchSize() != null ) {
statement.setFetchSize( settings.getJdbcFetchSize() );
}
}
}
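As a quick orientation to the class added above, here is a minimal caller-side sketch (not part of this commit) showing the call sequence its javadoc describes. The logical connection and settings are assumed to be supplied by the owning session; the table name, SQL strings, and the sketch class name are placeholders.

import java.sql.PreparedStatement;

import org.hibernate.cfg.Settings;
import org.hibernate.engine.jdbc.internal.StatementPreparer;
import org.hibernate.engine.jdbc.spi.LogicalConnectionImplementor;

// Illustrative only: drives StatementPreparer the way its javadoc describes.
public class StatementPreparerUsageSketch {
	public static void sketch(LogicalConnectionImplementor logicalConnection, Settings settings) {
		StatementPreparer preparer = new StatementPreparer( logicalConnection, settings );

		// A transaction-level timeout is translated into per-statement query timeouts.
		preparer.setTransactionTimeout( 30 );

		// DML statement: the query timeout is applied when a transaction timeout is set.
		PreparedStatement insert = preparer.prepareStatement( "insert into T_EXAMPLE (id) values (?)", false );

		// Query statement: the query timeout and the configured JDBC fetch size are applied.
		PreparedStatement query = preparer.prepareQueryStatement( "select id from T_EXAMPLE", false );

		// After the transaction completes.
		preparer.unsetTransactionTimeout();
	}
}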
@ -26,6 +26,10 @@ package org.hibernate.engine.jdbc.internal.proxy;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.Statement;
import java.util.Arrays;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Invocation handler for {@link java.sql.PreparedStatement} proxies

@ -33,6 +37,8 @@ import java.sql.Statement;
* @author Steve Ebersole
*/
public class PreparedStatementProxyHandler extends AbstractStatementProxyHandler {
private static final Logger log = LoggerFactory.getLogger( ConnectionProxyHandler.class );

private final String sql;

protected PreparedStatementProxyHandler(

@ -63,6 +69,7 @@ public class PreparedStatementProxyHandler extends AbstractStatementProxyHandler
}

private void journalParameterBind(Method method, Object[] args) {
log.trace( "binding via {}: []", method.getName(), Arrays.asList( args ) );
}

private boolean isExecution(Method method) {
@ -79,6 +79,10 @@ public interface ConnectionManager extends Serializable {
*/
void afterStatement();

/**
* Sets the transaction timeout.
* @param seconds - number of seconds until the transaction times out.
*/
void setTransactionTimeout(int seconds);

/**

@ -131,28 +135,43 @@ public interface ConnectionManager extends Serializable {
/**
* Get a non-batchable prepared statement to use for inserting / deleting / updating,
* using JDBC3 getGeneratedKeys ({@link java.sql.Connection#prepareStatement(String, int)}).
* <p/>
* If not explicitly closed via {@link java.sql.PreparedStatement#close()}, it will be
* released when the session is closed or disconnected.
*/
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys);

/**
* Get a non-batchable prepared statement to use for inserting / deleting / updating,
* using JDBC3 getGeneratedKeys ({@link java.sql.Connection#prepareStatement(String, String[])}).
* <p/>
* If not explicitly closed via {@link java.sql.PreparedStatement#close()}, it will be
* released when the session is closed or disconnected.
*/
public PreparedStatement prepareStatement(String sql, String[] columnNames);

/**
* Get a non-batchable prepared statement to use for selecting. Does not
* result in execution of the current batch.
* <p/>
* If not explicitly closed via {@link java.sql.PreparedStatement#close()},
* it will be released when the session is closed or disconnected.
*/
public PreparedStatement prepareSelectStatement(String sql);

/**
* Get a non-batchable prepared statement to use for inserting / deleting / updating.
* <p/>
* If not explicitly closed via {@link java.sql.PreparedStatement#close()}, it will be
* released when the session is closed or disconnected.
*/
public PreparedStatement prepareStatement(String sql, boolean isCallable);

/**
* Get a non-batchable callable statement to use for inserting / deleting / updating.
* <p/>
* If not explicitly closed via {@link java.sql.PreparedStatement#close()}, it will be
* released when the session is closed or disconnected.
*/
public CallableStatement prepareCallableStatement(String sql);

@ -161,28 +180,34 @@ public interface ConnectionManager extends Serializable {
* (might be called many times before a single call to <tt>executeBatch()</tt>).
* After setting parameters, call <tt>addToBatch</tt> - do not execute the
* statement explicitly.
* @see org.hibernate.jdbc.Batcher#addToBatch
* @see org.hibernate.engine.jdbc.batch.spi.Batch#addToBatch
* <p/>
* If not explicitly closed via {@link java.sql.PreparedStatement#close()}, it will be
* released when the session is closed or disconnected.
*/
public PreparedStatement prepareBatchStatement(String sql, boolean isCallable);
public PreparedStatement prepareBatchStatement(Object key, String sql, boolean isCallable);

/**
* Get a prepared statement for use in loading / querying. If not explicitly
* released by <tt>closeQueryStatement()</tt>, it will be released when the
* session is closed or disconnected.
* Get a prepared statement for use in loading / querying. Does not
* result in execution of the current batch.
* <p/>
* If not explicitly closed via {@link java.sql.PreparedStatement#close()},
* it will be released when the session is closed or disconnected.
*/
public PreparedStatement prepareQueryStatement(
String sql,
boolean isScrollable,
ScrollMode scrollMode,
boolean isCallable);

/**
* Cancel the current query statement
*/
public void cancelLastQuery();

public void abortBatch(SQLException sqle);
public void abortBatch();

public void addToBatch(Expectation expectation );
public void addToBatch(Object batchKey, String sql, Expectation expectation);

public void executeBatch();
}
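The persister hunks later in this diff switch to this batch-key style, passing the persister itself as the key so that unrelated persisters do not share one JDBC batch. The sketch below (not part of this commit) collects that call pattern in one place; only the chained calls visible in this diff are relied on, while the SessionImplementor import path, the class name, and the placeholder parameter binding are assumptions.

import java.sql.PreparedStatement;
import java.sql.SQLException;

import org.hibernate.engine.SessionImplementor;
import org.hibernate.jdbc.Expectation;

// Illustrative only: the batch-key call pattern used by the persisters below.
public class BatchKeyUsageSketch {
	public void writeRow(SessionImplementor session, Object batchKey, String sql, Expectation expectation)
			throws SQLException {
		// Obtain (or re-obtain) the batched statement for this key and SQL.
		PreparedStatement st = session.getJDBCContext().getConnectionManager()
				.prepareBatchStatement( batchKey, sql, false );
		try {
			st.setLong( 1, 1L ); // placeholder parameter binding
			// Queue the row; execution happens later via executeBatch().
			session.getJDBCContext().getConnectionManager().addToBatch( batchKey, sql, expectation );
		}
		catch ( SQLException sqle ) {
			// The revised contract aborts the current batch without needing the triggering exception.
			session.getJDBCContext().getConnectionManager().abortBatch();
			throw sqle;
		}
	}
}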
@ -346,9 +346,19 @@ public class DefaultMergeEventListener extends AbstractSaveEventListener
|
|||
Object propertyFromEntity = persister.getPropertyValue( entity, propertyName, source.getEntityMode() );
|
||||
Type propertyType = persister.getPropertyType( propertyName );
|
||||
EntityEntry copyEntry = source.getPersistenceContext().getEntry( copy );
|
||||
if ( propertyFromCopy == null || ! propertyType.isEntityType() ) {
|
||||
log.trace( "property '" + copyEntry.getEntityName() + "." + propertyName +
|
||||
"' is null or not an entity; " + propertyName + " =["+propertyFromCopy+"]");
|
||||
if ( propertyFromCopy == null ||
|
||||
propertyFromEntity == null ||
|
||||
! propertyType.isEntityType() ||
|
||||
! copyCache.containsKey( propertyFromEntity ) ) {
|
||||
if ( log.isTraceEnabled() ) {
|
||||
String fullPropertyName = "property '" + copyEntry.getEntityName() + "." + propertyName;
|
||||
log.trace( fullPropertyName + " in copy is " + ( propertyFromCopy == null ? "null" : propertyFromCopy ) );
|
||||
log.trace( fullPropertyName + " in original is " + ( propertyFromCopy == null ? "null" : propertyFromCopy ) );
|
||||
log.trace( fullPropertyName + ( propertyType.isEntityType() ? " is" : " is not" ) + " an entity type" );
|
||||
if ( propertyFromEntity != null && ! copyCache.containsKey( propertyFromEntity ) ) {
|
||||
log.trace( fullPropertyName + " is not in copy cache" );
|
||||
}
|
||||
}
|
||||
if ( isNullabilityCheckedGlobal( source ) ) {
|
||||
throw ex;
|
||||
}
|
||||
|
@ -358,28 +368,18 @@ public class DefaultMergeEventListener extends AbstractSaveEventListener
|
|||
saveTransientEntity( copy, entityName, requestedId, source, copyCache, false );
|
||||
}
|
||||
}
|
||||
if ( ! copyCache.containsKey( propertyFromEntity ) ) {
|
||||
log.trace( "property '" + copyEntry.getEntityName() + "." + propertyName +
|
||||
"' from original entity is not in copyCache; " + propertyName + " =["+propertyFromEntity+"]");
|
||||
if ( isNullabilityCheckedGlobal( source ) ) {
|
||||
throw ex;
|
||||
if ( log.isTraceEnabled() && propertyFromEntity != null ) {
|
||||
if ( ( ( EventCache ) copyCache ).isOperatedOn( propertyFromEntity ) ) {
|
||||
log.trace( "property '" + copyEntry.getEntityName() + "." + propertyName +
|
||||
"' from original entity is in copyCache and is in the process of being merged; " +
|
||||
propertyName + " =["+propertyFromEntity+"]");
|
||||
}
|
||||
else {
|
||||
// retry save w/o checking non-nullable properties
|
||||
// (the failure will be detected later)
|
||||
saveTransientEntity( copy, entityName, requestedId, source, copyCache, false );
|
||||
log.trace( "property '" + copyEntry.getEntityName() + "." + propertyName +
|
||||
"' from original entity is in copyCache and is not in the process of being merged; " +
|
||||
propertyName + " =["+propertyFromEntity+"]");
|
||||
}
|
||||
}
|
||||
if ( ( ( EventCache ) copyCache ).isOperatedOn( propertyFromEntity ) ) {
|
||||
log.trace( "property '" + copyEntry.getEntityName() + "." + propertyName +
|
||||
"' from original entity is in copyCache and is in the process of being merged; " +
|
||||
propertyName + " =["+propertyFromEntity+"]");
|
||||
}
|
||||
else {
|
||||
log.trace( "property '" + copyEntry.getEntityName() + "." + propertyName +
|
||||
"' from original entity is in copyCache and is not in the process of being merged; " +
|
||||
propertyName + " =["+propertyFromEntity+"]");
|
||||
}
|
||||
// continue...; we'll find out if it ends up not getting saved later
|
||||
}
|
||||
|
||||
|
|
|
@ -99,7 +99,6 @@ import org.hibernate.event.EventListeners;
import org.hibernate.id.IdentifierGenerator;
import org.hibernate.id.UUIDGenerator;
import org.hibernate.id.factory.IdentifierGeneratorFactory;
import org.hibernate.jdbc.BatcherFactory;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.RootClass;

@ -1235,10 +1234,6 @@ public final class SessionFactoryImpl implements SessionFactory, SessionFactoryI
return filters.keySet();
}

public BatcherFactory getBatcherFactory() {
return settings.getBatcherFactory();
}

public IdentifierGenerator getIdentifierGenerator(String rootEntityName) {
return (IdentifierGenerator) identifierGenerators.get(rootEntityName);
}
|
@ -1,161 +0,0 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Middleware LLC.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*
|
||||
*/
|
||||
package org.hibernate.jdbc;
|
||||
|
||||
import java.sql.PreparedStatement;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
|
||||
|
||||
/**
|
||||
* Manages prepared statements and batching.
|
||||
*
|
||||
* @author Gavin King
|
||||
*/
|
||||
public abstract class AbstractBatcher implements Batcher {
|
||||
|
||||
protected static final Logger log = LoggerFactory.getLogger( AbstractBatcher.class );
|
||||
|
||||
private final SQLExceptionHelper exceptionHelper;
|
||||
private final int jdbcBatchSize;
|
||||
|
||||
private PreparedStatement batchUpdate;
|
||||
private String batchUpdateSQL;
|
||||
private boolean isClosingBatchUpdate = false;
|
||||
|
||||
public AbstractBatcher(SQLExceptionHelper exceptionHelper, int jdbcBatchSize) {
|
||||
this.exceptionHelper = exceptionHelper;
|
||||
this.jdbcBatchSize = jdbcBatchSize;
|
||||
}
|
||||
|
||||
public final int getJdbcBatchSize() {
|
||||
return jdbcBatchSize;
|
||||
}
|
||||
|
||||
public boolean hasOpenResources() {
|
||||
try {
|
||||
return !isClosingBatchUpdate && batchUpdate != null && ! batchUpdate.isClosed();
|
||||
}
|
||||
catch (SQLException sqle) {
|
||||
throw exceptionHelper.convert(
|
||||
sqle,
|
||||
"Could check to see if batch statement was closed",
|
||||
batchUpdateSQL
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public PreparedStatement getStatement(String sql) {
|
||||
return batchUpdate != null && batchUpdateSQL.equals( sql ) ? batchUpdate : null;
|
||||
}
|
||||
|
||||
public void setStatement(String sql, PreparedStatement ps) {
|
||||
checkNotClosingBatchUpdate();
|
||||
batchUpdateSQL = sql;
|
||||
batchUpdate = ps;
|
||||
}
|
||||
|
||||
protected PreparedStatement getStatement() {
|
||||
return batchUpdate;
|
||||
}
|
||||
|
||||
public void abortBatch(SQLException sqle) {
|
||||
closeStatements();
|
||||
}
|
||||
|
||||
/**
|
||||
* Actually releases the batcher, allowing it to cleanup internally held
|
||||
* resources.
|
||||
*/
|
||||
public void closeStatements() {
|
||||
try {
|
||||
closeBatchUpdate();
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
//no big deal
|
||||
log.warn( "Could not close a JDBC prepared statement", sqle );
|
||||
}
|
||||
batchUpdate = null;
|
||||
batchUpdateSQL = null;
|
||||
}
|
||||
|
||||
public void executeBatch() throws HibernateException {
|
||||
checkNotClosingBatchUpdate();
|
||||
if (batchUpdate!=null) {
|
||||
try {
|
||||
try {
|
||||
doExecuteBatch(batchUpdate);
|
||||
}
|
||||
finally {
|
||||
closeBatchUpdate();
|
||||
}
|
||||
}
|
||||
catch (SQLException sqle) {
|
||||
throw exceptionHelper.convert(
|
||||
sqle,
|
||||
"Could not execute JDBC batch update",
|
||||
batchUpdateSQL
|
||||
);
|
||||
}
|
||||
finally {
|
||||
batchUpdate=null;
|
||||
batchUpdateSQL=null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract void doExecuteBatch(PreparedStatement ps) throws SQLException, HibernateException;
|
||||
|
||||
|
||||
private void closeBatchUpdate() throws SQLException{
|
||||
checkNotClosingBatchUpdate();
|
||||
try {
|
||||
if ( batchUpdate != null ) {
|
||||
isClosingBatchUpdate = true;
|
||||
batchUpdate.close();
|
||||
}
|
||||
}
|
||||
finally {
|
||||
isClosingBatchUpdate = false;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void checkNotClosingBatchUpdate() {
|
||||
if ( isClosingBatchUpdate ) {
|
||||
throw new IllegalStateException( "Cannot perform operation while closing batch update." );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,76 +0,0 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Middleware LLC.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*
|
||||
*/
|
||||
package org.hibernate.jdbc;
|
||||
|
||||
import java.sql.CallableStatement;
|
||||
import java.sql.Connection;
|
||||
import java.sql.PreparedStatement;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.ScrollMode;
|
||||
import org.hibernate.dialect.Dialect;
|
||||
|
||||
/**
|
||||
* Manages <tt>PreparedStatement</tt>s for a session. Abstracts JDBC
|
||||
* batching to maintain the illusion that a single logical batch
|
||||
* exists for the whole session, even when batching is disabled.
|
||||
* Provides transparent <tt>PreparedStatement</tt> caching.
|
||||
*
|
||||
* @see java.sql.PreparedStatement
|
||||
* @see org.hibernate.impl.SessionImpl
|
||||
* @author Gavin King
|
||||
*/
|
||||
public interface Batcher {
|
||||
|
||||
public PreparedStatement getStatement(String sql);
|
||||
public void setStatement(String sql, PreparedStatement ps);
|
||||
public boolean hasOpenResources();
|
||||
|
||||
/**
|
||||
* Add an insert / delete / update to the current batch (might be called multiple times
|
||||
* for single <tt>prepareBatchStatement()</tt>)
|
||||
*/
|
||||
public void addToBatch(Expectation expectation) throws SQLException, HibernateException;
|
||||
|
||||
/**
|
||||
* Execute the batch
|
||||
*/
|
||||
public void executeBatch() throws HibernateException;
|
||||
|
||||
/**
|
||||
* Must be called when an exception occurs
|
||||
* @param sqle the (not null) exception that is the reason for aborting
|
||||
*/
|
||||
public void abortBatch(SQLException sqle);
|
||||
|
||||
/**
|
||||
* Actually releases the batcher, allowing it to cleanup internally held
|
||||
* resources.
|
||||
*/
|
||||
public void closeStatements();
|
||||
}
|
||||
|
|
@ -1,100 +0,0 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Middleware LLC.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*
|
||||
*/
|
||||
package org.hibernate.jdbc;
|
||||
|
||||
import java.sql.PreparedStatement;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
|
||||
|
||||
/**
|
||||
* An implementation of the <tt>Batcher</tt> interface that
|
||||
* actually uses batching
|
||||
* @author Gavin King
|
||||
*/
|
||||
public class BatchingBatcher extends AbstractBatcher {
|
||||
|
||||
private Expectation[] expectations;
|
||||
|
||||
private int currentSize;
|
||||
public BatchingBatcher(SQLExceptionHelper exceptionHelper, int jdbcBatchSize) {
|
||||
super( exceptionHelper, jdbcBatchSize );
|
||||
expectations = new Expectation[ jdbcBatchSize ];
|
||||
currentSize = 0;
|
||||
}
|
||||
|
||||
public void addToBatch(Expectation expectation) throws SQLException, HibernateException {
|
||||
if ( !expectation.canBeBatched() ) {
|
||||
throw new HibernateException( "attempting to batch an operation which cannot be batched" );
|
||||
}
|
||||
PreparedStatement batchUpdate = getStatement();
|
||||
batchUpdate.addBatch();
|
||||
expectations[ currentSize++ ] = expectation;
|
||||
if ( currentSize == getJdbcBatchSize() ) {
|
||||
doExecuteBatch( batchUpdate );
|
||||
}
|
||||
}
|
||||
|
||||
protected void doExecuteBatch(PreparedStatement ps) throws SQLException, HibernateException {
|
||||
if ( currentSize == 0 ) {
|
||||
log.debug( "no batched statements to execute" );
|
||||
}
|
||||
else {
|
||||
if ( log.isDebugEnabled() ) {
|
||||
log.debug( "Executing batch size: " + currentSize );
|
||||
}
|
||||
|
||||
try {
|
||||
checkRowCounts( ps.executeBatch(), ps );
|
||||
}
|
||||
catch (RuntimeException re) {
|
||||
log.error( "Exception executing batch: ", re );
|
||||
throw re;
|
||||
}
|
||||
finally {
|
||||
currentSize = 0;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
private void checkRowCounts(int[] rowCounts, PreparedStatement ps) throws SQLException, HibernateException {
|
||||
int numberOfRowCounts = rowCounts.length;
|
||||
if ( numberOfRowCounts != currentSize ) {
|
||||
log.warn( "JDBC driver did not return the expected number of row counts" );
|
||||
}
|
||||
for ( int i = 0; i < numberOfRowCounts; i++ ) {
|
||||
expectations[i].verifyOutcome( rowCounts[i], ps, i );
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1083,7 +1083,7 @@ public abstract class AbstractCollectionPersister
|
|||
boolean useBatch = expectation.canBeBatched();
|
||||
String sql = getSQLDeleteString();
|
||||
if ( useBatch ) {
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement( sql, callable );
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement( this, sql, callable );
|
||||
}
|
||||
else {
|
||||
st = session.getJDBCContext().getConnectionManager().prepareStatement( sql, callable );
|
||||
|
@ -1095,7 +1095,7 @@ public abstract class AbstractCollectionPersister
|
|||
|
||||
writeKey( st, id, offset, session );
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( expectation );
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( this, sql, expectation );
|
||||
}
|
||||
else {
|
||||
expectation.verifyOutcome( st.executeUpdate(), st, -1 );
|
||||
|
@ -1103,7 +1103,7 @@ public abstract class AbstractCollectionPersister
|
|||
}
|
||||
catch ( SQLException sqle ) {
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().abortBatch( sqle );
|
||||
session.getJDBCContext().getConnectionManager().abortBatch();
|
||||
}
|
||||
throw sqle;
|
||||
}
|
||||
|
@ -1161,7 +1161,9 @@ public abstract class AbstractCollectionPersister
|
|||
String sql = getSQLInsertRowString();
|
||||
|
||||
if ( useBatch ) {
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement( sql, callable );
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement(
|
||||
this, sql, callable
|
||||
);
|
||||
}
|
||||
else {
|
||||
st = session.getJDBCContext().getConnectionManager().prepareStatement( sql, callable );
|
||||
|
@ -1182,7 +1184,7 @@ public abstract class AbstractCollectionPersister
|
|||
loc = writeElement(st, collection.getElement(entry), loc, session );
|
||||
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( expectation );
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( this, sql, expectation );
|
||||
}
|
||||
else {
|
||||
expectation.verifyOutcome( st.executeUpdate(), st, -1 );
|
||||
|
@ -1193,7 +1195,7 @@ public abstract class AbstractCollectionPersister
|
|||
}
|
||||
catch ( SQLException sqle ) {
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().abortBatch( sqle );
|
||||
session.getJDBCContext().getConnectionManager().abortBatch();
|
||||
}
|
||||
throw sqle;
|
||||
}
|
||||
|
@ -1261,7 +1263,9 @@ public abstract class AbstractCollectionPersister
|
|||
String sql = getSQLDeleteRowString();
|
||||
|
||||
if ( useBatch ) {
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement( sql, callable );
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement(
|
||||
this, sql, callable
|
||||
);
|
||||
}
|
||||
else {
|
||||
st = session.getJDBCContext().getConnectionManager().prepareStatement( sql, callable );
|
||||
|
@ -1286,7 +1290,7 @@ public abstract class AbstractCollectionPersister
|
|||
}
|
||||
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( expectation );
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( this, sql, expectation );
|
||||
}
|
||||
else {
|
||||
expectation.verifyOutcome( st.executeUpdate(), st, -1 );
|
||||
|
@ -1295,7 +1299,7 @@ public abstract class AbstractCollectionPersister
|
|||
}
|
||||
catch ( SQLException sqle ) {
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().abortBatch( sqle );
|
||||
session.getJDBCContext().getConnectionManager().abortBatch();
|
||||
}
|
||||
throw sqle;
|
||||
}
|
||||
|
@ -1361,7 +1365,9 @@ public abstract class AbstractCollectionPersister
|
|||
|
||||
if ( useBatch ) {
|
||||
if ( st == null ) {
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement( sql, callable );
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement(
|
||||
this, sql, callable
|
||||
);
|
||||
}
|
||||
}
|
||||
else {
|
||||
|
@ -1381,7 +1387,7 @@ public abstract class AbstractCollectionPersister
|
|||
writeElement(st, collection.getElement(entry), offset, session );
|
||||
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( expectation );
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( this, sql, expectation );
|
||||
}
|
||||
else {
|
||||
expectation.verifyOutcome( st.executeUpdate(), st, -1 );
|
||||
|
@ -1391,7 +1397,7 @@ public abstract class AbstractCollectionPersister
|
|||
}
|
||||
catch ( SQLException sqle ) {
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().abortBatch( sqle );
|
||||
session.getJDBCContext().getConnectionManager().abortBatch();
|
||||
}
|
||||
throw sqle;
|
||||
}
|
||||
|
|
|
@ -211,7 +211,9 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
|
|||
|
||||
if ( useBatch ) {
|
||||
if ( st == null ) {
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement( sql, callable );
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement(
|
||||
this, sql, callable
|
||||
);
|
||||
}
|
||||
}
|
||||
else {
|
||||
|
@ -235,7 +237,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
|
|||
}
|
||||
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( expectation );
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( this, sql, expectation );
|
||||
}
|
||||
else {
|
||||
expectation.verifyOutcome( st.executeUpdate(), st, -1 );
|
||||
|
@ -243,7 +245,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
|
|||
}
|
||||
catch ( SQLException sqle ) {
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().abortBatch( sqle );
|
||||
session.getJDBCContext().getConnectionManager().abortBatch();
|
||||
}
|
||||
throw sqle;
|
||||
}
|
||||
|
|
|
@ -196,27 +196,26 @@ public class OneToManyPersister extends AbstractCollectionPersister {
|
|||
|
||||
Object entry = entries.next();
|
||||
if ( collection.needsUpdating( entry, i, elementType ) ) { // will still be issued when it used to be null
|
||||
String sql = getSQLDeleteRowString();
|
||||
if ( st == null ) {
|
||||
String sql = getSQLDeleteRowString();
|
||||
if ( isDeleteCallable() ) {
|
||||
expectation = Expectations.appropriateExpectation( getDeleteCheckStyle() );
|
||||
useBatch = expectation.canBeBatched();
|
||||
st = useBatch
|
||||
? session.getJDBCContext().getConnectionManager().prepareBatchStatement( sql, true )
|
||||
? session.getJDBCContext().getConnectionManager().prepareBatchStatement( this, sql, true )
|
||||
: session.getJDBCContext().getConnectionManager().prepareStatement( sql, true );
|
||||
offset += expectation.prepare( st );
|
||||
}
|
||||
else {
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement(
|
||||
getSQLDeleteRowString(),
|
||||
false
|
||||
this, sql, false
|
||||
);
|
||||
}
|
||||
}
|
||||
int loc = writeKey( st, id, offset, session );
|
||||
writeElementToWhere( st, collection.getSnapshotElement(entry, i), loc, session );
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( expectation );
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( this, sql, expectation );
|
||||
}
|
||||
else {
|
||||
expectation.verifyOutcome( st.executeUpdate(), st, -1 );
|
||||
|
@ -228,7 +227,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
|
|||
}
|
||||
catch ( SQLException sqle ) {
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().abortBatch( sqle );
|
||||
session.getJDBCContext().getConnectionManager().abortBatch();
|
||||
}
|
||||
throw sqle;
|
||||
}
|
||||
|
@ -255,7 +254,9 @@ public class OneToManyPersister extends AbstractCollectionPersister {
|
|||
if ( collection.needsUpdating( entry, i, elementType ) ) {
|
||||
if ( useBatch ) {
|
||||
if ( st == null ) {
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement( sql, callable );
|
||||
st = session.getJDBCContext().getConnectionManager().prepareBatchStatement(
|
||||
this, sql, callable
|
||||
);
|
||||
}
|
||||
}
|
||||
else {
|
||||
|
@ -272,7 +273,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
|
|||
writeElementToWhere( st, collection.getElement( entry ), loc, session );
|
||||
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( expectation );
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( this, sql, expectation );
|
||||
}
|
||||
else {
|
||||
expectation.verifyOutcome( st.executeUpdate(), st, -1 );
|
||||
|
@ -284,7 +285,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
|
|||
}
|
||||
catch ( SQLException sqle ) {
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().abortBatch( sqle );
|
||||
session.getJDBCContext().getConnectionManager().abortBatch();
|
||||
}
|
||||
throw sqle;
|
||||
}
|
||||
|
|
|
@ -2381,7 +2381,7 @@ public abstract class AbstractEntityPersister
|
|||
// Render the SQL query
|
||||
final PreparedStatement insert;
|
||||
if ( useBatch ) {
|
||||
insert = session.getJDBCContext().getConnectionManager().prepareBatchStatement( sql, callable );
|
||||
insert = session.getJDBCContext().getConnectionManager().prepareBatchStatement( this, sql, callable );
|
||||
}
|
||||
else {
|
||||
insert = session.getJDBCContext().getConnectionManager().prepareStatement( sql, callable );
|
||||
|
@ -2398,7 +2398,7 @@ public abstract class AbstractEntityPersister
|
|||
|
||||
if ( useBatch ) {
|
||||
// TODO : shouldnt inserts be Expectations.NONE?
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( expectation );
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( this, sql, expectation );
|
||||
}
|
||||
else {
|
||||
expectation.verifyOutcome( insert.executeUpdate(), insert, -1 );
|
||||
|
@ -2407,7 +2407,7 @@ public abstract class AbstractEntityPersister
|
|||
}
|
||||
catch ( SQLException sqle ) {
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().abortBatch( sqle );
|
||||
session.getJDBCContext().getConnectionManager().abortBatch();
|
||||
}
|
||||
throw sqle;
|
||||
}
|
||||
|
@ -2500,7 +2500,7 @@ public abstract class AbstractEntityPersister
|
|||
int index = 1; // starting index
|
||||
final PreparedStatement update;
|
||||
if ( useBatch ) {
|
||||
update = session.getJDBCContext().getConnectionManager().prepareBatchStatement( sql, callable );
|
||||
update = session.getJDBCContext().getConnectionManager().prepareBatchStatement( this, sql, callable );
|
||||
}
|
||||
else {
|
||||
update = session.getJDBCContext().getConnectionManager().prepareStatement( sql, callable );
|
||||
|
@ -2543,7 +2543,7 @@ public abstract class AbstractEntityPersister
|
|||
}
|
||||
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( expectation );
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( this, sql, expectation );
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
|
@ -2553,7 +2553,7 @@ public abstract class AbstractEntityPersister
|
|||
}
|
||||
catch ( SQLException sqle ) {
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().abortBatch( sqle );
|
||||
session.getJDBCContext().getConnectionManager().abortBatch();
|
||||
}
|
||||
throw sqle;
|
||||
}
|
||||
|
@ -2614,7 +2614,7 @@ public abstract class AbstractEntityPersister
|
|||
PreparedStatement delete;
|
||||
int index = 1;
|
||||
if ( useBatch ) {
|
||||
delete = session.getJDBCContext().getConnectionManager().prepareBatchStatement( sql, callable );
|
||||
delete = session.getJDBCContext().getConnectionManager().prepareBatchStatement( this, sql, callable );
|
||||
}
|
||||
else {
|
||||
delete = session.getJDBCContext().getConnectionManager().prepareStatement( sql, callable );
|
||||
|
@ -2649,7 +2649,7 @@ public abstract class AbstractEntityPersister
|
|||
}
|
||||
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( expectation );
|
||||
session.getJDBCContext().getConnectionManager().addToBatch( this, sql, expectation );
|
||||
}
|
||||
else {
|
||||
check( delete.executeUpdate(), id, j, expectation, delete );
|
||||
|
@ -2658,7 +2658,7 @@ public abstract class AbstractEntityPersister
|
|||
}
|
||||
catch ( SQLException sqle ) {
|
||||
if ( useBatch ) {
|
||||
session.getJDBCContext().getConnectionManager().abortBatch( sqle );
|
||||
session.getJDBCContext().getConnectionManager().abortBatch();
|
||||
}
|
||||
throw sqle;
|
||||
}
|
||||
|
|
|
@ -52,24 +52,24 @@ public class Staff {
@Column(name="size_in_cm")
@ColumnTransformer(
forColumn = "size_in_cm",
read = "size_in_cm / 2.54",
write = "? * 2.54" )
read = "size_in_cm / 2.54E0",
write = "? * 2.54E0" )
public double getSizeInInches() { return sizeInInches; }
public void setSizeInInches(double sizeInInches) { this.sizeInInches = sizeInInches; }
private double sizeInInches;

//Weird extra S to avoid potential SQL keywords
@ColumnTransformer(
read = "radiusS / 2.54",
write = "? * 2.54" )
read = "radiusS / 2.54E0",
write = "? * 2.54E0" )
public double getRadiusS() { return radiusS; }
public void setRadiusS(double radiusS) { this.radiusS = radiusS; }
private double radiusS;

@Column(name="diamet")
@ColumnTransformer(
read = "diamet / 2.54",
write = "? * 2.54" )
read = "diamet / 2.54E0",
write = "? * 2.54E0" )
public double getDiameter() { return diameter; }
public void setDiameter(double diameter) { this.diameter = diameter; }
private double diameter;
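The change above swaps the plain literal 2.54 for 2.54E0, which marks it as an approximate (floating-point) literal, so databases that would otherwise evaluate the expression with exact-numeric or integer arithmetic treat the division as double-precision. A hypothetical entity using the same read/write-expression pattern (illustrative only, not part of this commit; entity and column names are made up):

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.annotations.ColumnTransformer;

// Illustrative only: stores centimeters in the column, exposes inches in Java,
// using E0 literals so the SQL arithmetic stays floating-point.
@Entity
public class Rod {
	@Id
	public Integer id;

	@Column(name = "length_in_cm")
	@ColumnTransformer(
			forColumn = "length_in_cm",
			read = "length_in_cm / 2.54E0",
			write = "? * 2.54E0" )
	public double lengthInInches;
}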
@ -0,0 +1,34 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010 by Red Hat Inc and/or its affiliates or by
* third-party contributors as indicated by either @author tags or express
* copyright attribution statements applied by the authors. All
* third-party contributions are distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/

package org.hibernate.test.annotations.xml.ejb3;

import java.util.HashMap;
import java.util.Map;

public class Company {
int id;
Map organization = new HashMap();
Map conferenceRoomExtensions = new HashMap();
}
@ -1,89 +1,144 @@
|
|||
//$Id$
|
||||
package org.hibernate.test.annotations.xml.ejb3;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.hibernate.Session;
|
||||
import org.hibernate.Transaction;
|
||||
import org.hibernate.dialect.PostgreSQLDialect;
|
||||
import org.hibernate.test.annotations.TestCase;
|
||||
import org.hibernate.testing.junit.SkipForDialect;
|
||||
|
||||
/**
|
||||
* @author Emmanuel Bernard
|
||||
*/
|
||||
public class Ejb3XmlTest extends TestCase {
|
||||
@SkipForDialect(value = {PostgreSQLDialect.class}, comment = "postgresql jdbc driver does not implement the setQueryTimeout method")
|
||||
public void testEjb3Xml() throws Exception {
|
||||
Session s = openSession();
|
||||
Transaction tx = s.beginTransaction();
|
||||
CarModel model = new CarModel();
|
||||
model.setYear( new Date() );
|
||||
Manufacturer manufacturer = new Manufacturer();
|
||||
//s.persist( manufacturer );
|
||||
model.setManufacturer( manufacturer );
|
||||
manufacturer.getModels().add( model );
|
||||
s.persist( model );
|
||||
s.flush();
|
||||
s.clear();
|
||||
|
||||
model.setYear( new Date() );
|
||||
manufacturer = (Manufacturer) s.get( Manufacturer.class, manufacturer.getId() );
|
||||
List<Model> cars = s.getNamedQuery( "allModelsPerManufacturer" )
|
||||
.setParameter( "manufacturer", manufacturer )
|
||||
.list();
|
||||
assertEquals( 1, cars.size() );
|
||||
for ( Model car : cars ) {
|
||||
assertNotNull( car.getManufacturer() );
|
||||
s.delete( manufacturer );
|
||||
s.delete( car );
|
||||
}
|
||||
tx.rollback();
|
||||
s.close();
|
||||
}
|
||||
|
||||
public void testXMLEntityHandled() throws Exception {
|
||||
Session s = openSession();
|
||||
s.getTransaction().begin();
|
||||
Lighter l = new Lighter();
|
||||
l.name = "Blue";
|
||||
l.power = "400F";
|
||||
s.persist( l );
|
||||
s.flush();
|
||||
s.getTransaction().rollback();
|
||||
s.close();
|
||||
}
|
||||
|
||||
public void testXmlDefaultOverriding() throws Exception {
|
||||
Session s = openSession();
|
||||
Transaction tx = s.beginTransaction();
|
||||
Manufacturer manufacturer = new Manufacturer();
|
||||
s.persist( manufacturer );
|
||||
s.flush();
|
||||
s.clear();
|
||||
|
||||
assertEquals( 1, s.getNamedQuery( "manufacturer.findAll" ).list().size() );
|
||||
tx.rollback();
|
||||
s.close();
|
||||
}
|
||||
|
||||
protected Class[] getAnnotatedClasses() {
|
||||
return new Class[]{
|
||||
CarModel.class,
|
||||
Manufacturer.class,
|
||||
Model.class,
|
||||
Light.class
|
||||
//Lighter.class xml only entity
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String[] getXmlFiles() {
|
||||
return new String[]{
|
||||
"org/hibernate/test/annotations/xml/ejb3/orm.xml",
|
||||
"org/hibernate/test/annotations/xml/ejb3/orm2.xml",
|
||||
"org/hibernate/test/annotations/xml/ejb3/orm3.xml"
|
||||
};
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2010 by Red Hat Inc and/or its affiliates or by
|
||||
* third-party contributors as indicated by either @author tags or express
|
||||
* copyright attribution statements applied by the authors. All
|
||||
* third-party contributions are distributed under license by Red Hat Inc.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*/
|
||||
|
||||
package org.hibernate.test.annotations.xml.ejb3;
|
||||
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.hibernate.Session;
|
||||
import org.hibernate.SessionFactory;
|
||||
import org.hibernate.Transaction;
|
||||
import org.hibernate.dialect.PostgreSQLDialect;
|
||||
import org.hibernate.persister.collection.BasicCollectionPersister;
|
||||
import org.hibernate.test.annotations.TestCase;
|
||||
import org.hibernate.testing.junit.SkipForDialect;
|
||||
|
||||
/**
|
||||
* @author Emmanuel Bernard
|
||||
*/
|
||||
public class Ejb3XmlTest extends TestCase {
|
||||
@SkipForDialect(value = { PostgreSQLDialect.class },
|
||||
comment = "postgresql jdbc driver does not implement the setQueryTimeout method")
|
||||
public void testEjb3Xml() throws Exception {
|
||||
Session s = openSession();
|
||||
Transaction tx = s.beginTransaction();
|
||||
CarModel model = new CarModel();
|
||||
model.setYear( new Date() );
|
||||
Manufacturer manufacturer = new Manufacturer();
|
||||
//s.persist( manufacturer );
|
||||
model.setManufacturer( manufacturer );
|
||||
manufacturer.getModels().add( model );
|
||||
s.persist( model );
|
||||
s.flush();
|
||||
s.clear();
|
||||
|
||||
model.setYear( new Date() );
|
||||
manufacturer = (Manufacturer) s.get( Manufacturer.class, manufacturer.getId() );
|
||||
@SuppressWarnings("unchecked")
|
||||
List<Model> cars = s.getNamedQuery( "allModelsPerManufacturer" )
|
||||
.setParameter( "manufacturer", manufacturer )
|
||||
.list();
|
||||
assertEquals( 1, cars.size() );
|
||||
for ( Model car : cars ) {
|
||||
assertNotNull( car.getManufacturer() );
|
||||
s.delete( manufacturer );
|
||||
s.delete( car );
|
||||
}
|
||||
tx.rollback();
|
||||
s.close();
|
||||
}
|
||||
|
||||
public void testXMLEntityHandled() throws Exception {
|
||||
Session s = openSession();
|
||||
s.getTransaction().begin();
|
||||
Lighter l = new Lighter();
|
||||
l.name = "Blue";
|
||||
l.power = "400F";
|
||||
s.persist( l );
|
||||
s.flush();
|
||||
s.getTransaction().rollback();
|
||||
s.close();
|
||||
}
|
||||
|
||||
public void testXmlDefaultOverriding() throws Exception {
|
||||
Session s = openSession();
|
||||
Transaction tx = s.beginTransaction();
|
||||
Manufacturer manufacturer = new Manufacturer();
|
||||
s.persist( manufacturer );
|
||||
s.flush();
|
||||
s.clear();
|
||||
|
||||
assertEquals( 1, s.getNamedQuery( "manufacturer.findAll" ).list().size() );
|
||||
tx.rollback();
|
||||
s.close();
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testMapXMLSupport() throws Exception {
|
||||
Session s = openSession();
|
||||
SessionFactory sf = s.getSessionFactory();
|
||||
Transaction tx = s.beginTransaction();
|
||||
|
||||
// Verify that we can persist an object with a couple Map mappings
|
||||
VicePresident vpSales = new VicePresident();
|
||||
vpSales.name = "Dwight";
|
||||
Company company = new Company();
|
||||
company.conferenceRoomExtensions.put( "8932", "x1234" );
|
||||
company.organization.put( "sales", vpSales );
|
||||
s.persist( company );
|
||||
s.flush();
|
||||
s.clear();
|
||||
|
||||
// For the element-collection, check that the orm.xml entries are honored.
|
||||
// This includes: map-key-column/column/collection-table/join-column
|
||||
BasicCollectionPersister confRoomMeta = (BasicCollectionPersister) sf.getCollectionMetadata( Company.class.getName() + ".conferenceRoomExtensions" );
|
||||
assertEquals( "company_id", confRoomMeta.getKeyColumnNames()[0] );
|
||||
assertEquals( "phone_extension", confRoomMeta.getElementColumnNames()[0] );
|
||||
assertEquals( "room_number", confRoomMeta.getIndexColumnNames()[0] );
|
||||
assertEquals( "phone_extension_lookup", confRoomMeta.getTableName() );
|
||||
tx.rollback();
|
||||
s.close();
|
||||
}
|
||||
|
||||
protected Class[] getAnnotatedClasses() {
|
||||
return new Class[] {
|
||||
CarModel.class,
|
||||
Manufacturer.class,
|
||||
Model.class,
|
||||
Light.class
|
||||
//Lighter.class xml only entity
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String[] getXmlFiles() {
|
||||
return new String[] {
|
||||
"org/hibernate/test/annotations/xml/ejb3/orm.xml",
|
||||
"org/hibernate/test/annotations/xml/ejb3/orm2.xml",
|
||||
"org/hibernate/test/annotations/xml/ejb3/orm3.xml",
|
||||
"org/hibernate/test/annotations/xml/ejb3/orm4.xml"
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,30 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2010 by Red Hat Inc and/or its affiliates or by
|
||||
* third-party contributors as indicated by either @author tags or express
|
||||
* copyright attribution statements applied by the authors. All
|
||||
* third-party contributions are distributed under license by Red Hat Inc.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*/
|
||||
|
||||
package org.hibernate.test.annotations.xml.ejb3;
|
||||
|
||||
public class VicePresident {
|
||||
int id;
|
||||
String name;
|
||||
}
|
|
@ -10,6 +10,7 @@ import org.hibernate.ScrollMode;
|
|||
import org.hibernate.ScrollableResults;
|
||||
import org.hibernate.Session;
|
||||
import org.hibernate.Transaction;
|
||||
import org.hibernate.engine.SessionFactoryImplementor;
|
||||
import org.hibernate.testing.junit.functional.FunctionalTestCase;
|
||||
import org.hibernate.testing.junit.functional.FunctionalTestClassTestSuite;
|
||||
import org.hibernate.cfg.Configuration;
|
||||
|
@ -49,16 +50,37 @@ public class BatchTest extends FunctionalTestCase {
|
|||
final int N = 5000; //26 secs with batch flush, 26 without
|
||||
//final int N = 100000; //53 secs with batch flush, OOME without
|
||||
//final int N = 250000; //137 secs with batch flush, OOME without
|
||||
int batchSize = ( ( SessionFactoryImplementor ) getSessions() ).getSettings().getJdbcBatchSize();
|
||||
doBatchInsertUpdate( N, batchSize );
|
||||
System.out.println( System.currentTimeMillis() - start );
|
||||
}
|
||||
|
||||
public void testBatchInsertUpdateSizeEqJdbcBatchSize() {
|
||||
int batchSize = ( ( SessionFactoryImplementor ) getSessions() ).getSettings().getJdbcBatchSize();
|
||||
doBatchInsertUpdate( 50, batchSize );
|
||||
}
|
||||
|
||||
public void testBatchInsertUpdateSizeLtJdbcBatchSize() {
|
||||
int batchSize = ( ( SessionFactoryImplementor ) getSessions() ).getSettings().getJdbcBatchSize();
|
||||
doBatchInsertUpdate( 50, batchSize - 1 );
|
||||
}
|
||||
|
||||
public void testBatchInsertUpdateSizeGtJdbcBatchSize() {
|
||||
long start = System.currentTimeMillis();
|
||||
int batchSize = ( ( SessionFactoryImplementor ) getSessions() ).getSettings().getJdbcBatchSize();
|
||||
doBatchInsertUpdate( 50, batchSize + 1 );
|
||||
}
|
||||
|
||||
public void doBatchInsertUpdate(int nEntities, int nBeforeFlush) {
|
||||
Session s = openSession();
|
||||
s.setCacheMode( CacheMode.IGNORE );
|
||||
Transaction t = s.beginTransaction();
|
||||
for ( int i = 0; i < N; i++ ) {
|
||||
for ( int i = 0; i < nEntities; i++ ) {
|
||||
DataPoint dp = new DataPoint();
|
||||
dp.setX( new BigDecimal( i * 0.1d ).setScale( 19, BigDecimal.ROUND_DOWN ) );
|
||||
dp.setY( new BigDecimal( Math.cos( dp.getX().doubleValue() ) ).setScale( 19, BigDecimal.ROUND_DOWN ) );
|
||||
s.save( dp );
|
||||
if ( i % 20 == 0 ) {
|
||||
if ( i + 1 % nBeforeFlush == 0 ) {
|
||||
s.flush();
|
||||
s.clear();
|
||||
}
|
||||
|
@ -75,15 +97,30 @@ public class BatchTest extends FunctionalTestCase {
|
|||
while ( sr.next() ) {
|
||||
DataPoint dp = ( DataPoint ) sr.get( 0 );
|
||||
dp.setDescription( "done!" );
|
||||
if ( ++i % 20 == 0 ) {
|
||||
if ( ++i % nBeforeFlush == 0 ) {
|
||||
s.flush();
|
||||
s.clear();
|
||||
}
|
||||
}
|
||||
t.commit();
|
||||
s.close();
|
||||
System.out.println( System.currentTimeMillis() - start );
|
||||
}
|
||||
|
||||
s = openSession();
|
||||
s.setCacheMode( CacheMode.IGNORE );
|
||||
t = s.beginTransaction();
|
||||
i = 0;
|
||||
sr = s.createQuery( "from DataPoint dp order by dp.x asc" )
|
||||
.scroll( ScrollMode.FORWARD_ONLY );
|
||||
while ( sr.next() ) {
|
||||
DataPoint dp = ( DataPoint ) sr.get( 0 );
|
||||
s.delete( dp );
|
||||
if ( ++i % nBeforeFlush == 0 ) {
|
||||
s.flush();
|
||||
s.clear();
|
||||
}
|
||||
}
|
||||
t.commit();
|
||||
s.close();
|
||||
}
|
||||
}
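For orientation, a minimal standalone sketch (not part of this commit) of the flush-every-N pattern that the reworked doBatchInsertUpdate above exercises; MyEntity and the SessionFactory wiring are placeholders rather than classes from this diff.

    import org.hibernate.CacheMode;
    import org.hibernate.Session;
    import org.hibernate.SessionFactory;
    import org.hibernate.Transaction;

    public class FlushEveryNSketch {
        // placeholder entity; a real mapping (hbm.xml or annotations) is assumed
        public static class MyEntity {}

        public static void insertInBatches(SessionFactory sf, int nEntities, int nBeforeFlush) {
            Session s = sf.openSession();
            s.setCacheMode( CacheMode.IGNORE );      // keep the second-level cache out of the way
            Transaction t = s.beginTransaction();
            for ( int i = 0; i < nEntities; i++ ) {
                s.save( new MyEntity() );
                // flush and clear every nBeforeFlush inserts to bound session memory;
                // the parentheses around (i + 1) keep the modulo from binding to the literal 1
                if ( ( i + 1 ) % nBeforeFlush == 0 ) {
                    s.flush();
                    s.clear();
                }
            }
            t.commit();
            s.close();
        }
    }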
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
|
||||
* Copyright (c) 2010, Red Hat Middleware LLC or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Middleware LLC.
|
||||
|
@ -22,32 +22,31 @@
|
|||
* Boston, MA 02110-1301 USA
|
||||
*
|
||||
*/
|
||||
package org.hibernate.jdbc;
|
||||
package org.hibernate.test.cascade.circle;
|
||||
|
||||
import java.sql.PreparedStatement;
|
||||
import java.sql.SQLException;
|
||||
import junit.framework.Test;
|
||||
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
|
||||
import org.hibernate.TransientObjectException;
|
||||
import org.hibernate.cfg.Configuration;
|
||||
import org.hibernate.cfg.Environment;
|
||||
import org.hibernate.testing.junit.functional.FunctionalTestClassTestSuite;
|
||||
|
||||
/**
|
||||
* An implementation of the <tt>Batcher</tt> interface that does no batching
|
||||
*
|
||||
* @author Gavin King
|
||||
* @author Gail Badner
|
||||
*/
|
||||
public class NonBatchingBatcher extends AbstractBatcher {
|
||||
public class MultiPathCircleCascadeCheckNullFalseDelayedInsertTest extends MultiPathCircleCascadeDelayedInsertTest {
|
||||
|
||||
public NonBatchingBatcher(SQLExceptionHelper exceptionHelper) {
|
||||
super( exceptionHelper, 1 );
|
||||
public MultiPathCircleCascadeCheckNullFalseDelayedInsertTest(String str) {
|
||||
super( str );
|
||||
}
|
||||
|
||||
public void addToBatch(Expectation expectation) throws SQLException, HibernateException {
|
||||
PreparedStatement statement = getStatement();
|
||||
final int rowCount = statement.executeUpdate();
|
||||
expectation.verifyOutcome( rowCount, statement, 0 );
|
||||
@Override
|
||||
public void configure(Configuration cfg) {
|
||||
super.configure( cfg );
|
||||
cfg.setProperty( Environment.CHECK_NULLABILITY, "false" );
|
||||
}
|
||||
|
||||
protected void doExecuteBatch(PreparedStatement ps) throws SQLException, HibernateException {
|
||||
public static Test suite() {
|
||||
return new FunctionalTestClassTestSuite( MultiPathCircleCascadeCheckNullFalseDelayedInsertTest.class );
|
||||
}
|
||||
|
||||
}
|
||||
}
|
|
@ -1,7 +1,7 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
|
||||
* Copyright (c) 2010, Red Hat Middleware LLC or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Middleware LLC.
|
||||
|
@ -22,28 +22,30 @@
|
|||
* Boston, MA 02110-1301 USA
|
||||
*
|
||||
*/
|
||||
package org.hibernate.jdbc;
|
||||
package org.hibernate.test.cascade.circle;
|
||||
|
||||
import org.hibernate.AssertionFailure;
|
||||
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
|
||||
import junit.framework.Test;
|
||||
|
||||
import org.hibernate.cfg.Configuration;
|
||||
import org.hibernate.cfg.Environment;
|
||||
import org.hibernate.testing.junit.functional.FunctionalTestClassTestSuite;
|
||||
|
||||
/**
|
||||
* A BatcherFactory implementation which constructs Batcher instances
|
||||
* that do not perform batch operations.
|
||||
*
|
||||
* @author Gavin King
|
||||
* @author Gail Badner
|
||||
*/
|
||||
public class NonBatchingBatcherFactory implements BatcherFactory {
|
||||
public class MultiPathCircleCascadeCheckNullTrueDelayedInsertTest extends MultiPathCircleCascadeDelayedInsertTest {
|
||||
|
||||
public void setJdbcBatchSize(int jdbcBatchSize) {
|
||||
if ( jdbcBatchSize > 1 ) {
|
||||
throw new AssertionFailure( "jdbcBatchSize must be 1 for " + getClass().getName() );
|
||||
}
|
||||
public MultiPathCircleCascadeCheckNullTrueDelayedInsertTest(String str) {
|
||||
super( str );
|
||||
}
|
||||
|
||||
public Batcher createBatcher(SQLExceptionHelper exceptionHelper) {
|
||||
return new NonBatchingBatcher( exceptionHelper );
|
||||
@Override
|
||||
public void configure(Configuration cfg) {
|
||||
super.configure( cfg );
|
||||
cfg.setProperty( Environment.CHECK_NULLABILITY, "true" );
|
||||
}
|
||||
|
||||
}
|
||||
public static Test suite() {
|
||||
return new FunctionalTestClassTestSuite( MultiPathCircleCascadeCheckNullTrueDelayedInsertTest.class );
|
||||
}
|
||||
}
|
|
@ -0,0 +1,82 @@
|
|||
<?xml version="1.0"?>
|
||||
<!DOCTYPE hibernate-mapping SYSTEM "http://www.hibernate.org/dtd/hibernate-mapping-3.0.dtd" >
|
||||
|
||||
<hibernate-mapping package="org.hibernate.test.cascade.circle">
|
||||
|
||||
<class name="Route" table="HB_Route">
|
||||
|
||||
<id name="routeID" type="long"><generator class="increment"/></id>
|
||||
|
||||
<property name="name" type="string" not-null="true"/>
|
||||
|
||||
<set name="nodes" inverse="true" cascade="persist,merge,refresh">
|
||||
<key column="routeID"/>
|
||||
<one-to-many class="Node"/>
|
||||
</set>
|
||||
</class>
|
||||
|
||||
<class name="Tour" table="HB_Tour">
|
||||
|
||||
<id name="tourID" type="long"><generator class="increment"/></id>
|
||||
|
||||
<property name="name" type="string" not-null="true"/>
|
||||
|
||||
<set name="nodes" inverse="true" lazy="true" cascade="merge,refresh">
|
||||
<key column="tourID"/>
|
||||
<one-to-many class="Node"/>
|
||||
</set>
|
||||
</class>
|
||||
|
||||
<class name="Transport" table="HB_Transport">
|
||||
|
||||
<id name="transportID" type="long"><generator class="increment"/></id>
|
||||
|
||||
<property name="name" type="string" not-null="true"/>
|
||||
|
||||
<many-to-one name="pickupNode"
|
||||
column="pickupNodeID"
|
||||
unique="true"
|
||||
not-null="true"
|
||||
cascade="merge,refresh"
|
||||
lazy="false"/>
|
||||
|
||||
<many-to-one name="deliveryNode"
|
||||
column="deliveryNodeID"
|
||||
unique="true"
|
||||
not-null="true"
|
||||
cascade="merge,refresh"
|
||||
lazy="false"/>
|
||||
</class>
|
||||
|
||||
<class name="Node" table="HB_Node">
|
||||
|
||||
<id name="nodeID" type="long"><generator class="increment"/></id>
|
||||
|
||||
<property name="name" type="string" not-null="true"/>
|
||||
|
||||
<set name="deliveryTransports" inverse="true" lazy="true" cascade="merge,refresh">
|
||||
<key column="deliveryNodeID"/>
|
||||
<one-to-many class="Transport"/>
|
||||
</set>
|
||||
|
||||
<set name="pickupTransports" inverse="true" lazy="true" cascade="merge,refresh">
|
||||
<key column="pickupNodeID"/>
|
||||
<one-to-many class="Transport"/>
|
||||
</set>
|
||||
|
||||
<many-to-one name="route"
|
||||
column="routeID"
|
||||
unique="false"
|
||||
not-null="true"
|
||||
cascade="none"
|
||||
lazy="false"/>
|
||||
|
||||
<many-to-one name="tour"
|
||||
column="tourID"
|
||||
unique="false"
|
||||
not-null="false"
|
||||
cascade="merge,refresh"
|
||||
lazy="false"/>
|
||||
</class>
|
||||
|
||||
</hibernate-mapping>
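A rough usage sketch of the object graph this mapping describes and of the merge the delayed-insert tests perform; the Route and Node accessors (setName, getNodes, setRoute) come from the existing test fixtures and do not appear in this diff, so treat those names as assumptions.

    import org.hibernate.Session;

    public class CascadeGraphSketch {
        static void mergeGraph(Session s) {
            Route route = new Route();
            route.setName( "routeA" );

            Node node = new Node();
            node.setName( "pickupNode" );
            node.setRoute( route );            // many-to-one Node -> Route, cascade="none"
            route.getNodes().add( node );      // one-to-many Route -> Node, cascade="persist,merge,refresh"

            s.beginTransaction();
            s.merge( route );                  // merge cascades from the route to its nodes
            s.getTransaction().commit();
        }
    }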
|
|
@ -0,0 +1,65 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2010, Red Hat Middleware LLC or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Middleware LLC.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*
|
||||
*/
|
||||
package org.hibernate.test.cascade.circle;
|
||||
|
||||
import junit.framework.Test;
|
||||
|
||||
import org.hibernate.JDBCException;
|
||||
import org.hibernate.PropertyValueException;
|
||||
import org.hibernate.TransientObjectException;
|
||||
import org.hibernate.testing.junit.functional.FunctionalTestClassTestSuite;
|
||||
|
||||
/**
|
||||
* @author Gail Badner
|
||||
*/
|
||||
public class MultiPathCircleCascadeDelayedInsertTest extends MultiPathCircleCascadeTest {
|
||||
public MultiPathCircleCascadeDelayedInsertTest(String string) {
|
||||
super(string);
|
||||
}
|
||||
|
||||
public String[] getMappings() {
|
||||
return new String[] {
|
||||
"cascade/circle/MultiPathCircleCascadeDelayedInsert.hbm.xml"
|
||||
};
|
||||
}
|
||||
|
||||
public static Test suite() {
|
||||
return new FunctionalTestClassTestSuite( MultiPathCircleCascadeDelayedInsertTest.class );
|
||||
}
|
||||
|
||||
protected void checkExceptionFromNullValueForNonNullable(Exception ex, boolean checkNullability, boolean isNullValue ) {
|
||||
if ( checkNullability ) {
|
||||
if ( isNullValue ) {
|
||||
assertTrue( ex instanceof PropertyValueException );
|
||||
}
|
||||
else {
|
||||
assertTrue( ex instanceof TransientObjectException );
|
||||
}
|
||||
}
|
||||
else {
|
||||
assertTrue( ex instanceof JDBCException || ex instanceof TransientObjectException );
|
||||
}
|
||||
}
|
||||
}
|
|
@ -109,15 +109,15 @@ public class MultiPathCircleCascadeTest extends FunctionalTestCase {
|
|||
|
||||
try {
|
||||
s.merge( node );
|
||||
s.getTransaction().commit();
|
||||
fail( "should have thrown an exception" );
|
||||
}
|
||||
catch ( Exception ex ) {
|
||||
if ( ( ( SessionImplementor ) s ).getFactory().getSettings().isCheckNullability() ) {
|
||||
assertTrue( ex instanceof TransientObjectException );
|
||||
}
|
||||
else {
|
||||
assertTrue( ex instanceof JDBCException );
|
||||
}
|
||||
checkExceptionFromNullValueForNonNullable(
|
||||
ex,
|
||||
( ( SessionImplementor ) s ).getFactory().getSettings().isCheckNullability(),
|
||||
false
|
||||
);
|
||||
}
|
||||
finally {
|
||||
s.getTransaction().rollback();
|
||||
|
@ -138,15 +138,15 @@ public class MultiPathCircleCascadeTest extends FunctionalTestCase {
|
|||
|
||||
try {
|
||||
s.merge( node );
|
||||
s.getTransaction().commit();
|
||||
fail( "should have thrown an exception" );
|
||||
}
|
||||
catch ( Exception ex ) {
|
||||
if ( ( ( SessionImplementor ) s ).getFactory().getSettings().isCheckNullability() ) {
|
||||
assertTrue( ex instanceof PropertyValueException );
|
||||
}
|
||||
else {
|
||||
assertTrue( ex instanceof JDBCException );
|
||||
}
|
||||
checkExceptionFromNullValueForNonNullable(
|
||||
ex,
|
||||
( ( SessionImplementor ) s ).getFactory().getSettings().isCheckNullability(),
|
||||
true
|
||||
);
|
||||
}
|
||||
finally {
|
||||
s.getTransaction().rollback();
|
||||
|
@ -165,15 +165,15 @@ public class MultiPathCircleCascadeTest extends FunctionalTestCase {
|
|||
|
||||
try {
|
||||
s.merge( route );
|
||||
s.getTransaction().commit();
|
||||
fail( "should have thrown an exception" );
|
||||
}
|
||||
catch ( Exception ex ) {
|
||||
if ( ( ( SessionImplementor ) s ).getFactory().getSettings().isCheckNullability() ) {
|
||||
assertTrue( ex instanceof PropertyValueException );
|
||||
}
|
||||
else {
|
||||
assertTrue( ex instanceof JDBCException );
|
||||
}
|
||||
checkExceptionFromNullValueForNonNullable(
|
||||
ex,
|
||||
( ( SessionImplementor ) s ).getFactory().getSettings().isCheckNullability(),
|
||||
true
|
||||
);
|
||||
}
|
||||
finally {
|
||||
s.getTransaction().rollback();
|
||||
|
@ -531,6 +531,20 @@ public class MultiPathCircleCascadeTest extends FunctionalTestCase {
|
|||
assertUpdateCount( 1 );
|
||||
}
|
||||
|
||||
protected void checkExceptionFromNullValueForNonNullable(Exception ex, boolean checkNullability, boolean isNullValue ) {
|
||||
if ( checkNullability ) {
|
||||
if ( isNullValue ) {
|
||||
assertTrue( ex instanceof PropertyValueException );
|
||||
}
|
||||
else {
|
||||
assertTrue( ex instanceof TransientObjectException );
|
||||
}
|
||||
}
|
||||
else {
|
||||
assertTrue( ex instanceof JDBCException );
|
||||
}
|
||||
}
|
||||
|
||||
protected void clearCounts() {
|
||||
getSessions().getStatistics().clear();
|
||||
}
|
||||
|
|
|
@ -22,8 +22,8 @@
|
|||
<property name="heightInches">
|
||||
<column name="height_centimeters"
|
||||
not-null="true"
|
||||
read="height_centimeters / 2.54"
|
||||
write="? * 2.54"/>
|
||||
read="height_centimeters / 2.54E0"
|
||||
write="? * 2.54E0"/>
|
||||
</property>
|
||||
<property name="currentAddress"
|
||||
column="address"
|
||||
|
|
|
@ -15,8 +15,8 @@
|
|||
<property name="weightPounds">
|
||||
<column name="weight_kg"
|
||||
not-null="true"
|
||||
write="0.453 * ?"
|
||||
read="weight_kg / 0.453"/>
|
||||
write="0.453E0 * ?"
|
||||
read="weight_kg / 0.453E0"/>
|
||||
</property>
|
||||
|
||||
<property name="effectiveStartDate" column="eff_start_dt" type="java.util.Date"/>
|
||||
|
|
|
@ -307,7 +307,7 @@ public class ASTParserLoadingTest extends FunctionalTestCase {
|
|||
results = s.createQuery( "from Human where name is not null" ).list();
|
||||
assertEquals( 3, results.size() );
|
||||
String query =
|
||||
getDialect() instanceof DB2Dialect ?
|
||||
( getDialect() instanceof DB2Dialect || getDialect() instanceof HSQLDialect ) ?
|
||||
"from Human where cast(? as string) is null" :
|
||||
"from Human where ? is null"
|
||||
;
|
||||
|
@ -2600,14 +2600,14 @@ public class ASTParserLoadingTest extends FunctionalTestCase {
|
|||
* PostgreSQL >= 8.3.7 typecasts are no longer automatically allowed
|
||||
* <link>http://www.postgresql.org/docs/current/static/release-8-3.html</link>
|
||||
*/
|
||||
if(getDialect() instanceof PostgreSQLDialect){
|
||||
if(getDialect() instanceof PostgreSQLDialect || getDialect() instanceof HSQLDialect){
|
||||
hql = "from Animal a where bit_length(str(a.bodyWeight)) = 24";
|
||||
}else{
|
||||
hql = "from Animal a where bit_length(a.bodyWeight) = 24";
|
||||
}
|
||||
|
||||
session.createQuery(hql).list();
|
||||
if(getDialect() instanceof PostgreSQLDialect){
|
||||
if(getDialect() instanceof PostgreSQLDialect || getDialect() instanceof HSQLDialect){
|
||||
hql = "select bit_length(str(a.bodyWeight)) from Animal a";
|
||||
}else{
|
||||
hql = "select bit_length(a.bodyWeight) from Animal a";
|
||||
|
|
|
@ -50,8 +50,8 @@
|
|||
<property name="heightInches">
|
||||
<column name="height_centimeters"
|
||||
not-null="true"
|
||||
read="height_centimeters / 2.54"
|
||||
write="? * 2.54"/>
|
||||
read="height_centimeters / 2.54E0"
|
||||
write="? * 2.54E0"/>
|
||||
</property>
|
||||
<property name="intValue"/>
|
||||
<property name="floatValue"/>
|
||||
|
|
|
@ -3,22 +3,21 @@ package org.hibernate.test.insertordering;
|
|||
import java.util.List;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.PreparedStatement;
|
||||
|
||||
import junit.framework.Test;
|
||||
|
||||
import org.hibernate.engine.jdbc.batch.internal.BatchBuilder;
|
||||
import org.hibernate.engine.jdbc.batch.internal.BatchingBatch;
|
||||
import org.hibernate.engine.jdbc.batch.spi.Batch;
|
||||
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
|
||||
import org.hibernate.engine.jdbc.spi.SQLStatementLogger;
|
||||
import org.hibernate.testing.junit.functional.FunctionalTestCase;
|
||||
import org.hibernate.testing.junit.functional.FunctionalTestClassTestSuite;
|
||||
import org.hibernate.cfg.Configuration;
|
||||
import org.hibernate.cfg.Environment;
|
||||
import org.hibernate.Session;
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.jdbc.BatchingBatcher;
|
||||
import org.hibernate.jdbc.Expectation;
|
||||
import org.hibernate.jdbc.BatcherFactory;
|
||||
import org.hibernate.jdbc.Batcher;
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
|
@ -42,7 +41,7 @@ public class InsertOrderingTest extends FunctionalTestCase {
|
|||
super.configure( cfg );
|
||||
cfg.setProperty( Environment.ORDER_INSERTS, "true" );
|
||||
cfg.setProperty( Environment.STATEMENT_BATCH_SIZE, "10" );
|
||||
cfg.setProperty( Environment.BATCH_STRATEGY, StatsBatcherFactory.class.getName() );
|
||||
cfg.setProperty( Environment.BATCH_STRATEGY, StatsBatchBuilder.class.getName() );
|
||||
}
|
||||
|
||||
public void testBatchOrdering() {
|
||||
|
@ -56,11 +55,11 @@ public class InsertOrderingTest extends FunctionalTestCase {
|
|||
s.save( group );
|
||||
user.addMembership( group );
|
||||
}
|
||||
StatsBatcher.reset();
|
||||
StatsBatch.reset();
|
||||
s.getTransaction().commit();
|
||||
s.close();
|
||||
|
||||
assertEquals( 3, StatsBatcher.batchSizes.size() );
|
||||
assertEquals( 3, StatsBatch.batchSizes.size() );
|
||||
|
||||
s = openSession();
|
||||
s.beginTransaction();
|
||||
|
@ -76,13 +75,13 @@ public class InsertOrderingTest extends FunctionalTestCase {
|
|||
public int count = 0;
|
||||
}
|
||||
|
||||
public static class StatsBatcher extends BatchingBatcher {
|
||||
public static class StatsBatch extends BatchingBatch {
|
||||
private static String batchSQL;
|
||||
private static List batchSizes = new ArrayList();
|
||||
private static int currentBatch = -1;
|
||||
|
||||
public StatsBatcher(SQLExceptionHelper exceptionHelper, int jdbcBatchSize) {
|
||||
super( exceptionHelper, jdbcBatchSize );
|
||||
public StatsBatch(Object key, SQLStatementLogger statementLogger, SQLExceptionHelper exceptionHelper, int jdbcBatchSize) {
|
||||
super( key, statementLogger, exceptionHelper, jdbcBatchSize );
|
||||
}
|
||||
|
||||
static void reset() {
|
||||
|
@ -91,7 +90,7 @@ public class InsertOrderingTest extends FunctionalTestCase {
|
|||
batchSQL = null;
|
||||
}
|
||||
|
||||
public void setStatement(String sql, PreparedStatement ps) {
|
||||
public void addBatchStatement(Object key, String sql, PreparedStatement ps) {
|
||||
if ( batchSQL == null || ! batchSQL.equals( sql ) ) {
|
||||
currentBatch++;
|
||||
batchSQL = sql;
|
||||
|
@ -99,31 +98,31 @@ public class InsertOrderingTest extends FunctionalTestCase {
|
|||
System.out.println( "--------------------------------------------------------" );
|
||||
System.out.println( "Preparing statement [" + sql + "]" );
|
||||
}
|
||||
super.setStatement( sql, ps );
|
||||
super.addBatchStatement( key, sql, ps );
|
||||
}
|
||||
|
||||
public void addToBatch(Expectation expectation) throws SQLException, HibernateException {
|
||||
public void addToBatch(Object key, String sql, Expectation expectation) {
|
||||
Counter counter = ( Counter ) batchSizes.get( currentBatch );
|
||||
counter.count++;
|
||||
System.out.println( "Adding to batch [" + batchSQL + "]" );
|
||||
super.addToBatch( expectation );
|
||||
super.addToBatch( key, sql, expectation );
|
||||
}
|
||||
|
||||
protected void doExecuteBatch(PreparedStatement ps) throws SQLException, HibernateException {
|
||||
protected void doExecuteBatch() {
|
||||
System.out.println( "executing batch [" + batchSQL + "]" );
|
||||
System.out.println( "--------------------------------------------------------" );
|
||||
super.doExecuteBatch( ps );
|
||||
super.doExecuteBatch();
|
||||
}
|
||||
}
|
||||
|
||||
public static class StatsBatcherFactory implements BatcherFactory {
|
||||
public static class StatsBatchBuilder extends BatchBuilder {
|
||||
private int jdbcBatchSize;
|
||||
|
||||
public void setJdbcBatchSize(int jdbcBatchSize) {
|
||||
this.jdbcBatchSize = jdbcBatchSize;
|
||||
}
|
||||
public Batcher createBatcher(SQLExceptionHelper exceptionHelper) {
|
||||
return new StatsBatcher( exceptionHelper, jdbcBatchSize );
|
||||
public Batch buildBatch(Object key, SQLStatementLogger statementLogger, SQLExceptionHelper exceptionHelper) {
|
||||
return new StatsBatch(key, statementLogger, exceptionHelper, jdbcBatchSize );
|
||||
}
|
||||
}
|
||||
}
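To illustrate the extension point the test now uses, a hedged sketch of another BatchBuilder subclass wired in the same way as StatsBatchBuilder; LoggingBatchBuilder/LoggingBatch are hypothetical names and only the calls and signatures already visible in this diff are used.

    import org.hibernate.engine.jdbc.batch.internal.BatchBuilder;
    import org.hibernate.engine.jdbc.batch.internal.BatchingBatch;
    import org.hibernate.engine.jdbc.batch.spi.Batch;
    import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
    import org.hibernate.engine.jdbc.spi.SQLStatementLogger;

    public class LoggingBatchBuilder extends BatchBuilder {
        private int jdbcBatchSize;

        public void setJdbcBatchSize(int jdbcBatchSize) {
            this.jdbcBatchSize = jdbcBatchSize;
        }

        public Batch buildBatch(Object key, SQLStatementLogger statementLogger, SQLExceptionHelper exceptionHelper) {
            return new LoggingBatch( key, statementLogger, exceptionHelper, jdbcBatchSize );
        }

        public static class LoggingBatch extends BatchingBatch {
            private final Object key;

            public LoggingBatch(Object key, SQLStatementLogger statementLogger, SQLExceptionHelper exceptionHelper, int jdbcBatchSize) {
                super( key, statementLogger, exceptionHelper, jdbcBatchSize );
                this.key = key;
            }

            protected void doExecuteBatch() {
                // log each batch execution, then delegate to the real batching logic
                System.out.println( "executing batch [" + key + "]" );
                super.doExecuteBatch();
            }
        }
    }

    // wiring, mirroring the configure() change above:
    //   cfg.setProperty( Environment.STATEMENT_BATCH_SIZE, "10" );
    //   cfg.setProperty( Environment.BATCH_STRATEGY, LoggingBatchBuilder.class.getName() );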
|
||||
|
|
|
@ -35,7 +35,6 @@ import org.hibernate.ConnectionReleaseMode;
|
|||
import org.hibernate.engine.jdbc.internal.LogicalConnectionImpl;
|
||||
import org.hibernate.engine.jdbc.spi.ConnectionObserver;
|
||||
import org.hibernate.engine.jdbc.internal.proxy.ProxyBuilder;
|
||||
import org.hibernate.jdbc.NonBatchingBatcherFactory;
|
||||
import org.hibernate.test.common.BasicTestingJdbcServiceImpl;
|
||||
import org.hibernate.testing.junit.UnitTestCase;
|
||||
|
||||
|
@ -135,8 +134,7 @@ public class AggressiveReleaseTest extends UnitTestCase {
|
|||
null,
|
||||
ConnectionReleaseMode.AFTER_STATEMENT,
|
||||
services,
|
||||
null,
|
||||
new NonBatchingBatcherFactory()
|
||||
null
|
||||
);
|
||||
Connection proxiedConnection = ProxyBuilder.buildConnection( logicalConnection );
|
||||
ConnectionCounter observer = new ConnectionCounter();
|
||||
|
@ -170,8 +168,7 @@ public class AggressiveReleaseTest extends UnitTestCase {
|
|||
null,
|
||||
ConnectionReleaseMode.AFTER_STATEMENT,
|
||||
services,
|
||||
null,
|
||||
new NonBatchingBatcherFactory()
|
||||
null
|
||||
);
|
||||
Connection proxiedConnection = ProxyBuilder.buildConnection( logicalConnection );
|
||||
ConnectionCounter observer = new ConnectionCounter();
|
||||
|
@ -230,8 +227,7 @@ public class AggressiveReleaseTest extends UnitTestCase {
|
|||
null,
|
||||
ConnectionReleaseMode.AFTER_STATEMENT,
|
||||
services,
|
||||
null,
|
||||
new NonBatchingBatcherFactory()
|
||||
null
|
||||
);
|
||||
Connection proxiedConnection = ProxyBuilder.buildConnection( logicalConnection );
|
||||
ConnectionCounter observer = new ConnectionCounter();
|
||||
|
|
|
@ -34,7 +34,6 @@ import org.hibernate.ConnectionReleaseMode;
|
|||
import org.hibernate.JDBCException;
|
||||
import org.hibernate.engine.jdbc.internal.LogicalConnectionImpl;
|
||||
import org.hibernate.engine.jdbc.internal.proxy.ProxyBuilder;
|
||||
import org.hibernate.jdbc.NonBatchingBatcherFactory;
|
||||
import org.hibernate.test.common.BasicTestingJdbcServiceImpl;
|
||||
import org.hibernate.testing.junit.UnitTestCase;
|
||||
|
||||
|
@ -63,8 +62,7 @@ public class BasicConnectionProxyTest extends UnitTestCase {
|
|||
null,
|
||||
ConnectionReleaseMode.AFTER_TRANSACTION,
|
||||
services,
|
||||
null,
|
||||
new NonBatchingBatcherFactory()
|
||||
null
|
||||
);
|
||||
Connection proxiedConnection = ProxyBuilder.buildConnection( logicalConnection );
|
||||
try {
|
||||
|
@ -92,8 +90,7 @@ public class BasicConnectionProxyTest extends UnitTestCase {
|
|||
null,
|
||||
ConnectionReleaseMode.AFTER_TRANSACTION,
|
||||
services,
|
||||
null,
|
||||
new NonBatchingBatcherFactory()
|
||||
null
|
||||
);
|
||||
Connection proxiedConnection = ProxyBuilder.buildConnection( logicalConnection );
|
||||
try {
|
||||
|
@ -115,8 +112,7 @@ public class BasicConnectionProxyTest extends UnitTestCase {
|
|||
null,
|
||||
ConnectionReleaseMode.AFTER_TRANSACTION,
|
||||
services,
|
||||
null,
|
||||
new NonBatchingBatcherFactory()
|
||||
null
|
||||
);
|
||||
Connection proxiedConnection = ProxyBuilder.buildConnection( logicalConnection );
|
||||
|
||||
|
|
|
@ -39,8 +39,8 @@
|
|||
<property name="heightInches">
|
||||
<column name="height_centimeters"
|
||||
not-null="true"
|
||||
read="height_centimeters / 2.54"
|
||||
write="? * 2.54"/>
|
||||
read="height_centimeters / 2.54E0"
|
||||
write="? * 2.54E0"/>
|
||||
</property>
|
||||
|
||||
<join table="address">
|
||||
|
@ -73,8 +73,8 @@
|
|||
<property name="login" column="u_login"/>
|
||||
<property name="passwordExpiryDays">
|
||||
<column name="pwd_expiry_weeks"
|
||||
read="pwd_expiry_weeks * 7.0"
|
||||
write="? / 7.0"/>
|
||||
read="pwd_expiry_weeks * 7.0E0"
|
||||
write="? / 7.0E0"/>
|
||||
</property>
|
||||
</join>
|
||||
<join table="t_silly" fetch="select" optional="true">
|
||||
|
|
|
@ -38,8 +38,8 @@
|
|||
<property name="heightInches">
|
||||
<column name="height_centimeters"
|
||||
not-null="true"
|
||||
read="height_centimeters / 2.54"
|
||||
write="? * 2.54"/>
|
||||
read="height_centimeters / 2.54E0"
|
||||
write="? * 2.54E0"/>
|
||||
</property>
|
||||
|
||||
<component name="address">
|
||||
|
@ -58,8 +58,8 @@
|
|||
<property name="passwordExpiryDays">
|
||||
<column name="pwd_expiry_weeks"
|
||||
not-null="true"
|
||||
read="pwd_expiry_weeks * 7.0"
|
||||
write="? / 7.0"/>
|
||||
read="pwd_expiry_weeks * 7.0E0"
|
||||
write="? / 7.0E0"/>
|
||||
</property>
|
||||
<many-to-one name="manager"/>
|
||||
</joined-subclass>
|
||||
|
|
|
@ -1939,7 +1939,7 @@ public class FooBarTest extends LegacyTestCase {
|
|||
.addOrder( Order.asc("date") )
|
||||
.list();
|
||||
assertTrue( list.size()==1 && list.get(0)==f );
|
||||
if(!(getDialect() instanceof TimesTenDialect)) {
|
||||
if(!(getDialect() instanceof TimesTenDialect || getDialect() instanceof HSQLDialect)) {
|
||||
list = s.createCriteria(Foo.class).setMaxResults(0).list();
|
||||
assertTrue( list.size()==0 );
|
||||
}
|
||||
|
|
|
@ -26,7 +26,11 @@ import java.util.List;
|
|||
import junit.framework.Test;
|
||||
import junit.framework.Assert;
|
||||
|
||||
import org.hibernate.engine.jdbc.batch.internal.BatchBuilder;
|
||||
import org.hibernate.engine.jdbc.batch.internal.NonBatchingBatch;
|
||||
import org.hibernate.engine.jdbc.batch.spi.Batch;
|
||||
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
|
||||
import org.hibernate.engine.jdbc.spi.SQLStatementLogger;
|
||||
import org.hibernate.testing.junit.functional.FunctionalTestCase;
|
||||
import org.hibernate.testing.junit.functional.FunctionalTestClassTestSuite;
|
||||
import org.hibernate.cfg.Configuration;
|
||||
|
@ -35,9 +39,6 @@ import org.hibernate.Session;
|
|||
import org.hibernate.Hibernate;
|
||||
import org.hibernate.Interceptor;
|
||||
import org.hibernate.EmptyInterceptor;
|
||||
import org.hibernate.jdbc.BatcherFactory;
|
||||
import org.hibernate.jdbc.NonBatchingBatcher;
|
||||
import org.hibernate.jdbc.Batcher;
|
||||
import org.hibernate.stat.CollectionStatistics;
|
||||
import org.hibernate.loader.collection.BatchingCollectionInitializer;
|
||||
import org.hibernate.persister.collection.AbstractCollectionPersister;
|
||||
|
@ -64,23 +65,23 @@ public class BatchedManyToManyTest extends FunctionalTestCase {
|
|||
public void configure(Configuration cfg) {
|
||||
cfg.setProperty( Environment.USE_SECOND_LEVEL_CACHE, "false" );
|
||||
cfg.setProperty( Environment.GENERATE_STATISTICS, "true" );
|
||||
cfg.setProperty( Environment.BATCH_STRATEGY, TestingBatcherFactory.class.getName() );
|
||||
cfg.setProperty( Environment.BATCH_STRATEGY, TestingBatchBuilder.class.getName() );
|
||||
}
|
||||
|
||||
public static class TestingBatcherFactory implements BatcherFactory {
|
||||
public static class TestingBatchBuilder extends BatchBuilder {
|
||||
private int jdbcBatchSize;
|
||||
|
||||
public void setJdbcBatchSize(int jdbcBatchSize) {
|
||||
this.jdbcBatchSize = jdbcBatchSize;
|
||||
}
|
||||
public Batcher createBatcher(SQLExceptionHelper exceptionHelper) {
|
||||
return new TestingBatcher( exceptionHelper, jdbcBatchSize );
|
||||
public Batch buildBatch(Object key, SQLStatementLogger statementLogger, SQLExceptionHelper exceptionHelper) {
|
||||
return new TestingBatch(key, statementLogger, exceptionHelper, jdbcBatchSize );
|
||||
}
|
||||
}
|
||||
|
||||
public static class TestingBatcher extends NonBatchingBatcher {
|
||||
public TestingBatcher(SQLExceptionHelper exceptionHelper, int jdbcBatchSize) {
|
||||
super( exceptionHelper );
|
||||
public static class TestingBatch extends NonBatchingBatch {
|
||||
public TestingBatch(Object key, SQLStatementLogger statementLogger, SQLExceptionHelper exceptionHelper, int jdbcBatchSize) {
|
||||
super( key, statementLogger, exceptionHelper );
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -39,8 +39,8 @@
|
|||
<property name="heightInches">
|
||||
<column name="height_centimeters"
|
||||
not-null="true"
|
||||
read="height_centimeters / 2.54"
|
||||
write="? * 2.54"/>
|
||||
read="height_centimeters / 2.54E0"
|
||||
write="? * 2.54E0"/>
|
||||
</property>
|
||||
|
||||
</class>
|
||||
|
@ -62,8 +62,8 @@
|
|||
<property name="heightInches">
|
||||
<column name="height_centimeters"
|
||||
not-null="true"
|
||||
read="height_centimeters / 2.54"
|
||||
write="? * 2.54"/>
|
||||
read="height_centimeters / 2.54E0"
|
||||
write="? * 2.54E0"/>
|
||||
</property>
|
||||
|
||||
</class>
|
||||
|
@ -92,7 +92,7 @@
|
|||
<property name="heightInches">
|
||||
<column name="height_centimeters"
|
||||
not-null="true"
|
||||
read="height_centimeters / 2.54"/>
|
||||
read="height_centimeters / 2.54E0"/>
|
||||
</property>
|
||||
|
||||
</class>
|
||||
|
|
|
@ -37,8 +37,8 @@
|
|||
<property name="heightInches">
|
||||
<column name="height_centimeters"
|
||||
not-null="true"
|
||||
read="height_centimeters / 2.54"
|
||||
write="? * 2.54"/>
|
||||
read="height_centimeters / 2.54E0"
|
||||
write="? * 2.54E0"/>
|
||||
</property>
|
||||
|
||||
<component name="address">
|
||||
|
@ -56,8 +56,8 @@
|
|||
<property name="passwordExpiryDays">
|
||||
<column name="pwd_expiry_weeks"
|
||||
not-null="true"
|
||||
read="pwd_expiry_weeks * 7.0"
|
||||
write="? / 7.0"/>
|
||||
read="pwd_expiry_weeks * 7.0E0"
|
||||
write="? / 7.0E0"/>
|
||||
</property>
|
||||
<many-to-one name="manager"/>
|
||||
</union-subclass>
|
||||
|
|
|
@ -8,10 +8,12 @@ log4j.rootLogger=info, stdout
|
|||
|
||||
log4j.logger.org.hibernate.test=info
|
||||
log4j.logger.org.hibernate.tool.hbm2ddl=debug
|
||||
log4j.logger.org.hibernate.engine.jdbc=trace
|
||||
log4j.logger.org.hibernate.engine.jdbc.internal=trace
|
||||
log4j.logger.org.hibernate.engine.jdbc.internal.proxy=trace
|
||||
log4j.logger.org.hibernate.engine.jdbc.batch.internal=trace
|
||||
log4j.logger.org.hibernate.hql.ast.QueryTranslatorImpl=trace
|
||||
log4j.logger.org.hibernate.hql.ast.HqlSqlWalker=trace
|
||||
log4j.logger.org.hibernate.hql.ast.SqlGenerator=trace
|
||||
log4j.logger.org.hibernate.hql.ast.AST=trace
|
||||
log4j.logger.org.hibernate.type.descriptor.sql.BasicBinder=trace
|
||||
log4j.logger.org.hibernate.type.BasicTypeRegistry=trace
|
||||
log4j.logger.org.hibernate.type.BasicTypeRegistry=trace
|
||||
|
|
|
@ -0,0 +1,54 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
|
||||
<!--
|
||||
~ Hibernate, Relational Persistence for Idiomatic Java
|
||||
~
|
||||
~ Copyright (c) 2010 by Red Hat Inc and/or its affiliates or by
|
||||
~ third-party contributors as indicated by either @author tags or express
|
||||
~ copyright attribution statements applied by the authors. All
|
||||
~ third-party contributions are distributed under license by Red Hat Inc.
|
||||
~
|
||||
~ This copyrighted material is made available to anyone wishing to use, modify,
|
||||
~ copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
~ Lesser General Public License, as published by the Free Software Foundation.
|
||||
~
|
||||
~ This program is distributed in the hope that it will be useful,
|
||||
~ but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
~ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
~ for more details.
|
||||
~
|
||||
~ You should have received a copy of the GNU Lesser General Public License
|
||||
~ along with this distribution; if not, write to:
|
||||
~ Free Software Foundation, Inc.
|
||||
~ 51 Franklin Street, Fifth Floor
|
||||
~ Boston, MA 02110-1301 USA
|
||||
-->
|
||||
|
||||
<entity-mappings xmlns="http://java.sun.com/xml/ns/persistence/orm"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm/orm_2_0.xsd"
|
||||
version="2.0">
|
||||
<package>org.hibernate.test.annotations.xml.ejb3</package>
|
||||
<entity class="Company" access="FIELD" metadata-complete="true">
|
||||
<attributes>
|
||||
<id name="id"/>
|
||||
<one-to-many name="organization" target-entity="VicePresident">
|
||||
<map-key-class class="java.lang.String"/>
|
||||
</one-to-many>
|
||||
<element-collection name="conferenceRoomExtensions" target-class="java.lang.String">
|
||||
<map-key-class class="java.lang.String"/>
|
||||
<map-key-column name="room_number"/>
|
||||
<column name="phone_extension"/>
|
||||
<collection-table name="phone_extension_lookup">
|
||||
<join-column name="company_id" referenced-column-name="id"/>
|
||||
</collection-table>
|
||||
</element-collection>
|
||||
</attributes>
|
||||
</entity>
|
||||
<entity class="VicePresident" access="FIELD" metadata-complete="true">
|
||||
<attributes>
|
||||
<id name="id"/>
|
||||
<basic name="name"/>
|
||||
</attributes>
|
||||
</entity>
|
||||
</entity-mappings>
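For comparison only (this commit deliberately keeps Company free of annotations and drives the mapping from orm4.xml with metadata-complete="true"), the element-collection above corresponds roughly to the following JPA 2.0 annotations; the class name is hypothetical.

    import java.util.HashMap;
    import java.util.Map;
    import javax.persistence.CollectionTable;
    import javax.persistence.Column;
    import javax.persistence.ElementCollection;
    import javax.persistence.Entity;
    import javax.persistence.Id;
    import javax.persistence.JoinColumn;
    import javax.persistence.MapKeyColumn;

    @Entity
    public class AnnotatedCompanySketch {
        @Id
        int id;

        // map key, value column, collection table and join column mirror the orm4.xml entries
        @ElementCollection
        @MapKeyColumn(name = "room_number")
        @Column(name = "phone_extension")
        @CollectionTable(name = "phone_extension_lookup",
                joinColumns = @JoinColumn(name = "company_id", referencedColumnName = "id"))
        Map<String, String> conferenceRoomExtensions = new HashMap<String, String>();
    }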
|
|
@ -2,6 +2,6 @@ apply plugin: 'java'
|
|||
|
||||
dependencies {
|
||||
compile( project( ':hibernate-core' ) )
|
||||
compile( [group: 'net.sf.ehcache', name: 'ehcache', version: '1.5.0'] )
|
||||
compile( [group: 'net.sf.ehcache', name: 'ehcache-core', version: '2.3.1'] )
|
||||
testCompile( project(':hibernate-core').sourceSets.test.classes )
|
||||
}
|
||||
}
|
||||
|
|
111
hibernate-ehcache/src/main/java/org/hibernate/cache/AbstractEhCacheRegionFactory.java
vendored
Normal file
|
@ -0,0 +1,111 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2007, Red Hat Middleware LLC or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* distributed under license by Red Hat Middleware LLC.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, as published by the Free Software Foundation.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
|
||||
* for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Lesser General Public License
|
||||
* along with this distribution; if not, write to:
|
||||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*/
|
||||
|
||||
package org.hibernate.cache;
|
||||
|
||||
import org.hibernate.cache.access.AccessType;
|
||||
import org.hibernate.cfg.Settings;
|
||||
|
||||
import java.util.Properties;
|
||||
|
||||
/**
|
||||
* Abstract class that will delegate all calls to org.hibernate.cache.RegionFactory to the instance it wraps.
|
||||
* This abstracts the Singleton CacheManager construct of Ehcache
|
||||
*
|
||||
* @author Alex Snaps
|
||||
*/
|
||||
class AbstractEhCacheRegionFactory implements RegionFactory {
|
||||
|
||||
private final RegionFactory underlyingRegionFactory;
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
protected AbstractEhCacheRegionFactory(RegionFactory regionFactory) {
|
||||
underlyingRegionFactory = regionFactory;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public final void start(final Settings settings, final Properties properties) throws CacheException {
|
||||
underlyingRegionFactory.start(settings, properties);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public final void stop() {
|
||||
underlyingRegionFactory.stop();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public final boolean isMinimalPutsEnabledByDefault() {
|
||||
return underlyingRegionFactory.isMinimalPutsEnabledByDefault();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public final AccessType getDefaultAccessType() {
|
||||
return underlyingRegionFactory.getDefaultAccessType();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public final long nextTimestamp() {
|
||||
return underlyingRegionFactory.nextTimestamp();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public final EntityRegion buildEntityRegion(final String regionName, final Properties properties, final CacheDataDescription metadata) throws CacheException {
|
||||
return underlyingRegionFactory.buildEntityRegion(regionName, properties, metadata);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public final CollectionRegion buildCollectionRegion(final String regionName, final Properties properties, final CacheDataDescription metadata) throws CacheException {
|
||||
return underlyingRegionFactory.buildCollectionRegion(regionName, properties, metadata);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public final QueryResultsRegion buildQueryResultsRegion(final String regionName, final Properties properties) throws CacheException {
|
||||
return underlyingRegionFactory.buildQueryResultsRegion(regionName, properties);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public final TimestampsRegion buildTimestampsRegion(final String regionName, final Properties properties) throws CacheException {
|
||||
return underlyingRegionFactory.buildTimestampsRegion(regionName, properties);
|
||||
}
|
||||
}
|
|
@ -23,15 +23,15 @@
|
|||
*/
|
||||
package org.hibernate.cache;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
|
||||
import net.sf.ehcache.CacheManager;
|
||||
import net.sf.ehcache.Element;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* EHCache plugin for Hibernate
|
||||
* <p/>
|
||||
|
|
|
@ -23,15 +23,15 @@
|
|||
*/
|
||||
package org.hibernate.cache;
|
||||
|
||||
import java.util.Properties;
|
||||
import java.net.URL;
|
||||
|
||||
import net.sf.ehcache.CacheManager;
|
||||
import org.hibernate.cfg.Environment;
|
||||
import org.hibernate.util.ConfigHelper;
|
||||
import org.hibernate.util.StringHelper;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.hibernate.cfg.Environment;
|
||||
import org.hibernate.util.StringHelper;
|
||||
import org.hibernate.util.ConfigHelper;
|
||||
|
||||
import java.net.URL;
|
||||
import java.util.Properties;
|
||||
|
||||
/**
|
||||
* Cache Provider plugin for Hibernate
|
||||
|
|
25
hibernate-core/src/main/java/org/hibernate/jdbc/BatcherFactory.java → hibernate-ehcache/src/main/java/org/hibernate/cache/EhCacheRegionFactory.java
vendored
Executable file → Normal file
|
@ -1,9 +1,9 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
|
||||
* Copyright (c) 2007, Red Hat Middleware LLC or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* statements applied by the authors. ÊAll third-party contributions are
|
||||
* distributed under license by Red Hat Middleware LLC.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
|
@ -20,19 +20,22 @@
|
|||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*
|
||||
*/
|
||||
package org.hibernate.jdbc;
|
||||
|
||||
import org.hibernate.Interceptor;
|
||||
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
|
||||
package org.hibernate.cache;
|
||||
|
||||
import java.util.Properties;
|
||||
|
||||
/**
|
||||
* Factory for <tt>Batcher</tt> instances.
|
||||
* @author Gavin King
|
||||
* Thin wrapper class around the within Ehcache-core packaged EhCacheRegionFactory.
|
||||
* It directly delegates to the wrapped instance, enabling users to upgrade Ehcache-core versions
|
||||
* by simply dropping in the new jar.
|
||||
*
|
||||
* @author Alex Snaps
|
||||
*/
|
||||
public interface BatcherFactory {
|
||||
public void setJdbcBatchSize(int jdbcBatchSize);
|
||||
public Batcher createBatcher(SQLExceptionHelper exceptionHelper);
|
||||
public final class EhCacheRegionFactory extends AbstractEhCacheRegionFactory {
|
||||
|
||||
public EhCacheRegionFactory(Properties properties) {
|
||||
super(new net.sf.ehcache.hibernate.EhCacheRegionFactory(properties));
|
||||
}
|
||||
}
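A hedged configuration sketch for selecting the wrapper; the property name hibernate.cache.region.factory_class is the usual region-factory setting and is assumed here, since the diff shows only the wrapper class itself.

    import org.hibernate.cfg.Configuration;

    public class EhCacheRegionFactoryConfigSketch {
        public static Configuration configure() {
            Configuration cfg = new Configuration();
            // region-factory property name assumed; not shown in this diff
            cfg.setProperty( "hibernate.cache.region.factory_class",
                    "org.hibernate.cache.EhCacheRegionFactory" );
            return cfg;
        }
    }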
|
|
@ -29,8 +29,8 @@ import net.sf.ehcache.util.ClassLoaderUtil;
|
|||
|
||||
import java.net.URL;
|
||||
import java.util.Properties;
|
||||
import java.util.logging.Logger;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
/**
|
||||
* Singleton cache Provider plugin for Hibernate 3.2 and ehcache-1.2. New in this provider is support for
|
||||
|
|
31
hibernate-core/src/main/java/org/hibernate/jdbc/BatchingBatcherFactory.java → hibernate-ehcache/src/main/java/org/hibernate/cache/SingletonEhCacheRegionFactory.java
vendored
Executable file → Normal file
|
@ -1,9 +1,9 @@
|
|||
/*
|
||||
* Hibernate, Relational Persistence for Idiomatic Java
|
||||
*
|
||||
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
|
||||
* Copyright (c) 2007, Red Hat Middleware LLC or third-party contributors as
|
||||
* indicated by the @author tags or express copyright attribution
|
||||
* statements applied by the authors. All third-party contributions are
|
||||
* statements applied by the authors. ÊAll third-party contributions are
|
||||
* distributed under license by Red Hat Middleware LLC.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
|
@ -20,29 +20,22 @@
|
|||
* Free Software Foundation, Inc.
|
||||
* 51 Franklin Street, Fifth Floor
|
||||
* Boston, MA 02110-1301 USA
|
||||
*
|
||||
*/
|
||||
package org.hibernate.jdbc;
|
||||
|
||||
import org.hibernate.engine.jdbc.spi.SQLExceptionHelper;
|
||||
package org.hibernate.cache;
|
||||
|
||||
import java.util.Properties;
|
||||
|
||||
/**
|
||||
* A BatcherFactory implementation which constructs Batcher instances
|
||||
* capable of actually performing batch operations.
|
||||
*
|
||||
* @author Gavin King
|
||||
* Thin wrapper class around the within Ehcache-core packaged SingletonEhCacheRegionFactory.
|
||||
* It directly delegates to the wrapped instance, enabling user to upgrade the Ehcache-core version
|
||||
* by simply dropping in a new jar.
|
||||
*
|
||||
* @author Alex Snaps
|
||||
*/
|
||||
public class BatchingBatcherFactory implements BatcherFactory {
|
||||
public final class SingletonEhCacheRegionFactory extends AbstractEhCacheRegionFactory {
|
||||
|
||||
private int jdbcBatchSize;
|
||||
|
||||
public void setJdbcBatchSize(int jdbcBatchSize) {
|
||||
this.jdbcBatchSize = jdbcBatchSize;
|
||||
public SingletonEhCacheRegionFactory(Properties properties) {
|
||||
super(new net.sf.ehcache.hibernate.SingletonEhCacheRegionFactory(properties));
|
||||
}
|
||||
|
||||
public Batcher createBatcher(SQLExceptionHelper exceptionHelper) {
|
||||
return new BatchingBatcher( exceptionHelper, jdbcBatchSize );
|
||||
}
|
||||
|
||||
}
|
|
@ -252,7 +252,7 @@ public class CriteriaBuilderImpl implements CriteriaBuilder, Serializable {
|
|||
|
||||
public Predicate wrap(Expression<Boolean> expression) {
|
||||
if ( Predicate.class.isInstance( expression ) ) {
|
||||
return ( ( Predicate ) expression );
|
||||
return ( (Predicate) expression );
|
||||
}
|
||||
else if ( PathImplementor.class.isInstance( expression ) ) {
|
||||
return new BooleanAssertionPredicate( this, expression, Boolean.TRUE );
|
||||
|
@ -340,7 +340,7 @@ public class CriteriaBuilderImpl implements CriteriaBuilder, Serializable {
|
|||
if ( predicate.getExpressions().size() == 0 ) {
|
||||
return new BooleanStaticAssertionPredicate(
|
||||
this,
|
||||
predicate.getOperator() == Predicate.BooleanOperator.OR
|
||||
predicate.getOperator() == Predicate.BooleanOperator.OR
|
||||
);
|
||||
}
|
||||
predicate.not();
|
||||
|
@ -1363,7 +1363,7 @@ public class CriteriaBuilderImpl implements CriteriaBuilder, Serializable {
|
|||
}
|
||||
return new MemberOfPredicate<E, C>(
|
||||
this,
|
||||
e,
|
||||
e,
|
||||
(PluralAttributePath<C>)collectionExpression
|
||||
);
|
||||
}
|
||||
|
|
|
@ -24,15 +24,15 @@

package org.hibernate.ejb.criteria.predicate;

import java.io.Serializable;
import java.util.List;
import java.util.ArrayList;
import java.util.Arrays;
import javax.persistence.criteria.Predicate;
import java.util.List;
import javax.persistence.criteria.Expression;
import javax.persistence.criteria.Predicate;

import org.hibernate.ejb.criteria.ParameterRegistry;
import org.hibernate.ejb.criteria.CriteriaBuilderImpl;
import org.hibernate.ejb.criteria.CriteriaQueryCompiler;
import org.hibernate.ejb.criteria.ParameterRegistry;
import org.hibernate.ejb.criteria.Renderable;

/**

@ -44,15 +44,15 @@ import org.hibernate.ejb.criteria.Renderable;

public class CompoundPredicate
extends AbstractPredicateImpl
implements Serializable {
private final BooleanOperator operator;
private BooleanOperator operator;
private final List<Expression<Boolean>> expressions = new ArrayList<Expression<Boolean>>();

/**
* Constructs an empty conjunction or disjunction.
*
* @param criteriaBuilder The query builder from whcih this originates.
* @param operator Indicates whether this predicate will funtion
* as a conjunction or disjuntion.
* @param criteriaBuilder The query builder from which this originates.
* @param operator Indicates whether this predicate will function
* as a conjunction or disjunction.
*/
public CompoundPredicate(CriteriaBuilderImpl criteriaBuilder, BooleanOperator operator) {
super( criteriaBuilder );

@ -63,31 +63,31 @@ public class CompoundPredicate

* Constructs a conjunction or disjunction over the given expressions.
*
* @param criteriaBuilder The query builder from which this originates.
* @param operator Indicates whether this predicate will funtion
* as a conjunction or disjuntion.
* @param operator Indicates whether this predicate will function
* as a conjunction or disjunction.
* @param expressions The expressions to be grouped.
*/
public CompoundPredicate(
CriteriaBuilderImpl criteriaBuilder,
BooleanOperator operator,
Expression<Boolean>... expressions) {
this( criteriaBuilder, operator );
this( criteriaBuilder, operator );
applyExpressions( expressions );
}

/**
* Constructs a conjunction or disjunction over the given expressions.
*
* @param criteriaBuilder The query builder from whcih this originates.
* @param operator Indicates whether this predicate will funtion
* as a conjunction or disjuntion.
* @param criteriaBuilder The query builder from which this originates.
* @param operator Indicates whether this predicate will function
* as a conjunction or disjunction.
* @param expressions The expressions to be grouped.
*/
public CompoundPredicate(
CriteriaBuilderImpl criteriaBuilder,
BooleanOperator operator,
List<Expression<Boolean>> expressions) {
this( criteriaBuilder, operator );
this( criteriaBuilder, operator );
applyExpressions( expressions );
}

@ -110,7 +110,7 @@ public class CompoundPredicate

public void registerParameters(ParameterRegistry registry) {
for ( Expression expression : getExpressions() ) {
Helper.possibleParameter(expression, registry);
Helper.possibleParameter( expression, registry );
}
}

@ -125,7 +125,7 @@ public class CompoundPredicate

: "0=1"; // false
}
if ( getExpressions().size() == 1 ) {
return ( (Renderable) getExpressions().get(0) ).render( renderingContext );
return ( (Renderable) getExpressions().get( 0 ) ).render( renderingContext );
}
final StringBuilder buffer = new StringBuilder();
String sep = "";

@ -148,4 +148,29 @@ public class CompoundPredicate

public String renderProjection(CriteriaQueryCompiler.RenderingContext renderingContext) {
return render( renderingContext );
}

/**
* Create negation of compound predicate by using logic rules:
* 1. not (x || y) is (not x && not y)
* 2. not (x && y) is (not x || not y)
*/
@Override
public Predicate not() {
toggleOperator();
for ( Expression expr : this.getExpressions() ) {
if ( Predicate.class.isInstance( expr ) ) {
( (Predicate) expr ).not();
}
}
return this;
}

private void toggleOperator() {
if ( this.operator == BooleanOperator.AND ) {
this.operator = BooleanOperator.OR;
}
else {
this.operator = BooleanOperator.AND;
}
}
}

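The new not() above applies De Morgan's laws through the Criteria API. Purely as a rough illustration (not part of the patch), the sketch below shows the effect via the public javax.persistence.criteria API, assuming the same Order entity with a String "id" attribute that PredicateTest uses further down; the helper class and method names are hypothetical.

// Hypothetical sketch of CompoundPredicate.not() behaviour; class/method names are assumptions, not part of the commit.
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import org.hibernate.ejb.metamodel.Order;

public class CompoundNegationSketch {
	public static List<Order> ordersOtherThan(EntityManager em, String firstId, String secondId) {
		CriteriaBuilder cb = em.getCriteriaBuilder();
		CriteriaQuery<Order> query = cb.createQuery( Order.class );
		Root<Order> root = query.from( Order.class );

		Predicate first = cb.equal( root.get( "id" ), firstId );
		Predicate second = cb.equal( root.get( "id" ), secondId );

		// not( first OR second ) toggles the compound operator to AND and negates each operand,
		// so the query selects orders whose id matches neither argument.
		query.select( root ).where( cb.not( cb.or( first, second ) ) );
		return em.createQuery( query ).getResultList();
	}
}
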
@ -23,9 +23,11 @@

*/
package org.hibernate.ejb.criteria.basic;

import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;

import org.hibernate.ejb.metamodel.AbstractMetamodelSpecificTest;

@ -35,33 +37,121 @@ import org.hibernate.ejb.metamodel.Order;

* Test the various predicates.
*
* @author Steve Ebersole
* @author Hardy Ferentschik
*/
public class PredicateTest extends AbstractMetamodelSpecificTest {
public void testEmptyConjunction() {
// yes this is a retarded case, but explicitly allowed in the JPA spec
CriteriaBuilder builder = factory.getCriteriaBuilder();
EntityManager em = getOrCreateEntityManager();

private EntityManager em;
private CriteriaBuilder builder;

public void setUp() throws Exception {
super.setUp();
builder = factory.getCriteriaBuilder();
em = getOrCreateEntityManager();
createTestOrders();
em.getTransaction().begin();
CriteriaQuery<Order> orderCriteria = builder.createQuery(Order.class);
Root<Order> orderRoot = orderCriteria.from(Order.class);
orderCriteria.select(orderRoot);
orderCriteria.where( builder.isTrue( builder.conjunction() ) );
em.createQuery( orderCriteria ).getResultList();
}

public void tearDown() throws Exception {
em.getTransaction().commit();
em.close();
super.tearDown();
}

public void testEmptyConjunction() {
// yes this is a retarded case, but explicitly allowed in the JPA spec
CriteriaQuery<Order> orderCriteria = builder.createQuery( Order.class );
Root<Order> orderRoot = orderCriteria.from( Order.class );
orderCriteria.select( orderRoot );
orderCriteria.where( builder.isTrue( builder.conjunction() ) );
em.createQuery( orderCriteria ).getResultList();

List<Order> orders = em.createQuery( orderCriteria ).getResultList();
assertTrue( orders.size() == 3 );
}

public void testEmptyDisjunction() {
// yes this is a retarded case, but explicitly allowed in the JPA spec
CriteriaBuilder builder = factory.getCriteriaBuilder();
EntityManager em = getOrCreateEntityManager();
em.getTransaction().begin();
CriteriaQuery<Order> orderCriteria = builder.createQuery(Order.class);
Root<Order> orderRoot = orderCriteria.from(Order.class);
orderCriteria.select(orderRoot);
CriteriaQuery<Order> orderCriteria = builder.createQuery( Order.class );
Root<Order> orderRoot = orderCriteria.from( Order.class );
orderCriteria.select( orderRoot );
orderCriteria.where( builder.isFalse( builder.disjunction() ) );
em.createQuery( orderCriteria ).getResultList();

List<Order> orders = em.createQuery( orderCriteria ).getResultList();
assertTrue( orders.size() == 3 );
}

/**
* Check simple not.
*/
public void testSimpleNot() {
CriteriaQuery<Order> orderCriteria = builder.createQuery( Order.class );
Root<Order> orderRoot = orderCriteria.from( Order.class );

orderCriteria.select( orderRoot );
orderCriteria.where( builder.not( builder.equal( orderRoot.get( "id" ), "order-1" ) ) );

List<Order> orders = em.createQuery( orderCriteria ).getResultList();
assertTrue( orders.size() == 2 );
}

/**
* Check complicated not.
*/
public void testComplicatedNotOr() {
CriteriaQuery<Order> orderCriteria = builder.createQuery( Order.class );
Root<Order> orderRoot = orderCriteria.from( Order.class );

orderCriteria.select( orderRoot );
Predicate p1 = builder.equal( orderRoot.get( "id" ), "order-1" );
Predicate p2 = builder.equal( orderRoot.get( "id" ), "order-2" );
orderCriteria.where( builder.not( builder.or( p1, p2 ) ) );

List<Order> orders = em.createQuery( orderCriteria ).getResultList();
assertTrue( orders.size() == 1 );
Order order = orders.get( 0 );
assertEquals( "order-3", order.getId() );
}

/**
* Check complicated not.
*/
public void testNotMultipleOr() {
CriteriaQuery<Order> orderCriteria = builder.createQuery( Order.class );
Root<Order> orderRoot = orderCriteria.from( Order.class );

orderCriteria.select( orderRoot );
Predicate p1 = builder.equal( orderRoot.get( "id" ), "order-1" );
Predicate p2 = builder.equal( orderRoot.get( "id" ), "order-2" );
Predicate p3 = builder.equal( orderRoot.get( "id" ), "order-3" );
orderCriteria.where( builder.not( builder.or( p1, p2, p3 ) ) );

List<Order> orders = em.createQuery( orderCriteria ).getResultList();
assertTrue( orders.size() == 0 );
}

/**
* Check complicated not.
*/
public void testComplicatedNotAnd() {
CriteriaQuery<Order> orderCriteria = builder.createQuery( Order.class );
Root<Order> orderRoot = orderCriteria.from( Order.class );

orderCriteria.select( orderRoot );
Predicate p1 = builder.equal( orderRoot.get( "id" ), "order-1" );
Predicate p2 = builder.equal( orderRoot.get( "id" ), "order-2" );
orderCriteria.where( builder.not( builder.and( p1, p2 ) ) );

List<Order> orders = em.createQuery( orderCriteria ).getResultList();
assertTrue( orders.size() == 3 );
}

private void createTestOrders() {
em.getTransaction().begin();
em.persist( new Order( "order-1", 1.0d ) );
em.persist( new Order( "order-2", 10.0d ) );
em.persist( new Order( "order-3", 100.0d ) );
em.getTransaction().commit();
em.close();
}
}

@ -82,7 +82,6 @@ public class Tools {

proxy.getHibernateLazyInitializer().getEntityName(),
proxy.getHibernateLazyInitializer().getIdentifier()
);
proxy.getHibernateLazyInitializer().setImplementation( target );
return target;
}
finally {

@ -0,0 +1,99 @@

/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Middleware LLC.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.envers.test.integration.proxy;

import javax.persistence.EntityManager;

import org.hibernate.ejb.Ejb3Configuration;
import org.hibernate.envers.test.AbstractEntityTest;
import org.hibernate.envers.test.entities.onetomany.ListRefEdEntity;
import org.hibernate.envers.test.entities.onetomany.ListRefIngEntity;
import org.hibernate.proxy.HibernateProxy;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

/**
* Test case for HHH-5750: Proxied objects lose the temporary session used to
* initialize them.
*
* @author Erik-Berndt Scheper
*
*/
public class AuditedCollectionProxyTest extends AbstractEntityTest {

Integer id_ListRefEdEntity1;

public void configure(Ejb3Configuration cfg) {
cfg.addAnnotatedClass(ListRefEdEntity.class);
cfg.addAnnotatedClass(ListRefIngEntity.class);
}

@BeforeClass(dependsOnMethods = "init")
public void initData() {
EntityManager em = getEntityManager();

ListRefEdEntity listReferencedEntity1 = new ListRefEdEntity(
Integer.valueOf(1), "str1");
ListRefIngEntity refingEntity1 = new ListRefIngEntity(
Integer.valueOf(1), "refing1", listReferencedEntity1);

// Revision 1
em.getTransaction().begin();
em.persist(listReferencedEntity1);
em.persist(refingEntity1);
em.getTransaction().commit();

id_ListRefEdEntity1 = listReferencedEntity1.getId();

// Revision 2
ListRefIngEntity refingEntity2 = new ListRefIngEntity(
Integer.valueOf(2), "refing2", listReferencedEntity1);

em.getTransaction().begin();
em.persist(refingEntity2);
em.getTransaction().commit();
}

@Test
public void testProxyIdentifier() {
EntityManager em = getEntityManager();

ListRefEdEntity listReferencedEntity1 = em.getReference(
ListRefEdEntity.class, id_ListRefEdEntity1);

assert listReferencedEntity1 instanceof HibernateProxy;

// Revision 3
ListRefIngEntity refingEntity3 = new ListRefIngEntity(
Integer.valueOf(3), "refing3", listReferencedEntity1);

em.getTransaction().begin();
em.persist(refingEntity3);
em.getTransaction().commit();

listReferencedEntity1.getReffering().size();

}

}

@ -1,5 +1,5 @@

dependencies {
infinispanVersion = '4.2.0.ALPHA3'
infinispanVersion = '4.2.0.CR4'
jnpVersion = '5.0.3.GA'

compile(project(':hibernate-core'))

@ -169,7 +169,7 @@ public abstract class BaseRegion implements Region {

}

public boolean checkValid() {
boolean valid = invalidateState.get() == InvalidateState.VALID;
boolean valid = isValid();
if (!valid) {
synchronized (invalidationMutex) {
if (invalidateState.compareAndSet(InvalidateState.INVALID, InvalidateState.CLEARING)) {

@ -188,26 +188,34 @@

}
}
}
valid = invalidateState.get() == InvalidateState.VALID;
valid = isValid();
}

return valid;
}

protected boolean isValid() {
return invalidateState.get() == InvalidateState.VALID;
}

/**
* Performs a Infinispan <code>get(Fqn, Object)</code>
*
* @param key The key of the item to get
* @param opt any option to add to the get invocation. May be <code>null</code>
* @param suppressTimeout should any TimeoutException be suppressed?
* @param flagAdapters flags to add to the get invocation
* @return The retrieved object
* @throws CacheException issue managing transaction or talking to cache
* @throws CacheException issue managing transaction or talking to cache
*/
protected Object get(Object key, FlagAdapter opt, boolean suppressTimeout) throws CacheException {
protected Object get(Object key, boolean suppressTimeout, FlagAdapter... flagAdapters) throws CacheException {
CacheAdapter localCacheAdapter = cacheAdapter;
if (flagAdapters != null && flagAdapters.length > 0)
localCacheAdapter = cacheAdapter.withFlags(flagAdapters);

if (suppressTimeout)
return cacheAdapter.getAllowingTimeout(key);
return localCacheAdapter.getAllowingTimeout(key);
else
return cacheAdapter.get(key);
return localCacheAdapter.get(key);
}

public Object getOwnerForPut() {

@ -295,4 +303,4 @@ public abstract class BaseRegion implements Region {

}
}

}
}

@ -47,6 +47,13 @@ public class QueryResultsRegionImpl extends BaseTransactionalDataRegion implemen

}

public Object get(Object key) throws CacheException {
// If the region is not valid, skip cache store to avoid going remote to retrieve the query.
// The aim of this is to maintain same logic/semantics as when state transfer was configured.
// TODO: Once https://issues.jboss.org/browse/ISPN-835 has been resolved, revert to state transfer and remove workaround
boolean skipCacheStore = false;
if (!isValid())
skipCacheStore = true;

if (!checkValid())
return null;

@ -55,7 +62,10 @@ public class QueryResultsRegionImpl extends BaseTransactionalDataRegion implemen

// to avoid holding locks that would prevent updates.
// Add a zero (or low) timeout option so we don't block
// waiting for tx's that did a put to commit
return get(key, FlagAdapter.ZERO_LOCK_ACQUISITION_TIMEOUT, true);
if (skipCacheStore)
return get(key, true, FlagAdapter.ZERO_LOCK_ACQUISITION_TIMEOUT, FlagAdapter.SKIP_CACHE_STORE);
else
return get(key, true, FlagAdapter.ZERO_LOCK_ACQUISITION_TIMEOUT);
}

public void put(Object key, Object value) throws CacheException {

@ -83,5 +93,4 @@ public class QueryResultsRegionImpl extends BaseTransactionalDataRegion implemen

.putAllowingTimeout(key, value);
}
}

}

@ -57,8 +57,20 @@ public class TimestampsRegionImpl extends BaseGeneralDataRegion implements Times

public Object get(Object key) throws CacheException {
Object value = localCache.get(key);

// If the region is not valid, skip cache store to avoid going remote to retrieve the query.
// The aim of this is to maintain same logic/semantics as when state transfer was configured.
// TODO: Once https://issues.jboss.org/browse/ISPN-835 has been resolved, revert to state transfer and remove workaround
boolean skipCacheStore = false;
if (!isValid())
skipCacheStore = true;

if (value == null && checkValid()) {
value = get(key, null, false);
if (skipCacheStore)
value = get(key, false, FlagAdapter.SKIP_CACHE_STORE);
else
value = get(key, false);

if (value != null)
localCache.put(key, value);
}

@ -50,12 +50,12 @@ public class CacheHelper {

public static void initInternalEvict(CacheAdapter cacheAdapter, AddressAdapter member) {
EvictAll eKey = new EvictAll(member == null ? NoAddress.INSTANCE : member);
cacheAdapter.withFlags(FlagAdapter.CACHE_MODE_LOCAL).put(eKey, Internal.INIT);
cacheAdapter.withFlags(FlagAdapter.CACHE_MODE_LOCAL, FlagAdapter.SKIP_CACHE_LOAD).put(eKey, Internal.INIT);
}

public static void sendEvictAllNotification(CacheAdapter cacheAdapter, AddressAdapter member) {
EvictAll eKey = new EvictAll(member == null ? NoAddress.INSTANCE : member);
cacheAdapter.put(eKey, Internal.EVICT);
cacheAdapter.withFlags(FlagAdapter.SKIP_CACHE_LOAD).put(eKey, Internal.EVICT);
}

public static boolean isEvictAllNotification(Object key) {

@ -35,7 +35,9 @@ public enum FlagAdapter {

ZERO_LOCK_ACQUISITION_TIMEOUT,
CACHE_MODE_LOCAL,
FORCE_ASYNCHRONOUS,
FORCE_SYNCHRONOUS;
FORCE_SYNCHRONOUS,
SKIP_CACHE_STORE,
SKIP_CACHE_LOAD;

Flag toFlag() {
switch(this) {

@ -47,6 +49,10 @@

return Flag.FORCE_ASYNCHRONOUS;
case FORCE_SYNCHRONOUS:
return Flag.FORCE_SYNCHRONOUS;
case SKIP_CACHE_STORE:
return Flag.SKIP_CACHE_STORE;
case SKIP_CACHE_LOAD:
return Flag.SKIP_CACHE_LOAD;
default:
throw new CacheException("Unmatched Infinispan flag " + this);
}

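The two new adapter values simply surface Infinispan's SKIP_CACHE_STORE and SKIP_CACHE_LOAD flags through the module's FlagAdapter abstraction. As a rough, hypothetical sketch only (not part of the patch; the helper class name and the package of the imports are assumptions based on the surrounding module layout), this is the kind of call site they enable, mirroring the CacheHelper change above:

// Hypothetical helper, not part of the commit; cacheAdapter, key and value come from the caller.
// Import package names are assumptions.
import org.hibernate.cache.infinispan.util.CacheAdapter;
import org.hibernate.cache.infinispan.util.FlagAdapter;

public class LocalOnlyWriteSketch {
	static void putWithoutCacheLoader(CacheAdapter cacheAdapter, Object key, Object value) {
		// Keep the write node-local and bypass the cluster cache loader,
		// the same flag combination used by CacheHelper.initInternalEvict(...) above.
		cacheAdapter.withFlags(FlagAdapter.CACHE_MODE_LOCAL, FlagAdapter.SKIP_CACHE_LOAD).put(key, value);
	}
}
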
@ -82,7 +82,6 @@

<!-- A query cache that replicates queries. Replication is asynchronous. -->
<namedCache name="replicated-query">
<clustering mode="replication">
<stateRetrieval fetchInMemoryState="false"/>
<async/>
</clustering>
<locking isolationLevel="READ_COMMITTED" concurrencyLevel="1000"

@ -91,6 +90,16 @@

the eviction thread will never run. A separate executor is used for eviction in each cache. -->
<eviction wakeUpInterval="5000" maxEntries="10000" strategy="LRU"/>
<expiration maxIdle="100000"/>
<!-- State transfer forces all replication calls to be synchronous,
so for calls to remain async, use a cluster cache loader instead -->
<loaders passivation="false" shared="false" preload="false">
<loader class="org.infinispan.loaders.cluster.ClusterCacheLoader" fetchPersistentState="false"
ignoreModifications="false" purgeOnStartup="false">
<properties>
<property name="remoteCallTimeout" value="20000"/>
</properties>
</loader>
</loaders>
</namedCache>

<!-- Optimized for timestamp caching. A clustered timestamp cache

@ -98,7 +107,6 @@

itself is configured with CacheMode=LOCAL. -->
<namedCache name="timestamps">
<clustering mode="replication">
<stateRetrieval fetchInMemoryState="true" timeout="20000"/>
<async/>
</clustering>
<locking isolationLevel="READ_COMMITTED" concurrencyLevel="1000"

@ -106,6 +114,16 @@

<lazyDeserialization enabled="true"/>
<!-- Don't ever evict modification timestamps -->
<eviction wakeUpInterval="0" strategy="NONE"/>
<!-- State transfer forces all replication calls to be synchronous,
so for calls to remain async, use a cluster cache loader instead -->
<loaders passivation="false" shared="false" preload="false">
<loader class="org.infinispan.loaders.cluster.ClusterCacheLoader" fetchPersistentState="false"
ignoreModifications="false" purgeOnStartup="false">
<properties>
<property name="remoteCallTimeout" value="20000"/>
</properties>
</loader>
</loaders>
</namedCache>

</infinispan>