HHH-9803 - Checkstyle fix ups
parent bb3998bf34
commit 611f8a0e1c

@@ -29,5 +29,5 @@ package org.hibernate.boot.jaxb.hbm.spi;
* @author Steve Ebersole
*/
public interface SubEntityInfo extends EntityInfo {
String getExtends();
String getExtends();
}

@@ -50,8 +50,8 @@ import org.hibernate.exception.spi.SQLExceptionConversionDelegate;
import org.hibernate.exception.spi.TemplatedViolatedConstraintNameExtracter;
import org.hibernate.exception.spi.ViolatedConstraintNameExtracter;
import org.hibernate.hql.spi.id.IdTableSupportStandardImpl;
import org.hibernate.hql.spi.id.global.GlobalTemporaryTableBulkIdStrategy;
import org.hibernate.hql.spi.id.MultiTableBulkIdStrategy;
import org.hibernate.hql.spi.id.global.GlobalTemporaryTableBulkIdStrategy;
import org.hibernate.hql.spi.id.local.AfterUseAction;
import org.hibernate.internal.util.JdbcExceptionHelper;
import org.hibernate.procedure.internal.StandardCallableStatementSupport;
@@ -78,8 +78,7 @@ public class Oracle8iDialect extends Dialect {
final boolean hasOffset = LimitHelper.hasFirstRow( selection );
sql = sql.trim();
boolean isForUpdate = false;
if (sql.toLowerCase(Locale.ROOT
).endsWith( " for update" )) {
if (sql.toLowerCase(Locale.ROOT).endsWith( " for update" )) {
sql = sql.substring( 0, sql.length() - 11 );
isForUpdate = true;
}

@@ -35,7 +35,6 @@ import org.hibernate.internal.util.StringHelper;
import org.hibernate.persister.collection.QueryableCollection;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.Queryable;
import org.hibernate.sql.JoinFragment;
import org.hibernate.sql.JoinType;
import org.hibernate.type.CollectionType;
import org.hibernate.type.EntityType;
@@ -61,8 +60,8 @@ public class DotNode extends FromReferenceNode implements DisplayableNode, Selec
public static boolean useThetaStyleImplicitJoins;
public static boolean regressionStyleJoinSuppression;

public static interface IllegalCollectionDereferenceExceptionBuilder {
public QueryException buildIllegalCollectionDereferenceException(
public interface IllegalCollectionDereferenceExceptionBuilder {
QueryException buildIllegalCollectionDereferenceException(
String collectionPropertyName,
FromReferenceNode lhs);
}
@@ -126,7 +125,7 @@ public class DotNode extends FromReferenceNode implements DisplayableNode, Selec
*
* @param joinType The type of join to use.
*
* @see JoinFragment
* @see org.hibernate.sql.JoinFragment
*/
public void setJoinType(JoinType joinType) {
this.joinType = joinType;

@@ -90,7 +90,9 @@ public class PreprocessingParser implements Parser {
}

//ignore whitespace
if ( ParserHelper.isWhitespace( token ) ) return;
if ( ParserHelper.isWhitespace( token ) ) {
return;
}

//do replacements
String substoken = ( String ) replacements.get( token );

@@ -25,7 +25,6 @@ package org.hibernate.internal;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
@@ -68,7 +68,6 @@ import org.hibernate.ReplicationMode;
import org.hibernate.SQLQuery;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;
import org.hibernate.SessionBuilder;
import org.hibernate.SessionEventListener;
import org.hibernate.SessionException;
@@ -169,7 +168,7 @@ import org.hibernate.stat.internal.SessionStatisticsImpl;
* Concrete implementation of a Session.
* <p/>
* Exposes two interfaces:<ul>
* <li>{@link Session} to the application</li>
* <li>{@link org.hibernate.Session} to the application</li>
* <li>{@link org.hibernate.engine.spi.SessionImplementor} to other Hibernate components (SPI)</li>
* </ul>
* <p/>

@@ -1504,9 +1504,7 @@ public class BoundedConcurrentHashMap<K, V> extends AbstractMap<K, V>
// Reuse trailing consecutive sequence at same slot
HashEntry<K, V> lastRun = e;
int lastIdx = idx;
for ( HashEntry<K, V> last = next;
last != null;
last = last.next ) {
for ( HashEntry<K, V> last = next; last != null; last = last.next ) {
int k = last.hash & sizeMask;
if ( k != lastIdx ) {
lastIdx = k;

@@ -42,7 +42,6 @@ import java.util.AbstractSet;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Enumeration;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
@@ -181,7 +180,7 @@ public class ConcurrentReferenceHashMap<K, V> extends AbstractMap<K, V>
public static enum Option {
/**
* Indicates that referential-equality (== instead of .equals()) should
* be used when locating keys. This offers similar behavior to {@link IdentityHashMap}
* be used when locating keys. This offers similar behavior to {@link java.util.IdentityHashMap}
*/
IDENTITY_COMPARISONS
}
@@ -814,9 +813,7 @@ public class ConcurrentReferenceHashMap<K, V> extends AbstractMap<K, V>
// Reuse trailing consecutive sequence at same slot
HashEntry<K, V> lastRun = e;
int lastIdx = idx;
for ( HashEntry<K, V> last = next;
last != null;
last = last.next ) {
for ( HashEntry<K, V> last = next; last != null; last = last.next ) {
int k = last.hash & sizeMask;
if ( k != lastIdx ) {
lastIdx = k;
@@ -1674,8 +1671,7 @@ public class ConcurrentReferenceHashMap<K, V> extends AbstractMap<K, V>
/*
* This class is needed for JDK5 compatibility.
*/
static class SimpleEntry<K, V> implements Entry<K, V>,
java.io.Serializable {
static class SimpleEntry<K, V> implements Entry<K, V>, java.io.Serializable {
private static final long serialVersionUID = -8499721149061103585L;

private final K key;

@@ -29,16 +29,15 @@ import java.util.Iterator;
/**
* @author Gavin King
*/
public final class SingletonIterator implements Iterator {

private Object value;
public final class SingletonIterator<T> implements Iterator<T> {
private T value;
private boolean hasNext = true;

public boolean hasNext() {
return hasNext;
}

public Object next() {
public T next() {
if (hasNext) {
hasNext = false;
return value;
@@ -52,7 +51,7 @@ public final class SingletonIterator implements Iterator {
throw new UnsupportedOperationException();
}

public SingletonIterator(Object value) {
public SingletonIterator(T value) {
this.value = value;
}

@@ -97,8 +97,6 @@ import org.hibernate.type.EntityType;
import org.hibernate.type.Type;
import org.hibernate.type.VersionType;

import org.jboss.logging.Logger;

/**
* Abstract superclass of object loading (and querying) strategies. This class implements
* useful common functionality that concrete loaders delegate to. It is not intended that this
@@ -250,13 +248,13 @@ public abstract class Loader {
Dialect dialect,
List<AfterLoadAction> afterLoadActions) throws HibernateException {
sql = applyLocks( sql, parameters, dialect, afterLoadActions );


// Keep this here, rather than moving to Select. Some Dialects may need the hint to be appended to the very
// end or beginning of the finalized SQL statement, so wait until everything is processed.
if ( parameters.getQueryHints() != null && parameters.getQueryHints().size() > 0 ) {
sql = dialect.getQueryHintString( sql, parameters.getQueryHints() );
}


return getFactory().getSessionFactoryOptions().isCommentsEnabled()
? prependComment( sql, parameters )
: sql;
@@ -278,7 +276,10 @@ public abstract class Loader {
new AfterLoadAction() {
@Override
public void afterLoad(SessionImplementor session, Object entity, Loadable persister) {
( (Session) session ).buildLockRequest( lockOptions ).lock( persister.getEntityName(), entity );
( (Session) session ).buildLockRequest( lockOptions ).lock(
persister.getEntityName(),
entity
);
}
}
);
@@ -370,7 +371,9 @@ public abstract class Loader {
* @param session The session from which the request originated.
* @param queryParameters The query parameters specified by the user.
* @param returnProxies Should proxies be generated
*
* @return The loaded "row".
*
* @throws HibernateException
*/
public Object loadSingleRow(
@@ -394,14 +397,14 @@ public abstract class Loader {
hydratedObjects,
new EntityKey[entitySpan],
returnProxies
);
);
}
catch ( SQLException sqle ) {
catch (SQLException sqle) {
throw factory.getSQLExceptionHelper().convert(
sqle,
"could not read next row of results",
getSQLString()
);
);
}

initializeEntitiesAndCollections(
@ -440,12 +443,13 @@ public abstract class Loader {
|
|||
loadedKeys,
|
||||
returnProxies
|
||||
);
|
||||
if ( ! keyToRead.equals( loadedKeys[0] ) ) {
|
||||
if ( !keyToRead.equals( loadedKeys[0] ) ) {
|
||||
throw new AssertionFailure(
|
||||
String.format(
|
||||
"Unexpected key read for row; expected [%s]; actual [%s]",
|
||||
keyToRead,
|
||||
loadedKeys[0] )
|
||||
loadedKeys[0]
|
||||
)
|
||||
);
|
||||
}
|
||||
if ( result == null ) {
|
||||
|
@ -455,12 +459,12 @@ public abstract class Loader {
|
|||
while ( resultSet.next() &&
|
||||
isCurrentRowForSameEntity( keyToRead, 0, resultSet, session ) );
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
throw factory.getSQLExceptionHelper().convert(
|
||||
sqle,
|
||||
"could not doAfterTransactionCompletion sequential read of results (forward)",
|
||||
getSQLString()
|
||||
);
|
||||
);
|
||||
}
|
||||
|
||||
initializeEntitiesAndCollections(
|
||||
|
@ -494,7 +498,9 @@ public abstract class Loader {
|
|||
* @param session The session from which the request originated.
|
||||
* @param queryParameters The query parameters specified by the user.
|
||||
* @param returnProxies Should proxies be generated
|
||||
*
|
||||
* @return The loaded "row".
|
||||
*
|
||||
* @throws HibernateException
|
||||
*/
|
||||
public Object loadSequentialRowsForward(
|
||||
|
@ -528,16 +534,16 @@ public abstract class Loader {
|
|||
null,
|
||||
resultSet,
|
||||
session
|
||||
);
|
||||
);
|
||||
|
||||
return sequentialLoad( resultSet, session, queryParameters, returnProxies, currentKey );
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
throw factory.getSQLExceptionHelper().convert(
|
||||
sqle,
|
||||
"could not perform sequential read of results (forward)",
|
||||
getSQLString()
|
||||
);
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -551,7 +557,9 @@ public abstract class Loader {
|
|||
* @param session The session from which the request originated.
|
||||
* @param queryParameters The query parameters specified by the user.
|
||||
* @param returnProxies Should proxies be generated
|
||||
*
|
||||
* @return The loaded "row".
|
||||
*
|
||||
* @throws HibernateException
|
||||
*/
|
||||
public Object loadSequentialRowsReverse(
|
||||
|
@ -592,7 +600,7 @@ public abstract class Loader {
|
|||
null,
|
||||
resultSet,
|
||||
session
|
||||
);
|
||||
);
|
||||
}
|
||||
else {
|
||||
// Since the result set cursor is always left at the first
|
||||
|
@ -612,7 +620,7 @@ public abstract class Loader {
|
|||
null,
|
||||
resultSet,
|
||||
session
|
||||
);
|
||||
);
|
||||
while ( resultSet.previous() ) {
|
||||
EntityKey checkKey = getKeyFromResultSet(
|
||||
0,
|
||||
|
@ -620,7 +628,7 @@ public abstract class Loader {
|
|||
null,
|
||||
resultSet,
|
||||
session
|
||||
);
|
||||
);
|
||||
|
||||
if ( firstPass ) {
|
||||
firstPass = false;
|
||||
|
@ -643,7 +651,7 @@ public abstract class Loader {
|
|||
null,
|
||||
resultSet,
|
||||
session
|
||||
);
|
||||
);
|
||||
|
||||
if ( !keyToRead.equals( checkKey ) ) {
|
||||
break;
|
||||
|
@ -657,12 +665,12 @@ public abstract class Loader {
|
|||
// and doAfterTransactionCompletion the load
|
||||
return sequentialLoad( resultSet, session, queryParameters, returnProxies, keyToRead );
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
throw factory.getSQLExceptionHelper().convert(
|
||||
sqle,
|
||||
"could not doAfterTransactionCompletion sequential read of results (forward)",
|
||||
getSQLString()
|
||||
);
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -672,7 +680,12 @@ public abstract class Loader {
|
|||
final String optionalEntityName = queryParameters.getOptionalEntityName();
|
||||
|
||||
if ( optionalObject != null && optionalEntityName != null ) {
|
||||
return session.generateEntityKey( optionalId, session.getEntityPersister( optionalEntityName, optionalObject ) );
|
||||
return session.generateEntityKey(
|
||||
optionalId, session.getEntityPersister(
|
||||
optionalEntityName,
|
||||
optionalObject
|
||||
)
|
||||
);
|
||||
}
|
||||
else {
|
||||
return null;
|
||||
|
@ -714,7 +727,15 @@ public abstract class Loader {
|
|||
ResultTransformer forcedResultTransformer) throws SQLException, HibernateException {
|
||||
final Loadable[] persisters = getEntityPersisters();
|
||||
final int entitySpan = persisters.length;
|
||||
extractKeysFromResultSet( persisters, queryParameters, resultSet, session, keys, lockModesArray, hydratedObjects );
|
||||
extractKeysFromResultSet(
|
||||
persisters,
|
||||
queryParameters,
|
||||
resultSet,
|
||||
session,
|
||||
keys,
|
||||
lockModesArray,
|
||||
hydratedObjects
|
||||
);
|
||||
|
||||
registerNonExists( keys, persisters, session );
|
||||
|
||||
|
@ -739,7 +760,7 @@ public abstract class Loader {
|
|||
Object proxy = session.getPersistenceContext().proxyFor( persisters[i], keys[i], entity );
|
||||
if ( entity != proxy ) {
|
||||
// force the proxy to resolve itself
|
||||
( (HibernateProxy) proxy ).getHibernateLazyInitializer().setImplementation(entity);
|
||||
( (HibernateProxy) proxy ).getHibernateLazyInitializer().setImplementation( entity );
|
||||
row[i] = proxy;
|
||||
}
|
||||
}
|
||||
|
@ -749,8 +770,11 @@ public abstract class Loader {
|
|||
|
||||
return forcedResultTransformer == null
|
||||
? getResultColumnOrRow( row, queryParameters.getResultTransformer(), resultSet, session )
|
||||
: forcedResultTransformer.transformTuple( getResultRow( row, resultSet, session ), getResultRowAliases() )
|
||||
;
|
||||
: forcedResultTransformer.transformTuple(
|
||||
getResultRow( row, resultSet, session ),
|
||||
getResultRowAliases()
|
||||
)
|
||||
;
|
||||
}
|
||||
|
||||
protected void extractKeysFromResultSet(
|
||||
|
@ -766,7 +790,7 @@ public abstract class Loader {
|
|||
final int numberOfPersistersToProcess;
|
||||
final Serializable optionalId = queryParameters.getOptionalId();
|
||||
if ( isSingleRowLoader() && optionalId != null ) {
|
||||
keys[ entitySpan - 1 ] = session.generateEntityKey( optionalId, persisters[ entitySpan - 1 ] );
|
||||
keys[entitySpan - 1] = session.generateEntityKey( optionalId, persisters[entitySpan - 1] );
|
||||
// skip the last persister below...
|
||||
numberOfPersistersToProcess = entitySpan - 1;
|
||||
}
|
||||
|
@ -778,7 +802,12 @@ public abstract class Loader {
|
|||
|
||||
for ( int i = 0; i < numberOfPersistersToProcess; i++ ) {
|
||||
final Type idType = persisters[i].getIdentifierType();
|
||||
hydratedKeyState[i] = idType.hydrate( resultSet, getEntityAliases()[i].getSuffixedKeyAliases(), session, null );
|
||||
hydratedKeyState[i] = idType.hydrate(
|
||||
resultSet,
|
||||
getEntityAliases()[i].getSuffixedKeyAliases(),
|
||||
session,
|
||||
null
|
||||
);
|
||||
}
|
||||
|
||||
for ( int i = 0; i < numberOfPersistersToProcess; i++ ) {
|
||||
|
@ -854,15 +883,15 @@ public abstract class Loader {
|
|||
final CollectionAliases[] descriptors = getCollectionAliases();
|
||||
final int[] collectionOwners = getCollectionOwners();
|
||||
|
||||
for ( int i=0; i<collectionPersisters.length; i++ ) {
|
||||
for ( int i = 0; i < collectionPersisters.length; i++ ) {
|
||||
|
||||
final boolean hasCollectionOwners = collectionOwners !=null &&
|
||||
final boolean hasCollectionOwners = collectionOwners != null &&
|
||||
collectionOwners[i] > -1;
|
||||
//true if this is a query and we are loading multiple instances of the same collection role
|
||||
//otherwise this is a CollectionInitializer and we are loading up a single collection or batch
|
||||
|
||||
final Object owner = hasCollectionOwners ?
|
||||
row[ collectionOwners[i] ] :
|
||||
row[collectionOwners[i]] :
|
||||
null; //if null, owner will be retrieved from session
|
||||
|
||||
final CollectionPersister collectionPersister = collectionPersisters[i];
|
||||
|
@ -883,7 +912,7 @@ public abstract class Loader {
|
|||
descriptors[i],
|
||||
resultSet,
|
||||
session
|
||||
);
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
|
@ -914,7 +943,15 @@ public abstract class Loader {
|
|||
// that I could do the control breaking at the means to know when to stop
|
||||
|
||||
try {
|
||||
return processResultSet( rs, queryParameters, session, returnProxies, forcedResultTransformer, maxRows, afterLoadActions );
|
||||
return processResultSet(
|
||||
rs,
|
||||
queryParameters,
|
||||
session,
|
||||
returnProxies,
|
||||
forcedResultTransformer,
|
||||
maxRows,
|
||||
afterLoadActions
|
||||
);
|
||||
}
|
||||
finally {
|
||||
session.getJdbcCoordinator().getResourceRegistry().release( st );
|
||||
|
@ -961,7 +998,7 @@ public abstract class Loader {
|
|||
);
|
||||
results.add( result );
|
||||
if ( createSubselects ) {
|
||||
subselectResultKeys.add(keys);
|
||||
subselectResultKeys.add( keys );
|
||||
keys = new EntityKey[entitySpan]; //can't reuse in this case
|
||||
}
|
||||
}
|
||||
|
@ -995,9 +1032,9 @@ public abstract class Loader {
|
|||
return false;
|
||||
}
|
||||
|
||||
private static Set[] transpose( List keys ) {
|
||||
Set[] result = new Set[ ( ( EntityKey[] ) keys.get(0) ).length ];
|
||||
for ( int j=0; j<result.length; j++ ) {
|
||||
private static Set[] transpose(List keys) {
|
||||
Set[] result = new Set[( (EntityKey[]) keys.get( 0 ) ).length];
|
||||
for ( int j = 0; j < result.length; j++ ) {
|
||||
result[j] = new HashSet( keys.size() );
|
||||
for ( Object key : keys ) {
|
||||
result[j].add( ( (EntityKey[]) key )[j] );
|
||||
|
@ -1009,7 +1046,7 @@ public abstract class Loader {
|
|||
private void createSubselects(List keys, QueryParameters queryParameters, SessionImplementor session) {
|
||||
if ( keys.size() > 1 ) { //if we only returned one entity, query by key is more efficient
|
||||
|
||||
Set[] keySets = transpose(keys);
|
||||
Set[] keySets = transpose( keys );
|
||||
|
||||
Map namedParameterLocMap = buildNamedParameterLocMap( queryParameters );
|
||||
|
||||
|
@ -1042,12 +1079,12 @@ public abstract class Loader {
|
|||
}
|
||||
|
||||
private Map buildNamedParameterLocMap(QueryParameters queryParameters) {
|
||||
if ( queryParameters.getNamedParameters()!=null ) {
|
||||
if ( queryParameters.getNamedParameters() != null ) {
|
||||
final Map namedParameterLocMap = new HashMap();
|
||||
for(String name : queryParameters.getNamedParameters().keySet()){
|
||||
for ( String name : queryParameters.getNamedParameters().keySet() ) {
|
||||
namedParameterLocMap.put(
|
||||
name,
|
||||
getNamedParameterLocs(name)
|
||||
getNamedParameterLocs( name )
|
||||
);
|
||||
}
|
||||
return namedParameterLocMap;
|
||||
|
@ -1104,7 +1141,7 @@ public abstract class Loader {
|
|||
post = null;
|
||||
}
|
||||
|
||||
if ( hydratedObjects!=null ) {
|
||||
if ( hydratedObjects != null ) {
|
||||
int hydratedObjectsSize = hydratedObjects.size();
|
||||
LOG.tracev( "Total objects hydrated: {0}", hydratedObjectsSize );
|
||||
for ( Object hydratedObject : hydratedObjects ) {
|
||||
|
@ -1123,7 +1160,7 @@ public abstract class Loader {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Until this entire method is refactored w/ polymorphism, postLoad was
|
||||
// split off from initializeEntity. It *must* occur after
|
||||
// endCollectionLoad to ensure the collection is in the
|
||||
|
@ -1136,7 +1173,9 @@ public abstract class Loader {
|
|||
final EntityEntry entityEntry = session.getPersistenceContext().getEntry( hydratedObject );
|
||||
if ( entityEntry == null ) {
|
||||
// big problem
|
||||
throw new HibernateException( "Could not locate EntityEntry immediately after two-phase load" );
|
||||
throw new HibernateException(
|
||||
"Could not locate EntityEntry immediately after two-phase load"
|
||||
);
|
||||
}
|
||||
afterLoadAction.afterLoad( session, hydratedObject, (Loadable) entityEntry.getPersister() );
|
||||
}
|
||||
|
@ -1152,7 +1191,7 @@ public abstract class Loader {
|
|||
//this is a query and we are loading multiple instances of the same collection role
|
||||
session.getPersistenceContext()
|
||||
.getLoadContexts()
|
||||
.getCollectionLoadContext( ( ResultSet ) resultSetId )
|
||||
.getCollectionLoadContext( (ResultSet) resultSetId )
|
||||
.endLoadingCollections( collectionPersister );
|
||||
}
|
||||
|
||||
|
@ -1161,6 +1200,7 @@ public abstract class Loader {
|
|||
* transform query results.
|
||||
*
|
||||
* @param resultTransformer the specified result transformer
|
||||
*
|
||||
* @return the actual result transformer
|
||||
*/
|
||||
protected ResultTransformer resolveResultTransformer(ResultTransformer resultTransformer) {
|
||||
|
@ -1173,6 +1213,7 @@ public abstract class Loader {
|
|||
|
||||
/**
|
||||
* Are rows transformed immediately after being read from the ResultSet?
|
||||
*
|
||||
* @return true, if getResultColumnOrRow() transforms the results; false, otherwise
|
||||
*/
|
||||
protected boolean areResultSetRowsTransformedImmediately() {
|
||||
|
@ -1181,10 +1222,11 @@ public abstract class Loader {
|
|||
|
||||
/**
|
||||
* Returns the aliases that corresponding to a result row.
|
||||
*
|
||||
* @return Returns the aliases that corresponding to a result row.
|
||||
*/
|
||||
protected String[] getResultRowAliases() {
|
||||
return null;
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1252,12 +1294,14 @@ public abstract class Loader {
|
|||
);
|
||||
}
|
||||
else if ( isSpecialOneToOne ) {*/
|
||||
boolean isOneToOneAssociation = ownerAssociationTypes!=null &&
|
||||
ownerAssociationTypes[i]!=null &&
|
||||
boolean isOneToOneAssociation = ownerAssociationTypes != null &&
|
||||
ownerAssociationTypes[i] != null &&
|
||||
ownerAssociationTypes[i].isOneToOne();
|
||||
if ( isOneToOneAssociation ) {
|
||||
persistenceContext.addNullProperty( ownerKey,
|
||||
ownerAssociationTypes[i].getPropertyName() );
|
||||
persistenceContext.addNullProperty(
|
||||
ownerKey,
|
||||
ownerAssociationTypes[i].getPropertyName()
|
||||
);
|
||||
}
|
||||
/*}
|
||||
else {
|
||||
|
@ -1285,7 +1329,7 @@ public abstract class Loader {
|
|||
final CollectionAliases descriptor,
|
||||
final ResultSet rs,
|
||||
final SessionImplementor session)
|
||||
throws HibernateException, SQLException {
|
||||
throws HibernateException, SQLException {
|
||||
|
||||
final PersistenceContext persistenceContext = session.getPersistenceContext();
|
||||
|
||||
|
@ -1293,14 +1337,16 @@ public abstract class Loader {
|
|||
rs,
|
||||
descriptor.getSuffixedKeyAliases(),
|
||||
session
|
||||
);
|
||||
);
|
||||
|
||||
if ( collectionRowKey != null ) {
|
||||
// we found a collection element in the result set
|
||||
|
||||
if ( LOG.isDebugEnabled() ) {
|
||||
LOG.debugf( "Found row of collection: %s",
|
||||
MessageHelper.collectionInfoString( persister, collectionRowKey, getFactory() ) );
|
||||
LOG.debugf(
|
||||
"Found row of collection: %s",
|
||||
MessageHelper.collectionInfoString( persister, collectionRowKey, getFactory() )
|
||||
);
|
||||
}
|
||||
|
||||
Object owner = optionalOwner;
|
||||
|
@ -1330,8 +1376,10 @@ public abstract class Loader {
|
|||
// since what we have is an empty collection
|
||||
|
||||
if ( LOG.isDebugEnabled() ) {
|
||||
LOG.debugf( "Result set contains (possibly empty) collection: %s",
|
||||
MessageHelper.collectionInfoString( persister, optionalKey, getFactory() ) );
|
||||
LOG.debugf(
|
||||
"Result set contains (possibly empty) collection: %s",
|
||||
MessageHelper.collectionInfoString( persister, optionalKey, getFactory() )
|
||||
);
|
||||
}
|
||||
|
||||
persistenceContext.getLoadContexts()
|
||||
|
@ -1360,7 +1408,7 @@ public abstract class Loader {
|
|||
// for each of the passed-in keys, to account for the possibility
|
||||
// that the collection is empty and has no rows in the result set
|
||||
CollectionPersister[] collectionPersisters = getCollectionPersisters();
|
||||
for ( CollectionPersister collectionPersister : collectionPersisters )
|
||||
for ( CollectionPersister collectionPersister : collectionPersisters ) {
|
||||
for ( Serializable key : keys ) {
|
||||
//handle empty collections
|
||||
if ( debugEnabled ) {
|
||||
|
@ -1375,6 +1423,7 @@ public abstract class Loader {
|
|||
.getCollectionLoadContext( (ResultSet) resultSetId )
|
||||
.getLoadingCollection( collectionPersister, key );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// else this is not a collection initializer (and empty collections will
|
||||
|
@ -1447,7 +1496,7 @@ public abstract class Loader {
|
|||
session,
|
||||
null
|
||||
);
|
||||
if ( !versionType.isEqual(version, currentVersion) ) {
|
||||
if ( !versionType.isEqual( version, currentVersion ) ) {
|
||||
if ( session.getFactory().getStatistics().isStatisticsEnabled() ) {
|
||||
session.getFactory().getStatisticsImplementor()
|
||||
.optimisticFailure( persister.getEntityName() );
|
||||
|
@ -1577,7 +1626,7 @@ public abstract class Loader {
|
|||
final Object optionalObject,
|
||||
final List hydratedObjects,
|
||||
final SessionImplementor session)
|
||||
throws HibernateException, SQLException {
|
||||
throws HibernateException, SQLException {
|
||||
final String instanceClass = getInstanceClass(
|
||||
rs,
|
||||
i,
|
||||
|
@ -1639,7 +1688,7 @@ public abstract class Loader {
|
|||
|
||||
private boolean isEagerPropertyFetchEnabled(int i) {
|
||||
boolean[] array = getEntityEagerPropertyFetches();
|
||||
return array!=null && array[i];
|
||||
return array != null && array[i];
|
||||
}
|
||||
|
||||
|
||||
|
@ -1665,10 +1714,16 @@ public abstract class Loader {
|
|||
final Loadable persister = (Loadable) getFactory().getEntityPersister( instanceEntityName );
|
||||
|
||||
if ( LOG.isTraceEnabled() ) {
|
||||
LOG.tracev( "Initializing object from ResultSet: {0}", MessageHelper.infoString( persister, id, getFactory() ) );
|
||||
LOG.tracev(
|
||||
"Initializing object from ResultSet: {0}", MessageHelper.infoString(
|
||||
persister,
|
||||
id,
|
||||
getFactory()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
boolean eagerPropertyFetch = isEagerPropertyFetchEnabled(i);
|
||||
boolean eagerPropertyFetch = isEagerPropertyFetchEnabled( i );
|
||||
|
||||
// add temp entry so that the next step is circular-reference
|
||||
// safe - only needed because some types don't take proper
|
||||
|
@ -1685,7 +1740,7 @@ public abstract class Loader {
|
|||
//This is not very nice (and quite slow):
|
||||
final String[][] cols = persister == rootPersister ?
|
||||
getEntityAliases()[i].getSuffixedPropertyAliases() :
|
||||
getEntityAliases()[i].getSuffixedPropertyAliases(persister);
|
||||
getEntityAliases()[i].getSuffixedPropertyAliases( persister );
|
||||
|
||||
final Object[] values = persister.hydrate(
|
||||
rs,
|
||||
|
@ -1697,13 +1752,13 @@ public abstract class Loader {
|
|||
session
|
||||
);
|
||||
|
||||
final Object rowId = persister.hasRowId() ? rs.getObject(rowIdAlias) : null;
|
||||
final Object rowId = persister.hasRowId() ? rs.getObject( rowIdAlias ) : null;
|
||||
|
||||
final AssociationType[] ownerAssociationTypes = getOwnerAssociationTypes();
|
||||
if ( ownerAssociationTypes != null && ownerAssociationTypes[i] != null ) {
|
||||
String ukName = ownerAssociationTypes[i].getRHSUniqueKeyPropertyName();
|
||||
if (ukName!=null) {
|
||||
final int index = ( (UniqueKeyLoadable) persister ).getPropertyIndex(ukName);
|
||||
if ( ukName != null ) {
|
||||
final int index = ( (UniqueKeyLoadable) persister ).getPropertyIndex( ukName );
|
||||
final Type type = persister.getPropertyTypes()[index];
|
||||
|
||||
// polymorphism not really handled completely correctly,
|
||||
|
@ -1740,11 +1795,11 @@ public abstract class Loader {
|
|||
* Determine the concrete class of an instance in the <tt>ResultSet</tt>
|
||||
*/
|
||||
private String getInstanceClass(
|
||||
final ResultSet rs,
|
||||
final int i,
|
||||
final Loadable persister,
|
||||
final Serializable id,
|
||||
final SessionImplementor session) throws HibernateException, SQLException {
|
||||
final ResultSet rs,
|
||||
final int i,
|
||||
final Loadable persister,
|
||||
final Serializable id,
|
||||
final SessionImplementor session) throws HibernateException, SQLException {
|
||||
|
||||
if ( persister.hasSubclasses() ) {
|
||||
|
||||
|
@ -1764,7 +1819,7 @@ public abstract class Loader {
|
|||
"Discriminator: " + discriminatorValue,
|
||||
id,
|
||||
persister.getEntityName()
|
||||
);
|
||||
);
|
||||
}
|
||||
|
||||
return result;
|
||||
|
@ -1800,6 +1855,7 @@ public abstract class Loader {
|
|||
* if dialect does not support LIMIT expression or processed query does not use pagination.
|
||||
*
|
||||
* @param selection Selection criteria.
|
||||
*
|
||||
* @return LIMIT clause delegate.
|
||||
*/
|
||||
protected LimitHandler getLimitHandler(RowSelection selection) {
|
||||
|
@ -1807,7 +1863,11 @@ public abstract class Loader {
|
|||
return LimitHelper.useLimit( limitHandler, selection ) ? limitHandler : NoopLimitHandler.INSTANCE;
|
||||
}
|
||||
|
||||
private ScrollMode getScrollMode(boolean scroll, boolean hasFirstRow, boolean useLimitOffSet, QueryParameters queryParameters) {
|
||||
private ScrollMode getScrollMode(
|
||||
boolean scroll,
|
||||
boolean hasFirstRow,
|
||||
boolean useLimitOffSet,
|
||||
QueryParameters queryParameters) {
|
||||
final boolean canScroll = getFactory().getSessionFactoryOptions().isScrollableResultSetsEnabled();
|
||||
if ( canScroll ) {
|
||||
if ( scroll ) {
|
||||
|
@ -1852,7 +1912,15 @@ public abstract class Loader {
|
|||
sql = preprocessSQL( sql, queryParameters, getFactory().getDialect(), afterLoadActions );
|
||||
|
||||
final PreparedStatement st = prepareQueryStatement( sql, queryParameters, limitHandler, scroll, session );
|
||||
return new SqlStatementWrapper( st, getResultSet( st, queryParameters.getRowSelection(), limitHandler, queryParameters.hasAutoDiscoverScalarTypes(), session ) );
|
||||
return new SqlStatementWrapper(
|
||||
st, getResultSet(
|
||||
st,
|
||||
queryParameters.getRowSelection(),
|
||||
limitHandler,
|
||||
queryParameters.hasAutoDiscoverScalarTypes(),
|
||||
session
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1861,11 +1929,11 @@ public abstract class Loader {
|
|||
* limit parameters.
|
||||
*/
|
||||
protected final PreparedStatement prepareQueryStatement(
|
||||
String sql,
|
||||
final QueryParameters queryParameters,
|
||||
final LimitHandler limitHandler,
|
||||
final boolean scroll,
|
||||
final SessionImplementor session) throws SQLException, HibernateException {
|
||||
String sql,
|
||||
final QueryParameters queryParameters,
|
||||
final LimitHandler limitHandler,
|
||||
final boolean scroll,
|
||||
final SessionImplementor session) throws SQLException, HibernateException {
|
||||
final Dialect dialect = getFactory().getDialect();
|
||||
final RowSelection selection = queryParameters.getRowSelection();
|
||||
final boolean useLimit = LimitHelper.useLimit( limitHandler, selection );
|
||||
|
@ -1873,7 +1941,7 @@ public abstract class Loader {
|
|||
final boolean useLimitOffset = hasFirstRow && useLimit && limitHandler.supportsLimitOffset();
|
||||
final boolean callable = queryParameters.isCallable();
|
||||
final ScrollMode scrollMode = getScrollMode( scroll, hasFirstRow, useLimitOffset, queryParameters );
|
||||
|
||||
|
||||
PreparedStatement st = session.getJdbcCoordinator().getStatementPreparer().prepareQueryStatement(
|
||||
sql,
|
||||
callable,
|
||||
|
@ -1886,8 +1954,8 @@ public abstract class Loader {
|
|||
//TODO: can we limit stored procedures ?!
|
||||
col += limitHandler.bindLimitParametersAtStartOfQuery( selection, st, col );
|
||||
|
||||
if (callable) {
|
||||
col = dialect.registerResultSetOutParameter( (CallableStatement)st, col );
|
||||
if ( callable ) {
|
||||
col = dialect.registerResultSetOutParameter( (CallableStatement) st, col );
|
||||
}
|
||||
|
||||
col += bindParameterValues( st, queryParameters, col, session );
|
||||
|
@ -1923,15 +1991,16 @@ public abstract class Loader {
|
|||
}
|
||||
}
|
||||
|
||||
if ( LOG.isTraceEnabled() )
|
||||
LOG.tracev( "Bound [{0}] parameters total", col );
|
||||
if ( LOG.isTraceEnabled() ) {
|
||||
LOG.tracev( "Bound [{0}] parameters total", col );
|
||||
}
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
session.getJdbcCoordinator().getResourceRegistry().release( st );
|
||||
session.getJdbcCoordinator().afterStatementExecution();
|
||||
throw sqle;
|
||||
}
|
||||
catch ( HibernateException he ) {
|
||||
catch (HibernateException he) {
|
||||
session.getJdbcCoordinator().getResourceRegistry().release( st );
|
||||
session.getJdbcCoordinator().afterStatementExecution();
|
||||
throw he;
|
||||
|
@ -1948,7 +2017,9 @@ public abstract class Loader {
|
|||
* @param queryParameters The encapsulation of the parameter values to be bound.
|
||||
* @param startIndex The position from which to start binding parameter values.
|
||||
* @param session The originating session.
|
||||
*
|
||||
* @return The number of JDBC bind positions actually bound during this method execution.
|
||||
*
|
||||
* @throws SQLException Indicates problems performing the binding.
|
||||
*/
|
||||
protected int bindParameterValues(
|
||||
|
@ -1973,7 +2044,9 @@ public abstract class Loader {
|
|||
* @param queryParameters The encapsulation of the parameter values to be bound.
|
||||
* @param startIndex The position from which to start binding parameter values.
|
||||
* @param session The originating session.
|
||||
*
|
||||
* @return The number of JDBC bind positions actually bound during this method execution.
|
||||
*
|
||||
* @throws SQLException Indicates problems performing the binding.
|
||||
* @throws org.hibernate.HibernateException Indicates problems delegating binding to the types.
|
||||
*/
|
||||
|
@ -2006,7 +2079,9 @@ public abstract class Loader {
|
|||
* @param namedParams A map of parameter names to values
|
||||
* @param startIndex The position from which to start binding parameter values.
|
||||
* @param session The originating session.
|
||||
*
|
||||
* @return The number of JDBC bind positions actually bound during this method execution.
|
||||
*
|
||||
* @throws SQLException Indicates problems performing the binding.
|
||||
* @throws org.hibernate.HibernateException Indicates problems delegating binding to the types.
|
||||
*/
|
||||
|
@ -2042,7 +2117,7 @@ public abstract class Loader {
|
|||
}
|
||||
|
||||
public int[] getNamedParameterLocs(String name) {
|
||||
throw new AssertionFailure("no named parameters");
|
||||
throw new AssertionFailure( "no named parameters" );
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -2056,7 +2131,7 @@ public abstract class Loader {
|
|||
final SessionImplementor session) throws SQLException, HibernateException {
|
||||
try {
|
||||
ResultSet rs = session.getJdbcCoordinator().getResultSetReturn().extract( st );
|
||||
rs = wrapResultSetIfEnabled( rs , session );
|
||||
rs = wrapResultSetIfEnabled( rs, session );
|
||||
|
||||
if ( !limitHandler.supportsLimitOffset() || !LimitHelper.useLimit( limitHandler, selection ) ) {
|
||||
advance( rs, selection );
|
||||
|
@ -2067,7 +2142,7 @@ public abstract class Loader {
|
|||
}
|
||||
return rs;
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
session.getJdbcCoordinator().getResourceRegistry().release( st );
|
||||
session.getJdbcCoordinator().afterStatementExecution();
|
||||
throw sqle;
|
||||
|
@ -2075,7 +2150,7 @@ public abstract class Loader {
|
|||
}
|
||||
|
||||
protected void autoDiscoverTypes(ResultSet rs) {
|
||||
throw new AssertionFailure("Auto discover types not supported in this loader");
|
||||
throw new AssertionFailure( "Auto discover types not supported in this loader" );
|
||||
|
||||
}
|
||||
|
||||
|
@ -2088,7 +2163,7 @@ public abstract class Loader {
|
|||
.getService( JdbcServices.class )
|
||||
.getResultSetWrapper().wrap( rs, retreiveColumnNameToIndexCache( rs ) );
|
||||
}
|
||||
catch(SQLException e) {
|
||||
catch (SQLException e) {
|
||||
LOG.unableToWrapResultSet( e );
|
||||
return rs;
|
||||
}
|
||||
|
@ -2131,21 +2206,26 @@ public abstract class Loader {
|
|||
List result;
|
||||
try {
|
||||
QueryParameters qp = new QueryParameters();
|
||||
qp.setPositionalParameterTypes( new Type[] { identifierType } );
|
||||
qp.setPositionalParameterValues( new Object[] { id } );
|
||||
qp.setPositionalParameterTypes( new Type[] {identifierType} );
|
||||
qp.setPositionalParameterValues( new Object[] {id} );
|
||||
qp.setOptionalObject( optionalObject );
|
||||
qp.setOptionalEntityName( optionalEntityName );
|
||||
qp.setOptionalId( optionalIdentifier );
|
||||
qp.setLockOptions( lockOptions );
|
||||
result = doQueryAndInitializeNonLazyCollections( session, qp, false );
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
final Loadable[] persisters = getEntityPersisters();
|
||||
throw factory.getSQLExceptionHelper().convert(
|
||||
sqle,
|
||||
"could not load an entity: " +
|
||||
MessageHelper.infoString( persisters[persisters.length-1], id, identifierType, getFactory() ),
|
||||
getSQLString()
|
||||
sqle,
|
||||
"could not load an entity: " +
|
||||
MessageHelper.infoString(
|
||||
persisters[persisters.length - 1],
|
||||
id,
|
||||
identifierType,
|
||||
getFactory()
|
||||
),
|
||||
getSQLString()
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -2157,15 +2237,16 @@ public abstract class Loader {
|
|||
|
||||
/**
|
||||
* Called by subclasses that load entities
|
||||
*
|
||||
* @param persister only needed for logging
|
||||
*/
|
||||
protected final List loadEntity(
|
||||
final SessionImplementor session,
|
||||
final Object key,
|
||||
final Object index,
|
||||
final Type keyType,
|
||||
final Type indexType,
|
||||
final EntityPersister persister) throws HibernateException {
|
||||
final SessionImplementor session,
|
||||
final Object key,
|
||||
final Object index,
|
||||
final Type keyType,
|
||||
final Type indexType,
|
||||
final EntityPersister persister) throws HibernateException {
|
||||
LOG.debug( "Loading collection element by index" );
|
||||
|
||||
List result;
|
||||
|
@ -2173,17 +2254,17 @@ public abstract class Loader {
|
|||
result = doQueryAndInitializeNonLazyCollections(
|
||||
session,
|
||||
new QueryParameters(
|
||||
new Type[] { keyType, indexType },
|
||||
new Object[] { key, index }
|
||||
new Type[] {keyType, indexType},
|
||||
new Object[] {key, index}
|
||||
),
|
||||
false
|
||||
);
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
throw factory.getSQLExceptionHelper().convert(
|
||||
sqle,
|
||||
"could not load collection element by index",
|
||||
getSQLString()
|
||||
sqle,
|
||||
"could not load collection element by index",
|
||||
getSQLString()
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -2205,8 +2286,9 @@ public abstract class Loader {
|
|||
final Serializable optionalId,
|
||||
final EntityPersister persister,
|
||||
LockOptions lockOptions) throws HibernateException {
|
||||
if ( LOG.isDebugEnabled() )
|
||||
if ( LOG.isDebugEnabled() ) {
|
||||
LOG.debugf( "Batch loading entity: %s", MessageHelper.infoString( persister, ids, getFactory() ) );
|
||||
}
|
||||
|
||||
Type[] types = new Type[ids.length];
|
||||
Arrays.fill( types, idType );
|
||||
|
@ -2221,12 +2303,12 @@ public abstract class Loader {
|
|||
qp.setLockOptions( lockOptions );
|
||||
result = doQueryAndInitializeNonLazyCollections( session, qp, false );
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
throw factory.getSQLExceptionHelper().convert(
|
||||
sqle,
|
||||
"could not load an entity batch: " +
|
||||
MessageHelper.infoString( getEntityPersisters()[0], ids, getFactory() ),
|
||||
getSQLString()
|
||||
sqle,
|
||||
"could not load an entity batch: " +
|
||||
MessageHelper.infoString( getEntityPersisters()[0], ids, getFactory() ),
|
||||
getSQLString()
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -2240,9 +2322,9 @@ public abstract class Loader {
|
|||
* Called by subclasses that initialize collections
|
||||
*/
|
||||
public final void loadCollection(
|
||||
final SessionImplementor session,
|
||||
final Serializable id,
|
||||
final Type type) throws HibernateException {
|
||||
final SessionImplementor session,
|
||||
final Serializable id,
|
||||
final Type type) throws HibernateException {
|
||||
if ( LOG.isDebugEnabled() ) {
|
||||
LOG.debugf(
|
||||
"Loading collection: %s",
|
||||
|
@ -2250,19 +2332,19 @@ public abstract class Loader {
|
|||
);
|
||||
}
|
||||
|
||||
Serializable[] ids = new Serializable[]{id};
|
||||
Serializable[] ids = new Serializable[] {id};
|
||||
try {
|
||||
doQueryAndInitializeNonLazyCollections(
|
||||
session,
|
||||
new QueryParameters( new Type[]{type}, ids, ids ),
|
||||
new QueryParameters( new Type[] {type}, ids, ids ),
|
||||
true
|
||||
);
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
throw factory.getSQLExceptionHelper().convert(
|
||||
sqle,
|
||||
"could not initialize a collection: " +
|
||||
MessageHelper.collectionInfoString( getCollectionPersisters()[0], id, getFactory() ),
|
||||
MessageHelper.collectionInfoString( getCollectionPersisters()[0], id, getFactory() ),
|
||||
getSQLString()
|
||||
);
|
||||
}
|
||||
|
@ -2293,11 +2375,11 @@ public abstract class Loader {
|
|||
true
|
||||
);
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
throw factory.getSQLExceptionHelper().convert(
|
||||
sqle,
|
||||
"could not initialize a collection batch: " +
|
||||
MessageHelper.collectionInfoString( getCollectionPersisters()[0], ids, getFactory() ),
|
||||
MessageHelper.collectionInfoString( getCollectionPersisters()[0], ids, getFactory() ),
|
||||
getSQLString()
|
||||
);
|
||||
}
|
||||
|
@ -2318,16 +2400,17 @@ public abstract class Loader {
|
|||
final Type[] idTypes = new Type[ids.length];
|
||||
Arrays.fill( idTypes, type );
|
||||
try {
|
||||
doQueryAndInitializeNonLazyCollections( session,
|
||||
doQueryAndInitializeNonLazyCollections(
|
||||
session,
|
||||
new QueryParameters( parameterTypes, parameterValues, namedParameters, ids ),
|
||||
true
|
||||
);
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
throw factory.getSQLExceptionHelper().convert(
|
||||
sqle,
|
||||
"could not load collection by subselect: " +
|
||||
MessageHelper.collectionInfoString( getCollectionPersisters()[0], ids, getFactory() ),
|
||||
MessageHelper.collectionInfoString( getCollectionPersisters()[0], ids, getFactory() ),
|
||||
getSQLString()
|
||||
);
|
||||
}
|
||||
|
@ -2343,7 +2426,7 @@ public abstract class Loader {
|
|||
final Set<Serializable> querySpaces,
|
||||
final Type[] resultTypes) throws HibernateException {
|
||||
final boolean cacheable = factory.getSessionFactoryOptions().isQueryCacheEnabled() &&
|
||||
queryParameters.isCacheable();
|
||||
queryParameters.isCacheable();
|
||||
|
||||
if ( cacheable ) {
|
||||
return listUsingQueryCache( session, queryParameters, querySpaces, resultTypes );
|
||||
|
@ -2525,12 +2608,13 @@ public abstract class Loader {
|
|||
|
||||
protected List doList(final SessionImplementor session, final QueryParameters queryParameters)
|
||||
throws HibernateException {
|
||||
return doList( session, queryParameters, null);
|
||||
return doList( session, queryParameters, null );
|
||||
}
|
||||
|
||||
private List doList(final SessionImplementor session,
|
||||
final QueryParameters queryParameters,
|
||||
final ResultTransformer forcedResultTransformer)
|
||||
private List doList(
|
||||
final SessionImplementor session,
|
||||
final QueryParameters queryParameters,
|
||||
final ResultTransformer forcedResultTransformer)
|
||||
throws HibernateException {
|
||||
|
||||
final boolean stats = getFactory().getStatistics().isStatisticsEnabled();
|
||||
|
@ -2543,7 +2627,7 @@ public abstract class Loader {
|
|||
try {
|
||||
result = doQueryAndInitializeNonLazyCollections( session, queryParameters, true, forcedResultTransformer );
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
throw factory.getSQLExceptionHelper().convert(
|
||||
sqle,
|
||||
"could not execute query",
|
||||
|
@ -2594,7 +2678,9 @@ public abstract class Loader {
|
|||
* @param holderInstantiator If the return values are expected to be wrapped
|
||||
* in a holder, this is the thing that knows how to wrap them.
|
||||
* @param session The session from which the scroll request originated.
|
||||
*
|
||||
* @return The ScrollableResults instance.
|
||||
*
|
||||
* @throws HibernateException Indicates an error executing the query, or constructing
|
||||
* the ScrollableResults.
|
||||
*/
|
||||
|
@ -2615,7 +2701,12 @@ public abstract class Loader {
|
|||
try {
|
||||
// Don't use Collections#emptyList() here -- follow on locking potentially adds AfterLoadActions,
|
||||
// so the list cannot be immutable.
|
||||
final SqlStatementWrapper wrapper = executeQueryStatement( queryParameters, true, new ArrayList<AfterLoadAction>(), session );
|
||||
final SqlStatementWrapper wrapper = executeQueryStatement(
|
||||
queryParameters,
|
||||
true,
|
||||
new ArrayList<AfterLoadAction>(),
|
||||
session
|
||||
);
|
||||
final ResultSet rs = wrapper.getResultSet();
|
||||
final PreparedStatement st = (PreparedStatement) wrapper.getStatement();
|
||||
|
||||
|
@ -2653,7 +2744,7 @@ public abstract class Loader {
|
|||
}
|
||||
|
||||
}
|
||||
catch ( SQLException sqle ) {
|
||||
catch (SQLException sqle) {
|
||||
throw factory.getSQLExceptionHelper().convert(
|
||||
sqle,
|
||||
"could not execute query using scroll",
|
||||
|
@ -2667,7 +2758,8 @@ public abstract class Loader {
|
|||
* Calculate and cache select-clause suffixes. Must be
|
||||
* called by subclasses after instantiation.
|
||||
*/
|
||||
protected void postInstantiate() {}
|
||||
protected void postInstantiate() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the result set descriptor
|
||||
|
|
|
@@ -25,7 +25,6 @@ package org.hibernate.loader.plan.build.spi;

import org.hibernate.loader.plan.spi.CollectionQuerySpace;
import org.hibernate.loader.plan.spi.Join;
import org.hibernate.persister.collection.CollectionPropertyNames;

/**
* Describes a collection query space that allows adding joins with other
@@ -38,18 +37,18 @@ import org.hibernate.persister.collection.CollectionPropertyNames;
public interface ExpandingCollectionQuerySpace extends CollectionQuerySpace, ExpandingQuerySpace {

/**
* Adds a join with another query space for either a collection element or index. If {@code join}
* is an instance of {@link org.hibernate.loader.plan.spi.JoinDefinedByMetadata}, then the only valid
* Adds a join with another query space for either a collection element or index.
*
* If {@code join} is an instance of {@link org.hibernate.loader.plan.spi.JoinDefinedByMetadata}, then the only valid
* values returned by {@link org.hibernate.loader.plan.spi.JoinDefinedByMetadata#getJoinedPropertyName}
* are {@link CollectionPropertyNames#COLLECTION_ELEMENTS} and {@link CollectionPropertyNames#COLLECTION_INDICES},
* for the collection element or index, respectively.
* are {@code "elements"} and {@code "indices"} for the collection element or index, respectively.
*
* @param join The element or index join to add.
*
* @throws java.lang.IllegalArgumentException if {@code join} is an instance of
* {@link org.hibernate.loader.plan.spi.JoinDefinedByMetadata} and {@code join.getJoinedPropertyName()
* is neither {@link CollectionPropertyNames#COLLECTION_ELEMENTS} nor {@link CollectionPropertyNames#COLLECTION_INDICES}}.
* is neither {@code "elements"} and {@code "indices"}.
* @throws java.lang.IllegalStateException if there is already an existing join with the same joined property name.
*/
public void addJoin(Join join);
void addJoin(Join join);
}

@@ -47,14 +47,10 @@ import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.Loadable;
import org.hibernate.type.EntityType;

import org.jboss.logging.Logger;

/**
* @author Steve Ebersole
*/
public class ResultSetProcessingContextImpl implements ResultSetProcessingContext {
private static final Logger LOG = Logger.getLogger( ResultSetProcessingContextImpl.class );

private final ResultSet resultSet;
private final SessionImplementor session;
private final LoadPlan loadPlan;
@@ -74,18 +70,9 @@ public class ResultSetProcessingContextImpl implements ResultSetProcessingContex
/**
* Builds a ResultSetProcessingContextImpl
*
* @param resultSet
* @param session
* @param loadPlan
* @param readOnly
* @param shouldUseOptionalEntityInformation There are times when the "optional entity information" on
* QueryParameters should be used and times when they should not. Collection initializers, batch loaders, etc
* are times when it should NOT be used.
* @param forceFetchLazyAttributes
* @param shouldReturnProxies
* @param queryParameters
* @param namedParameterContext
* @param hadSubselectFetches
*/
public ResultSetProcessingContextImpl(
final ResultSet resultSet,
@@ -305,8 +292,9 @@ public class ResultSetProcessingContextImpl implements ResultSetProcessingContex
subselectLoadableEntityKeyMap = new HashMap<EntityPersister, Set<EntityKey>>();
}
for ( HydratedEntityRegistration registration : currentRowHydratedEntityRegistrationList ) {
Set<EntityKey> entityKeys = subselectLoadableEntityKeyMap.get( registration.getEntityReference()
.getEntityPersister() );
Set<EntityKey> entityKeys = subselectLoadableEntityKeyMap.get(
registration.getEntityReference().getEntityPersister()
);
if ( entityKeys == null ) {
entityKeys = new HashSet<EntityKey>();
subselectLoadableEntityKeyMap.put( registration.getEntityReference().getEntityPersister(), entityKeys );

@@ -42,31 +42,31 @@ public class Array extends List {
}

public Class getElementClass() throws MappingException {
if (elementClassName==null) {
if ( elementClassName == null ) {
org.hibernate.type.Type elementType = getElement().getType();
return isPrimitiveArray() ?
( (PrimitiveType) elementType ).getPrimitiveClass() :
elementType.getReturnedClass();
return isPrimitiveArray()
? ( (PrimitiveType) elementType ).getPrimitiveClass()
: elementType.getReturnedClass();
}
else {
try {
return ReflectHelper.classForName(elementClassName);
return ReflectHelper.classForName( elementClassName );
}
catch (ClassNotFoundException cnfe) {
throw new MappingException(cnfe);
throw new MappingException( cnfe );
}
}
}

@Override
public CollectionType getDefaultCollectionType() throws MappingException {
public CollectionType getDefaultCollectionType() throws MappingException {
return getMetadata().getTypeResolver()
.getTypeFactory()
.array( getRole(), getReferencedPropertyName(), getElementClass() );
}

@Override
public boolean isArray() {
public boolean isArray() {
return true;
}

@@ -76,6 +76,7 @@ public class Array extends List {
public String getElementClassName() {
return elementClassName;
}

/**
* @param elementClassName The elementClassName to set.
*/
@@ -84,7 +85,7 @@ public class Array extends List {
}

@Override
public Object accept(ValueVisitor visitor) {
return visitor.accept(this);
public Object accept(ValueVisitor visitor) {
return visitor.accept( this );
}
}

@@ -43,7 +43,7 @@ import org.hibernate.type.Type;

/**
* Mapping for a collection. Subclasses specialize to particular collection styles.
*
*
* @author Gavin King
*/
public abstract class Collection implements Fetchable, Value, Filterable {
@@ -145,10 +145,10 @@ public abstract class Collection implements Fetchable, Value, Filterable {
try {
setComparator( (Comparator) ReflectHelper.classForName( comparatorClassName ).newInstance() );
}
catch ( Exception e ) {
catch (Exception e) {
throw new MappingException(
"Could not instantiate comparator class [" + comparatorClassName
+ "] for collection " + getRole()
+ "] for collection " + getRole()
);
}
}
@@ -230,12 +230,12 @@ public abstract class Collection implements Fetchable, Value, Filterable {
}

/**
* @deprecated Inject the owner into constructor.
*
* @param owner The owner
*
* @deprecated Inject the owner into constructor.
*/
@Deprecated
public void setOwner(PersistentClass owner) {
public void setOwner(PersistentClass owner) {
this.owner = owner;
}

@@ -305,34 +305,37 @@ public abstract class Collection implements Fetchable, Value, Filterable {

if ( getKey().isCascadeDeleteEnabled() && ( !isInverse() || !isOneToMany() ) ) {
throw new MappingException(
"only inverse one-to-many associations may use on-delete=\"cascade\": "
+ getRole() );
"only inverse one-to-many associations may use on-delete=\"cascade\": "
+ getRole()
);
}
if ( !getKey().isValid( mapping ) ) {
throw new MappingException(
"collection foreign key mapping has wrong number of columns: "
+ getRole()
+ " type: "
+ getKey().getType().getName() );
"collection foreign key mapping has wrong number of columns: "
+ getRole()
+ " type: "
+ getKey().getType().getName()
);
}
if ( !getElement().isValid( mapping ) ) {
throw new MappingException(
"collection element mapping has wrong number of columns: "
+ getRole()
+ " type: "
+ getElement().getType().getName() );
throw new MappingException(
"collection element mapping has wrong number of columns: "
+ getRole()
+ " type: "
+ getElement().getType().getName()
);
}

checkColumnDuplication();

if ( elementNodeName!=null && elementNodeName.startsWith("@") ) {
throw new MappingException("element node must not be an attribute: " + elementNodeName );

if ( elementNodeName != null && elementNodeName.startsWith( "@" ) ) {
throw new MappingException( "element node must not be an attribute: " + elementNodeName );
}
if ( elementNodeName!=null && elementNodeName.equals(".") ) {
throw new MappingException("element node must not be the parent: " + elementNodeName );
if ( elementNodeName != null && elementNodeName.equals( "." ) ) {
throw new MappingException( "element node must not be the parent: " + elementNodeName );
}
if ( nodeName!=null && nodeName.indexOf('@')>-1 ) {
throw new MappingException("collection node must not be an attribute: " + elementNodeName );
if ( nodeName != null && nodeName.indexOf( '@' ) > -1 ) {
throw new MappingException( "collection node must not be an attribute: " + elementNodeName );
}
}

@@ -343,10 +346,12 @@ public abstract class Collection implements Fetchable, Value, Filterable {
if ( !s.isFormula() ) {
Column col = (Column) s;
if ( !distinctColumns.add( col.getName() ) ) {
throw new MappingException( "Repeated column in mapping for collection: "
+ getRole()
+ " column: "
+ col.getName() );
throw new MappingException(
"Repeated column in mapping for collection: "
+ getRole()
+ " column: "
+ col.getName()
);
}
}
}
@@ -356,14 +361,18 @@ public abstract class Collection implements Fetchable, Value, Filterable {
HashSet cols = new HashSet();
checkColumnDuplication( cols, getKey().getColumnIterator() );
if ( isIndexed() ) {
checkColumnDuplication( cols, ( (IndexedCollection) this )
.getIndex()
.getColumnIterator() );
checkColumnDuplication(
cols, ( (IndexedCollection) this )
.getIndex()
.getColumnIterator()
);
}
if ( isIdentified() ) {
checkColumnDuplication( cols, ( (IdentifierCollection) this )
.getIdentifier()
.getColumnIterator() );
checkColumnDuplication(
cols, ( (IdentifierCollection) this )
.getIdentifier()
.getColumnIterator()
);
}
if ( !isOneToMany() ) {
checkColumnDuplication( cols, getElement().getColumnIterator() );
@@ -454,7 +463,6 @@ public abstract class Collection implements Fetchable, Value, Filterable {
}



public void setCustomSQLInsert(String customSQLInsert, boolean callable, ExecuteUpdateResultCheckStyle checkStyle) {
this.customSQLInsert = customSQLInsert;
this.customInsertCallable = callable;
@@ -509,7 +517,10 @@ public abstract class Collection implements Fetchable, Value, Filterable {
return deleteCheckStyle;
}

public void setCustomSQLDeleteAll(String customSQLDeleteAll, boolean callable, ExecuteUpdateResultCheckStyle checkStyle) {
public void setCustomSQLDeleteAll(
String customSQLDeleteAll,
boolean callable,
ExecuteUpdateResultCheckStyle checkStyle) {
this.customSQLDeleteAll = customSQLDeleteAll;
this.customDeleteAllCallable = callable;
this.deleteAllCheckStyle = checkStyle;
@@ -527,15 +538,44 @@ public abstract class Collection implements Fetchable, Value, Filterable {
return deleteAllCheckStyle;
}

public void addFilter(String name, String condition, boolean autoAliasInjection, java.util.Map<String,String> aliasTableMap, java.util.Map<String,String> aliasEntityMap) {
filters.add(new FilterConfiguration(name, condition, autoAliasInjection, aliasTableMap, aliasEntityMap, null));
public void addFilter(
String name,
String condition,
boolean autoAliasInjection,
java.util.Map<String, String> aliasTableMap,
java.util.Map<String, String> aliasEntityMap) {
filters.add(
new FilterConfiguration(
name,
condition,
autoAliasInjection,
aliasTableMap,
aliasEntityMap,
null
)
);
}

public java.util.List getFilters() {
return filters;
}

public void addManyToManyFilter(String name, String condition, boolean autoAliasInjection, java.util.Map<String,String> aliasTableMap, java.util.Map<String,String> aliasEntityMap) {
manyToManyFilters.add(new FilterConfiguration(name, condition, autoAliasInjection, aliasTableMap, aliasEntityMap, null));
public void addManyToManyFilter(
String name,
String condition,
boolean autoAliasInjection,
java.util.Map<String, String> aliasTableMap,
java.util.Map<String, String> aliasEntityMap) {
manyToManyFilters.add(
new FilterConfiguration(
name,
condition,
autoAliasInjection,
aliasTableMap,
aliasEntityMap,
null
)
);
}

public java.util.List getManyToManyFilters() {
@@ -543,7 +583,7 @@ public abstract class Collection implements Fetchable, Value, Filterable {
}

@Override
public String toString() {
public String toString() {
return getClass().getName() + '(' + getRole() + ')';
}

@@ -650,7 +690,7 @@ public abstract class Collection implements Fetchable, Value, Filterable {
public boolean isSubselectLoadable() {
return subselectLoadable;
}



public void setSubselectLoadable(boolean subqueryLoadable) {
this.subselectLoadable = subqueryLoadable;
@@ -671,15 +711,15 @@ public abstract class Collection implements Fetchable, Value, Filterable {
public void setExtraLazy(boolean extraLazy) {
this.extraLazy = extraLazy;
}


public boolean hasOrder() {
return orderBy!=null || manyToManyOrderBy!=null;
return orderBy != null || manyToManyOrderBy != null;
}

public void setComparatorClassName(String comparatorClassName) {
this.comparatorClassName = comparatorClassName;
this.comparatorClassName = comparatorClassName;
}


public String getComparatorClassName() {
return comparatorClassName;
}

@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.mapping;

import java.io.Serializable;
import java.util.Locale;

@@ -35,6 +36,7 @@ import org.hibernate.sql.Template;

/**
* A column of a relational database table
*
* @author Gavin King
*/
public class Column implements Selectable, Serializable, Cloneable {
@@ -43,13 +45,13 @@ public class Column implements Selectable, Serializable, Cloneable {
public static final int DEFAULT_PRECISION = 19;
public static final int DEFAULT_SCALE = 2;

private int length=DEFAULT_LENGTH;
private int precision=DEFAULT_PRECISION;
private int scale=DEFAULT_SCALE;
private int length = DEFAULT_LENGTH;
private int precision = DEFAULT_PRECISION;
private int scale = DEFAULT_SCALE;
private Value value;
private int typeIndex;
private String name;
private boolean nullable=true;
private boolean nullable = true;
private boolean unique;
private String sqlType;
private Integer sqlTypeCode;
@@ -65,38 +67,45 @@ public class Column implements Selectable, Serializable, Cloneable {
}

public Column(String columnName) {
setName(columnName);
setName( columnName );
}

public int getLength() {
return length;
}

public void setLength(int length) {
this.length = length;
}

public Value getValue() {
return value;
}

public void setValue(Value value) {
this.value= value;
this.value = value;
}

public String getName() {
return name;
}

public void setName(String name) {
if (
StringHelper.isNotEmpty( name ) &&
Dialect.QUOTE.indexOf( name.charAt(0) ) > -1 //TODO: deprecated, remove eventually
) {
quoted=true;
this.name=name.substring( 1, name.length()-1 );
StringHelper.isNotEmpty( name ) &&
Dialect.QUOTE.indexOf( name.charAt( 0 ) ) > -1 //TODO: deprecated, remove eventually
) {
quoted = true;
this.name = name.substring( 1, name.length() - 1 );
}
else {
this.name = name;
}
}

/** returns quoted name as it would be in the mapping file. */
/**
* returns quoted name as it would be in the mapping file.
*/
public String getQuotedName() {
return quoted ?
"`" + name + "`" :
@@ -105,14 +114,14 @@ public class Column implements Selectable, Serializable, Cloneable {

public String getQuotedName(Dialect d) {
return quoted ?
d.openQuote() + name + d.closeQuote() :
name;
d.openQuote() + name + d.closeQuote() :
name;
}


@Override
public String getAlias(Dialect dialect) {
final int lastLetter = StringHelper.lastIndexOfLetter( name );
final String suffix = Integer.toString(uniqueInteger) + '_';
final String suffix = Integer.toString( uniqueInteger ) + '_';

String alias = name;
if ( lastLetter == -1 ) {
@@ -123,12 +132,15 @@ public class Column implements Selectable, Serializable, Cloneable {
}

boolean useRawName = name.length() + suffix.length() <= dialect.getMaxAliasLength()
&& !quoted && !name.toLowerCase(Locale.ROOT).equals( "rowid" );
&& !quoted && !name.toLowerCase( Locale.ROOT ).equals( "rowid" );
if ( !useRawName ) {
if ( suffix.length() >= dialect.getMaxAliasLength() ) {
throw new MappingException( String.format(
"Unique suffix [%s] length must be less than maximum [%d]",
suffix, dialect.getMaxAliasLength() ) );
throw new MappingException(
String.format(
"Unique suffix [%s] length must be less than maximum [%d]",
suffix, dialect.getMaxAliasLength()
)
);
}
if ( alias.length() + suffix.length() > dialect.getMaxAliasLength() ) {
alias = alias.substring( 0, dialect.getMaxAliasLength() - suffix.length() );
@@ -136,13 +148,13 @@ public class Column implements Selectable, Serializable, Cloneable {
}
return alias + suffix;
}


/**
* Generate a column alias that is unique across multiple tables
*/
@Override
public String getAlias(Dialect dialect, Table table) {
return getAlias(dialect) + table.getUniqueInteger() + '_';
return getAlias( dialect ) + table.getUniqueInteger() + '_';
}

public boolean isNullable() {
@@ -150,12 +162,13 @@ public class Column implements Selectable, Serializable, Cloneable {
}

public void setNullable(boolean nullable) {
this.nullable=nullable;
this.nullable = nullable;
}

public int getTypeIndex() {
return typeIndex;
}

public void setTypeIndex(int typeIndex) {
this.typeIndex = typeIndex;
}
@@ -168,8 +181,8 @@ public class Column implements Selectable, Serializable, Cloneable {
public int hashCode() {
//used also for generation of FK names!
return isQuoted() ?
name.hashCode() :
name.toLowerCase(Locale.ROOT).hashCode();
name.hashCode() :
name.toLowerCase( Locale.ROOT ).hashCode();
}

@Override
@@ -179,63 +192,63 @@ public class Column implements Selectable, Serializable, Cloneable {

@SuppressWarnings("SimplifiableIfStatement")
public boolean equals(Column column) {
if (null == column) {
if ( null == column ) {
return false;
}
if (this == column) {
if ( this == column ) {
return true;
}

return isQuoted() ?
name.equals(column.name) :
name.equalsIgnoreCase(column.name);
name.equals( column.name ) :
name.equalsIgnoreCase( column.name );
}

public int getSqlTypeCode(Mapping mapping) throws MappingException {
org.hibernate.type.Type type = getValue().getType();
try {
int sqlTypeCode = type.sqlTypes( mapping )[getTypeIndex()];
if ( getSqlTypeCode() != null && getSqlTypeCode() != sqlTypeCode ) {
throw new MappingException( "SQLType code's does not match. mapped as " + sqlTypeCode + " but is " + getSqlTypeCode() );
}
return sqlTypeCode;
}
catch ( Exception e ) {
throw new MappingException(
"Could not determine type for column " +
name +
" of type " +
type.getClass().getName() +
": " +
e.getClass().getName(),
e
);
}
}
public int getSqlTypeCode(Mapping mapping) throws MappingException {
org.hibernate.type.Type type = getValue().getType();
try {
int sqlTypeCode = type.sqlTypes( mapping )[getTypeIndex()];
if ( getSqlTypeCode() != null && getSqlTypeCode() != sqlTypeCode ) {
throw new MappingException( "SQLType code's does not match. mapped as " + sqlTypeCode + " but is " + getSqlTypeCode() );
}
return sqlTypeCode;
}
catch (Exception e) {
throw new MappingException(
"Could not determine type for column " +
name +
" of type " +
type.getClass().getName() +
": " +
e.getClass().getName(),
e
);
}
}

/**
* Returns the underlying columns sqltypecode.
* If null, it is because the sqltype code is unknown.
*
* Use #getSqlTypeCode(Mapping) to retreive the sqltypecode used
* for the columns associated Value/Type.
*
* @return sqlTypeCode if it is set, otherwise null.
*/
public Integer getSqlTypeCode() {
return sqlTypeCode;
}
/**
* Returns the underlying columns sqltypecode.
* If null, it is because the sqltype code is unknown.
* <p/>
* Use #getSqlTypeCode(Mapping) to retreive the sqltypecode used
* for the columns associated Value/Type.
*
* @return sqlTypeCode if it is set, otherwise null.
*/
public Integer getSqlTypeCode() {
return sqlTypeCode;
}

public void setSqlTypeCode(Integer typeCode) {
sqlTypeCode=typeCode;
}
public void setSqlTypeCode(Integer typeCode) {
sqlTypeCode = typeCode;
}

public String getSqlType(Dialect dialect, Mapping mapping) throws HibernateException {
if ( sqlType == null ) {
sqlType = dialect.getTypeName( getSqlTypeCode( mapping ), getLength(), getPrecision(), getScale() );
}
return sqlType;
}
public String getSqlType(Dialect dialect, Mapping mapping) throws HibernateException {
if ( sqlType == null ) {
sqlType = dialect.getTypeName( getSqlTypeCode( mapping ), getLength(), getPrecision(), getScale() );
}
return sqlType;
}

public String getSqlType() {
return sqlType;
@@ -267,7 +280,7 @@ public class Column implements Selectable, Serializable, Cloneable {
}

public boolean hasCheckConstraint() {
return checkConstraint!=null;
return checkConstraint != null;
}

@Override
@@ -284,7 +297,7 @@ public class Column implements Selectable, Serializable, Cloneable {
public String getReadExpr(Dialect dialect) {
return hasCustomRead() ? customRead : getQuotedName( dialect );
}


public String getWriteExpr() {
return ( customWrite != null && customWrite.length() > 0 ) ? customWrite : "?";
}
@@ -296,17 +309,18 @@ public class Column implements Selectable, Serializable, Cloneable {

@Override
public String getText(Dialect d) {
return getQuotedName(d);
return getQuotedName( d );
}

@Override
public String getText() {
return getName();
}


public int getPrecision() {
return precision;
}

public void setPrecision(int scale) {
this.precision = scale;
}
@@ -314,6 +328,7 @@ public class Column implements Selectable, Serializable, Cloneable {
public int getScale() {
return scale;
}

public void setScale(int scale) {
this.scale = scale;
}
@@ -351,7 +366,7 @@ public class Column implements Selectable, Serializable, Cloneable {
}

public String getCanonicalName() {
return quoted ? name : name.toLowerCase(Locale.ROOT);
return quoted ? name : name.toLowerCase( Locale.ROOT );
}

/**

@@ -36,9 +36,9 @@ import org.hibernate.internal.util.collections.JoinedIterator;
*/
@SuppressWarnings("unchecked")
public class DenormalizedTable extends Table {


private final Table includedTable;


public DenormalizedTable(Table includedTable) {
this.includedTable = includedTable;
includedTable.setHasDenormalizedTables();
@@ -50,7 +50,12 @@ public class DenormalizedTable extends Table {
includedTable.setHasDenormalizedTables();
}

public DenormalizedTable(Schema schema, Identifier physicalTableName, String subselectFragment, boolean isAbstract, Table includedTable) {
public DenormalizedTable(
Schema schema,
Identifier physicalTableName,
String subselectFragment,
boolean isAbstract,
Table includedTable) {
super( schema, physicalTableName, subselectFragment, isAbstract );
this.includedTable = includedTable;
includedTable.setHasDenormalizedTables();
@@ -63,28 +68,28 @@ public class DenormalizedTable extends Table {
}

@Override
public void createForeignKeys() {
public void createForeignKeys() {
includedTable.createForeignKeys();
Iterator iter = includedTable.getForeignKeyIterator();
while ( iter.hasNext() ) {
ForeignKey fk = (ForeignKey) iter.next();
createForeignKey(
createForeignKey(
Constraint.generateName(
fk.generatedConstraintNamePrefix(),
this,
fk.getColumns()
),
fk.getColumns(),
fk.getColumns(),
fk.getReferencedEntityName(),
fk.getReferencedColumns()
);
);
}
}

@Override
public Column getColumn(Column column) {
public Column getColumn(Column column) {
Column superColumn = super.getColumn( column );
if (superColumn != null) {
if ( superColumn != null ) {
return superColumn;
}
else {
@@ -103,25 +108,25 @@ public class DenormalizedTable extends Table {
}

@Override
public Iterator getColumnIterator() {
public Iterator getColumnIterator() {
return new JoinedIterator(
includedTable.getColumnIterator(),
super.getColumnIterator()
);
);
}

@Override
public boolean containsColumn(Column column) {
return super.containsColumn(column) || includedTable.containsColumn(column);
public boolean containsColumn(Column column) {
return super.containsColumn( column ) || includedTable.containsColumn( column );
}

@Override
public PrimaryKey getPrimaryKey() {
public PrimaryKey getPrimaryKey() {
return includedTable.getPrimaryKey();
}

@Override
public Iterator getUniqueKeyIterator() {
public Iterator getUniqueKeyIterator() {
Iterator iter = includedTable.getUniqueKeyIterator();
while ( iter.hasNext() ) {
UniqueKey uk = (UniqueKey) iter.next();
@@ -131,21 +136,21 @@ public class DenormalizedTable extends Table {
}

@Override
public Iterator getIndexIterator() {
public Iterator getIndexIterator() {
List indexes = new ArrayList();
Iterator iter = includedTable.getIndexIterator();
while ( iter.hasNext() ) {
Index parentIndex = (Index) iter.next();
Index index = new Index();
index.setName( getName() + parentIndex.getName() );
index.setTable(this);
index.setTable( this );
index.addColumns( parentIndex.getColumnIterator() );
indexes.add( index );
}
return new JoinedIterator(
indexes.iterator(),
super.getIndexIterator()
);
);
}

public Table getIncludedTable() {

@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.mapping;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@@ -32,6 +33,7 @@ import org.hibernate.internal.util.StringHelper;

/**
* A foreign key constraint
*
* @author Gavin King
*/
public class ForeignKey extends Constraint {
@@ -68,32 +70,36 @@ public class ForeignKey extends Constraint {
}
}

public String sqlConstraintString(Dialect dialect, String constraintName, String defaultCatalog, String defaultSchema) {
String[] columnNames = new String[ getColumnSpan() ];
String[] referencedColumnNames = new String[ getColumnSpan() ];
public String sqlConstraintString(
Dialect dialect,
String constraintName,
String defaultCatalog,
String defaultSchema) {
String[] columnNames = new String[getColumnSpan()];
String[] referencedColumnNames = new String[getColumnSpan()];

final Iterator<Column> referencedColumnItr;
if ( isReferenceToPrimaryKey() ) {
referencedColumnItr = referencedTable.getPrimaryKey().getColumnIterator();
}
}
else {
referencedColumnItr = referencedColumns.iterator();
}


Iterator columnItr = getColumnIterator();
int i=0;
int i = 0;
while ( columnItr.hasNext() ) {
columnNames[i] = ( (Column) columnItr.next() ).getQuotedName(dialect);
referencedColumnNames[i] = referencedColumnItr.next().getQuotedName(dialect);
columnNames[i] = ( (Column) columnItr.next() ).getQuotedName( dialect );
referencedColumnNames[i] = referencedColumnItr.next().getQuotedName( dialect );
i++;
}

final String result = dialect.getAddForeignKeyConstraintString(
constraintName,
columnNames,
referencedTable.getQualifiedName(dialect, defaultCatalog, defaultSchema),
referencedColumnNames,
isReferenceToPrimaryKey()
constraintName,
columnNames,
referencedTable.getQualifiedName( dialect, defaultCatalog, defaultSchema ),
referencedColumnNames,
isReferenceToPrimaryKey()
);
return cascadeDeleteEnabled && dialect.supportsCascadeDelete()
? result + " on delete cascade"
@@ -105,32 +111,32 @@ public class ForeignKey extends Constraint {
}

private void appendColumns(StringBuilder buf, Iterator columns) {
while( columns.hasNext() ) {
while ( columns.hasNext() ) {
Column column = (Column) columns.next();
buf.append( column.getName() );
if ( columns.hasNext() ) {
buf.append(",");
buf.append( "," );
}
}
}

public void setReferencedTable(Table referencedTable) throws MappingException {
//if( isReferenceToPrimaryKey() ) alignColumns(referencedTable); // TODO: possibly remove to allow more piecemal building of a foreignkey.


this.referencedTable = referencedTable;
}

/**
* Validates that columnspan of the foreignkey and the primarykey is the same.
*
* <p/>
* Furthermore it aligns the length of the underlying tables columns.
*/
public void alignColumns() {
if ( isReferenceToPrimaryKey() ) {
alignColumns(referencedTable);
alignColumns( referencedTable );
}
}


private void alignColumns(Table referencedTable) {
final int referencedPkColumnSpan = referencedTable.getPrimaryKey().getColumnSpan();
if ( referencedPkColumnSpan != getColumnSpan() ) {
@@ -139,15 +145,15 @@ public class ForeignKey extends Constraint {
.append( getTable().getName() )
.append( " [" );
appendColumns( sb, getColumnIterator() );
sb.append("])")
sb.append( "])" )
.append( ") must have same number of columns as the referenced primary key (" )
.append( referencedTable.getName() )
.append( " [" );
appendColumns( sb, referencedTable.getPrimaryKey().getColumnIterator() );
sb.append("])");
sb.append( "])" );
throw new MappingException( sb.toString() );
}


Iterator fkCols = getColumnIterator();
Iterator pkCols = referencedTable.getPrimaryKey().getColumnIterator();
while ( pkCols.hasNext() ) {
@@ -166,7 +172,7 @@ public class ForeignKey extends Constraint {

public String sqlDropString(Dialect dialect, String defaultCatalog, String defaultSchema) {
final StringBuilder buf = new StringBuilder( "alter table " );
buf.append( getTable().getQualifiedName(dialect, defaultCatalog, defaultSchema) );
buf.append( getTable().getQualifiedName( dialect, defaultCatalog, defaultSchema ) );
buf.append( dialect.getDropForeignKeyString() );
if ( dialect.supportsIfExistsBeforeConstraintName() ) {
buf.append( "if exists " );
@@ -185,19 +191,23 @@ public class ForeignKey extends Constraint {
public void setCascadeDeleteEnabled(boolean cascadeDeleteEnabled) {
this.cascadeDeleteEnabled = cascadeDeleteEnabled;
}


public boolean isPhysicalConstraint() {
return referencedTable.isPhysicalTable()
&& getTable().isPhysicalTable()
&& !referencedTable.hasDenormalizedTables();
}

/** Returns the referenced columns if the foreignkey does not refer to the primary key */
/**
* Returns the referenced columns if the foreignkey does not refer to the primary key
*/
public List getReferencedColumns() {
return referencedColumns;
return referencedColumns;
}

/** Does this foreignkey reference the primary key of the reference table */
/**
* Does this foreignkey reference the primary key of the reference table
*/
public boolean isReferenceToPrimaryKey() {
return referencedColumns.isEmpty();
}
@@ -205,28 +215,30 @@ public class ForeignKey extends Constraint {
public void addReferencedColumns(Iterator referencedColumnsIterator) {
while ( referencedColumnsIterator.hasNext() ) {
Selectable col = (Selectable) referencedColumnsIterator.next();
if ( !col.isFormula() ) addReferencedColumn( (Column) col );
if ( !col.isFormula() ) {
addReferencedColumn( (Column) col );
}
}
}

private void addReferencedColumn(Column column) {
if ( !referencedColumns.contains(column) ) {
if ( !referencedColumns.contains( column ) ) {
referencedColumns.add( column );
}
}


public String toString() {
if(!isReferenceToPrimaryKey() ) {
if ( !isReferenceToPrimaryKey() ) {
return getClass().getName()
+ '(' + getTable().getName() + getColumns()
+ " ref-columns:" + '(' + getReferencedColumns() + ") as " + getName() + ")";
}
}
else {
return super.toString();
}


}


public String generatedConstraintNamePrefix() {
return "FK_";
}

@@ -176,11 +176,14 @@ public class Index implements RelationalModel, Exportable, Serializable {
Iterator iter = getColumnIterator();
while ( iter.hasNext() ) {
buf.append( ( (Column) iter.next() ).getQuotedName( dialect ) );
if ( iter.hasNext() ) buf.append( ", " );
if ( iter.hasNext() ) {
buf.append( ", " );
}
}
return buf.append( ')' ).toString();
}

@Override
public String sqlDropString(Dialect dialect, String defaultCatalog, String defaultSchema) {
return "drop index " +
StringHelper.qualify(
@@ -219,13 +222,11 @@ public class Index implements RelationalModel, Exportable, Serializable {
}

public void addColumns(Iterator extraColumns) {
while ( extraColumns.hasNext() ) addColumn( (Column) extraColumns.next() );
while ( extraColumns.hasNext() ) {
addColumn( (Column) extraColumns.next() );
}
}

/**
* @param column
* @return true if this constraint already contains a column with same name.
*/
public boolean containsColumn(Column column) {
return columns.contains( column );
}
@@ -238,6 +239,7 @@ public class Index implements RelationalModel, Exportable, Serializable {
this.name = name;
}

@Override
public String toString() {
return getClass().getName() + "(" + getName() + ")";
}

@@ -34,6 +34,7 @@ import org.hibernate.type.Type;

/**
* A mapping for a one-to-many association
*
* @author Gavin King
*/
public class OneToMany implements Value {
@@ -47,28 +48,28 @@ public class OneToMany implements Value {

public OneToMany(MetadataImplementor metadata, PersistentClass owner) throws MappingException {
this.metadata = metadata;
this.referencingTable = (owner==null) ? null : owner.getTable();
this.referencingTable = ( owner == null ) ? null : owner.getTable();
}

private EntityType getEntityType() {
return metadata.getTypeResolver().getTypeFactory().manyToOne(
getReferencedEntityName(),
true,
null,
true,
null,
false,
false,
isIgnoreNotFound(),
false
);
);
}

public PersistentClass getAssociatedClass() {
return associatedClass;
}

/**
* Associated entity on the many side
*/
/**
* Associated entity on the many side
*/
public void setAssociatedClass(PersistentClass associatedClass) {
this.associatedClass = associatedClass;
}
@@ -89,9 +90,9 @@ public class OneToMany implements Value {
return FetchMode.JOIN;
}

/**
* Table of the owner entity (the "one" side)
*/
/**
* Table of the owner entity (the "one" side)
*/
public Table getTable() {
return referencingTable;
}
@@ -115,37 +116,38 @@ public class OneToMany implements Value {
public boolean hasFormula() {
return false;
}


public boolean isValid(Mapping mapping) throws MappingException {
if (referencedEntityName==null) {
throw new MappingException("one to many association must specify the referenced entity");
if ( referencedEntityName == null ) {
throw new MappingException( "one to many association must specify the referenced entity" );
}
return true;
}

public String getReferencedEntityName() {
public String getReferencedEntityName() {
return referencedEntityName;
}

/**
* Associated entity on the "many" side
*/
/**
* Associated entity on the "many" side
*/
public void setReferencedEntityName(String referencedEntityName) {
this.referencedEntityName = referencedEntityName==null ? null : referencedEntityName.intern();
this.referencedEntityName = referencedEntityName == null ? null : referencedEntityName.intern();
}

public void setTypeUsingReflection(String className, String propertyName) {}

public Object accept(ValueVisitor visitor) {
return visitor.accept(this);
public void setTypeUsingReflection(String className, String propertyName) {
}



public Object accept(ValueVisitor visitor) {
return visitor.accept( this );
}


public boolean[] getColumnInsertability() {
//TODO: we could just return all false...
throw new UnsupportedOperationException();
}


public boolean[] getColumnUpdateability() {
//TODO: we could just return all false...
throw new UnsupportedOperationException();
@@ -176,5 +178,5 @@ public class OneToMany implements Value {
public void setIgnoreNotFound(boolean ignoreNotFound) {
this.ignoreNotFound = ignoreNotFound;
}


}

@@ -33,7 +33,6 @@ import java.util.StringTokenizer;

import org.hibernate.EntityMode;
import org.hibernate.MappingException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.OptimisticLockStyle;
import org.hibernate.engine.spi.ExecuteUpdateResultCheckStyle;
import org.hibernate.engine.spi.Mapping;
@@ -52,7 +51,7 @@ import org.hibernate.sql.Alias;
*/
public abstract class PersistentClass implements AttributeContainer, Serializable, Filterable, MetaAttributable {

private static final Alias PK_ALIAS = new Alias(15, "PK");
private static final Alias PK_ALIAS = new Alias( 15, "PK" );

public static final String NULL_DISCRIMINATOR_MAPPING = "null";
public static final String NOT_NULL_DISCRIMINATOR_MAPPING = "not null";
@@ -61,10 +60,10 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl

private String className;
private transient Class mappedClass;


private String proxyInterfaceName;
private transient Class proxyInterface;


private String nodeName;
private String jpaEntityName;

@@ -77,7 +76,7 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
private final ArrayList subclassTables = new ArrayList();
private boolean dynamicInsert;
private boolean dynamicUpdate;
private int batchSize=-1;
private int batchSize = -1;
private boolean selectBeforeUpdate;
private java.util.Map metaAttributes;
private ArrayList<Join> joins = new ArrayList<Join>();
@@ -111,7 +110,7 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}

public void setClassName(String className) {
this.className = className==null ? null : className.intern();
this.className = className == null ? null : className.intern();
this.mappedClass = null;
}

@@ -125,32 +124,32 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}

public Class getMappedClass() throws MappingException {
if (className==null) {
if ( className == null ) {
return null;
}
try {
if (mappedClass == null) {
mappedClass = ReflectHelper.classForName(className);
if ( mappedClass == null ) {
mappedClass = ReflectHelper.classForName( className );
}
return mappedClass;
}
catch (ClassNotFoundException cnfe) {
throw new MappingException("entity class not found: " + className, cnfe);
throw new MappingException( "entity class not found: " + className, cnfe );
}
}

public Class getProxyInterface() {
if (proxyInterfaceName==null) {
if ( proxyInterfaceName == null ) {
return null;
}
try {
if (proxyInterface == null) {
if ( proxyInterface == null ) {
proxyInterface = ReflectHelper.classForName( proxyInterfaceName );
}
return proxyInterface;
}
catch (ClassNotFoundException cnfe) {
throw new MappingException("proxy class not found: " + proxyInterfaceName, cnfe);
throw new MappingException( "proxy class not found: " + proxyInterfaceName, cnfe );
}
}

@@ -159,8 +158,9 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}

abstract int nextSubclassId();

public abstract int getSubclassId();


public boolean useDynamicUpdate() {
return dynamicUpdate;
}
@@ -181,18 +181,18 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
public void addSubclass(Subclass subclass) throws MappingException {
// inheritance cycle detection (paranoid check)
PersistentClass superclass = getSuperclass();
while (superclass!=null) {
while ( superclass != null ) {
if ( subclass.getEntityName().equals( superclass.getEntityName() ) ) {
throw new MappingException(
"Circular inheritance mapping detected: " +
subclass.getEntityName() +
" will have it self as superclass when extending " +
getEntityName()
"Circular inheritance mapping detected: " +
subclass.getEntityName() +
" will have it self as superclass when extending " +
getEntityName()
);
}
superclass = superclass.getSuperclass();
}
subclasses.add(subclass);
subclasses.add( subclass );
}

public boolean hasSubclasses() {
@@ -201,51 +201,51 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl

public int getSubclassSpan() {
int n = subclasses.size();
Iterator iter = subclasses.iterator();
while ( iter.hasNext() ) {
n += ( (Subclass) iter.next() ).getSubclassSpan();
for ( Subclass subclass : subclasses ) {
n += subclass.getSubclassSpan();
}
return n;
}

/**
* Iterate over subclasses in a special 'order', most derived subclasses
* first.
*/
public Iterator getSubclassIterator() {
Iterator[] iters = new Iterator[ subclasses.size() + 1 ];
public Iterator<Subclass> getSubclassIterator() {
Iterator[] iters = new Iterator[subclasses.size() + 1];
Iterator iter = subclasses.iterator();
int i=0;
int i = 0;
while ( iter.hasNext() ) {
iters[i++] = ( (Subclass) iter.next() ).getSubclassIterator();
}
iters[i] = subclasses.iterator();
return new JoinedIterator(iters);
return new JoinedIterator( iters );
}

public Iterator getSubclassClosureIterator() {
ArrayList iters = new ArrayList();
iters.add( new SingletonIterator(this) );
iters.add( new SingletonIterator( this ) );
Iterator iter = getSubclassIterator();
while ( iter.hasNext() ) {
PersistentClass clazz = (PersistentClass) iter.next();
PersistentClass clazz = (PersistentClass) iter.next();
iters.add( clazz.getSubclassClosureIterator() );
}
return new JoinedIterator(iters);
return new JoinedIterator( iters );
}


public Table getIdentityTable() {
return getRootTable();
}


public Iterator getDirectSubclasses() {
return subclasses.iterator();
}

@Override
public void addProperty(Property p) {
properties.add(p);
declaredProperties.add(p);
p.setPersistentClass(this);
properties.add( p );
declaredProperties.add( p );
p.setPersistentClass( this );
}

public abstract Table getTable();
@@ -255,58 +255,80 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}

public abstract boolean isMutable();

public abstract boolean hasIdentifierProperty();

public abstract Property getIdentifierProperty();

public abstract Property getDeclaredIdentifierProperty();

public abstract KeyValue getIdentifier();

public abstract Property getVersion();

public abstract Property getDeclaredVersion();

public abstract Value getDiscriminator();

public abstract boolean isInherited();

public abstract boolean isPolymorphic();

public abstract boolean isVersioned();

public abstract String getNaturalIdCacheRegionName();

public abstract String getCacheConcurrencyStrategy();

public abstract PersistentClass getSuperclass();

public abstract boolean isExplicitPolymorphism();

public abstract boolean isDiscriminatorInsertable();

public abstract Iterator getPropertyClosureIterator();

public abstract Iterator getTableClosureIterator();

public abstract Iterator getKeyClosureIterator();

protected void addSubclassProperty(Property prop) {
subclassProperties.add(prop);
subclassProperties.add( prop );
}

protected void addSubclassJoin(Join join) {
subclassJoins.add(join);
subclassJoins.add( join );
}

protected void addSubclassTable(Table subclassTable) {
subclassTables.add(subclassTable);
subclassTables.add( subclassTable );
}

public Iterator getSubclassPropertyClosureIterator() {
ArrayList iters = new ArrayList();
iters.add( getPropertyClosureIterator() );
iters.add( subclassProperties.iterator() );
for ( int i=0; i<subclassJoins.size(); i++ ) {
Join join = (Join) subclassJoins.get(i);
for ( int i = 0; i < subclassJoins.size(); i++ ) {
Join join = (Join) subclassJoins.get( i );
iters.add( join.getPropertyIterator() );
}
return new JoinedIterator(iters);
return new JoinedIterator( iters );
}

public Iterator getSubclassJoinClosureIterator() {
return new JoinedIterator( getJoinClosureIterator(), subclassJoins.iterator() );
}

public Iterator getSubclassTableClosureIterator() {
return new JoinedIterator( getTableClosureIterator(), subclassTables.iterator() );
}

public boolean isClassOrSuperclassJoin(Join join) {
return joins.contains(join);
return joins.contains( join );
}

public boolean isClassOrSuperclassTable(Table closureTable) {
return getTable()==closureTable;
return getTable() == closureTable;
}

public boolean isLazy() {
@@ -318,10 +340,15 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}

public abstract boolean hasEmbeddedIdentifier();

public abstract Class getEntityPersisterClass();

public abstract void setEntityPersisterClass(Class classPersisterClass);

public abstract Table getRootTable();

public abstract RootClass getRootClass();

public abstract KeyValue getKey();

public void setDiscriminatorValue(String discriminatorValue) {
@@ -329,16 +356,16 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}

public void setEntityName(String entityName) {
this.entityName = entityName==null ? null : entityName.intern();
this.entityName = entityName == null ? null : entityName.intern();
}

public void createPrimaryKey() {
//Primary key constraint
PrimaryKey pk = new PrimaryKey();
Table table = getTable();
pk.setTable(table);
pk.setTable( table );
pk.setName( PK_ALIAS.toAliasString( table.getName() ) );
table.setPrimaryKey(pk);
table.setPrimaryKey( pk );

pk.addColumns( getKey().getColumnIterator() );
}
@@ -364,8 +391,9 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
/**
* Build an iterator of properties which are "referenceable".
*
* @see #getReferencedProperty for a discussion of "referenceable"
* @return The property iterator.
*
* @see #getReferencedProperty for a discussion of "referenceable"
*/
public Iterator getReferenceablePropertyIterator() {
return getPropertyClosureIterator();
@@ -378,14 +406,16 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
* mapping (an identifier or explcitly named in a property-ref).
*
* @param propertyPath The property path to resolve into a property reference.
*
* @return The property reference (never null).
*
* @throws MappingException If the property could not be found.
*/
public Property getReferencedProperty(String propertyPath) throws MappingException {
try {
return getRecursiveProperty( propertyPath, getReferenceablePropertyIterator() );
}
catch ( MappingException e ) {
catch (MappingException e) {
throw new MappingException(
"property-ref [" + propertyPath + "] not found on entity [" + getEntityName() + "]", e
);
@@ -396,7 +426,7 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
try {
return getRecursiveProperty( propertyPath, getPropertyIterator() );
}
catch ( MappingException e ) {
catch (MappingException e) {
throw new MappingException(
"property [" + propertyPath + "] not found on entity [" + getEntityName() + "]", e
);
@@ -408,7 +438,7 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
StringTokenizer st = new StringTokenizer( propertyPath, ".", false );
try {
while ( st.hasMoreElements() ) {
final String element = ( String ) st.nextElement();
final String element = (String) st.nextElement();
if ( property == null ) {
Property identifierProperty = getIdentifierProperty();
if ( identifierProperty != null && identifierProperty.getName().equals( element ) ) {
@@ -427,7 +457,7 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
property = identifierProperty;
}
}
catch( MappingException ignore ) {
catch (MappingException ignore) {
// ignore it...
}
}
@@ -438,11 +468,11 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}
else {
//flat recursive algorithm
property = ( ( Component ) property.getValue() ).getProperty( element );
property = ( (Component) property.getValue() ).getProperty( element );
}
}
}
catch ( MappingException e ) {
catch (MappingException e) {
throw new MappingException( "property [" + propertyPath + "] not found on entity [" + getEntityName() + "]" );
}

@@ -450,8 +480,8 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}

private Property getProperty(String propertyName, Iterator iterator) throws MappingException {
if(iterator.hasNext()) {
String root = StringHelper.root(propertyName);
if ( iterator.hasNext() ) {
String root = StringHelper.root( propertyName );
while ( iterator.hasNext() ) {
Property prop = (Property) iterator.next();
if ( prop.getName().equals( root ) ) {
@@ -466,7 +496,7 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
Iterator iter = getPropertyClosureIterator();
Property identifierProperty = getIdentifierProperty();
if ( identifierProperty != null
&& identifierProperty.getName().equals( StringHelper.root(propertyName) ) ) {
&& identifierProperty.getName().equals( StringHelper.root( propertyName ) ) ) {
return identifierProperty;
}
else {
@@ -502,26 +532,26 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
Iterator iter = getPropertyIterator();
while ( iter.hasNext() ) {
Property prop = (Property) iter.next();
if ( !prop.isValid(mapping) ) {
if ( !prop.isValid( mapping ) ) {
throw new MappingException(
"property mapping has wrong number of columns: " +
StringHelper.qualify( getEntityName(), prop.getName() ) +
" type: " +
prop.getType().getName()
StringHelper.qualify( getEntityName(), prop.getName() ) +
" type: " +
prop.getType().getName()
);
}
}
checkPropertyDuplication();
checkColumnDuplication();
}


private void checkPropertyDuplication() throws MappingException {
HashSet<String> names = new HashSet<String>();
Iterator iter = getPropertyIterator();
while ( iter.hasNext() ) {
Property prop = (Property) iter.next();
if ( !names.add( prop.getName() ) ) {
throw new MappingException( "Duplicate property mapping of " + prop.getName() + " found in " + getEntityName());
throw new MappingException( "Duplicate property mapping of " + prop.getName() + " found in " + getEntityName() );
}
}
}
@@ -529,6 +559,7 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
public boolean isDiscriminatorValueNotNull() {
return NOT_NULL_DISCRIMINATOR_MAPPING.equals( getDiscriminatorValue() );
}

public boolean isDiscriminatorValueNull() {
return NULL_DISCRIMINATOR_MAPPING.equals( getDiscriminatorValue() );
}
@@ -548,10 +579,10 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}

@Override
public String toString() {
public String toString() {
return getClass().getName() + '(' + getEntityName() + ')';
}


public Iterator getJoinIterator() {
return joins.iterator();
}
@@ -561,8 +592,8 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}

public void addJoin(Join join) {
joins.add(join);
join.setPersistentClass(this);
joins.add( join );
join.setPersistentClass( this );
}

public int getJoinClosureSpan() {
@@ -578,11 +609,11 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}

public int getJoinNumber(Property prop) {
int result=1;
int result = 1;
Iterator iter = getSubclassJoinClosureIterator();
while ( iter.hasNext() ) {
Join join = (Join) iter.next();
if ( join.containsProperty(prop) ) {
if ( join.containsProperty( prop ) ) {
return result;
}
result++;
@@ -604,7 +635,7 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
ArrayList iterators = new ArrayList();
iterators.add( properties.iterator() );
for ( int i = 0; i < joins.size(); i++ ) {
Join join = ( Join ) joins.get( i );
Join join = (Join) joins.get( i );
iterators.add( join.getPropertyIterator() );
}
return new JoinedIterator( iterators );
@@ -675,8 +706,22 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
return deleteCheckStyle;
}

public void addFilter(String name, String condition, boolean autoAliasInjection, java.util.Map<String,String> aliasTableMap, java.util.Map<String,String> aliasEntityMap) {
filters.add(new FilterConfiguration(name, condition, autoAliasInjection, aliasTableMap, aliasEntityMap, this));
public void addFilter(
String name,
String condition,
boolean autoAliasInjection,
java.util.Map<String, String> aliasTableMap,
java.util.Map<String, String> aliasEntityMap) {
filters.add(
new FilterConfiguration(
name,
condition,
autoAliasInjection,
aliasTableMap,
aliasEntityMap,
this
)
);
}

public java.util.List getFilters() {
@@ -694,13 +739,13 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}

public void setLoaderName(String loaderName) {
this.loaderName = loaderName==null ? null : loaderName.intern();
this.loaderName = loaderName == null ? null : loaderName.intern();
}

public abstract java.util.Set getSynchronizedTables();


public void addSynchronizedTable(String table) {
synchronizedTables.add(table);
synchronizedTables.add( table );
}

public Boolean isAbstract() {
@@ -711,27 +756,27 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
this.isAbstract = isAbstract;
}

protected void checkColumnDuplication(Set distinctColumns, Iterator columns)
throws MappingException {
protected void checkColumnDuplication(Set distinctColumns, Iterator columns)
throws MappingException {
while ( columns.hasNext() ) {
Selectable columnOrFormula = (Selectable) columns.next();
if ( !columnOrFormula.isFormula() ) {
Column col = (Column) columnOrFormula;
if ( !distinctColumns.add( col.getName() ) ) {
throw new MappingException(
throw new MappingException(
"Repeated column in mapping for entity: " +
getEntityName() +
" column: " +
col.getName() +
" (should be mapped with insert=\"false\" update=\"false\")"
);
getEntityName() +
" column: " +
col.getName() +
" (should be mapped with insert=\"false\" update=\"false\")"
);
}
}
}
}

protected void checkPropertyColumnDuplication(Set distinctColumns, Iterator properties)
throws MappingException {

protected void checkPropertyColumnDuplication(Set distinctColumns, Iterator properties)
throws MappingException {
while ( properties.hasNext() ) {
Property prop = (Property) properties.next();
if ( prop.getValue() instanceof Component ) { //TODO: remove use of instanceof!
@@ -745,18 +790,18 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}
}
}


protected Iterator getNonDuplicatedPropertyIterator() {
return getUnjoinedPropertyIterator();
}


protected Iterator getDiscriminatorColumnIterator() {
return EmptyIterator.INSTANCE;
}


protected void checkColumnDuplication() {
HashSet cols = new HashSet();
if (getIdentifierMapper() == null ) {
if ( getIdentifierMapper() == null ) {
//an identifier mapper => getKey will be included in the getNonDuplicatedPropertyIterator()
//and checked later, so it needs to be excluded
checkColumnDuplication( cols, getKey().getColumnIterator() );
@@ -771,13 +816,13 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
checkPropertyColumnDuplication( cols, join.getPropertyIterator() );
}
}


public abstract Object accept(PersistentClassVisitor mv);


public String getNodeName() {
return nodeName;
}


public void setNodeName(String nodeName) {
this.nodeName = nodeName;
}
@@ -785,23 +830,23 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
public String getJpaEntityName() {
return jpaEntityName;
}


public void setJpaEntityName(String jpaEntityName) {
this.jpaEntityName = jpaEntityName;
}


public boolean hasPojoRepresentation() {
return getClassName()!=null;
return getClassName() != null;
}

public boolean hasDom4jRepresentation() {
return getNodeName()!=null;
return getNodeName() != null;
}

public boolean hasSubselectLoadableCollections() {
return hasSubselectLoadableCollections;
}


public void setSubselectLoadableCollections(boolean hasSubselectCollections) {
this.hasSubselectLoadableCollections = hasSubselectCollections;
}
@@ -834,8 +879,10 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
}

public String getTuplizerImplClassName(EntityMode mode) {
if ( tuplizerImpls == null ) return null;
return ( String ) tuplizerImpls.get( mode );
if ( tuplizerImpls == null ) {
return null;
}
return (String) tuplizerImpls.get( mode );
}

public java.util.Map getTuplizerMap() {
@@ -862,15 +909,15 @@ public abstract class PersistentClass implements AttributeContainer, Serializabl
ArrayList iterators = new ArrayList();
iterators.add( declaredProperties.iterator() );
for ( int i = 0; i < joins.size(); i++ ) {
Join join = ( Join ) joins.get( i );
Join join = (Join) joins.get( i );
iterators.add( join.getDeclaredPropertyIterator() );
}
return new JoinedIterator( iterators );
}

public void addMappedsuperclassProperty(Property p) {
properties.add(p);
p.setPersistentClass(this);
properties.add( p );
p.setPersistentClass( this );
}

public MappedSuperclass getSuperMappedSuperclass() {

@@ -198,7 +198,7 @@ public class Property implements Serializable, MetaAttributable {
this.valueGenerationStrategy = valueGenerationStrategy;
}
public void setUpdateable(boolean mutable) {
public void setUpdateable(boolean mutable) {
this.updateable = mutable;
}
@@ -30,19 +30,18 @@ import java.util.Set;
import org.hibernate.MappingException;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.collections.SingletonIterator;
import org.jboss.logging.Logger;
/**
* The root class of an inheritance hierarchy
*
* @author Gavin King
*/
public class RootClass extends PersistentClass implements TableOwner {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, RootClass.class.getName());
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( RootClass.class );
public static final String DEFAULT_IDENTIFIER_COLUMN_NAME = "id";
public static final String DEFAULT_DISCRIMINATOR_COLUMN_NAME = "class";

@@ -70,30 +69,31 @@ public class RootClass extends PersistentClass implements TableOwner {
private boolean cachingExplicitlyRequested;
@Override
int nextSubclassId() {
int nextSubclassId() {
return ++nextSubclassId;
}
@Override
public int getSubclassId() {
public int getSubclassId() {
return 0;
}
public void setTable(Table table) {
this.table=table;
this.table = table;
}
@Override
public Table getTable() {
public Table getTable() {
return table;
}
@Override
public Property getIdentifierProperty() {
public Property getIdentifierProperty() {
return identifierProperty;
}
@Override
public Property getDeclaredIdentifierProperty() {
public Property getDeclaredIdentifierProperty() {
return declaredIdentifierProperty;
}

@@ -102,25 +102,27 @@ public class RootClass extends PersistentClass implements TableOwner {
}
@Override
public KeyValue getIdentifier() {
public KeyValue getIdentifier() {
return identifier;
}
@Override
public boolean hasIdentifierProperty() {
return identifierProperty!=null;
}
@Override
public Value getDiscriminator() {
public boolean hasIdentifierProperty() {
return identifierProperty != null;
}
@Override
public Value getDiscriminator() {
return discriminator;
}
@Override
public boolean isInherited() {
public boolean isInherited() {
return false;
}
@Override
public boolean isPolymorphic() {
public boolean isPolymorphic() {
return polymorphic;
}

@@ -129,41 +131,43 @@ public class RootClass extends PersistentClass implements TableOwner {
}
@Override
public RootClass getRootClass() {
public RootClass getRootClass() {
return this;
}
@Override
public Iterator getPropertyClosureIterator() {
public Iterator getPropertyClosureIterator() {
return getPropertyIterator();
}
@Override
public Iterator getTableClosureIterator() {
public Iterator getTableClosureIterator() {
return new SingletonIterator( getTable() );
}
@Override
public Iterator getKeyClosureIterator() {
public Iterator getKeyClosureIterator() {
return new SingletonIterator( getKey() );
}
@Override
public void addSubclass(Subclass subclass) throws MappingException {
super.addSubclass(subclass);
setPolymorphic(true);
public void addSubclass(Subclass subclass) throws MappingException {
super.addSubclass( subclass );
setPolymorphic( true );
}
@Override
public boolean isExplicitPolymorphism() {
public boolean isExplicitPolymorphism() {
return explicitPolymorphism;
}
@Override
public Property getVersion() {
public Property getVersion() {
return version;
}
@Override
public Property getDeclaredVersion() {
public Property getDeclaredVersion() {
return declaredVersion;
}

@@ -174,42 +178,44 @@ public class RootClass extends PersistentClass implements TableOwner {
public void setVersion(Property version) {
this.version = version;
}
@Override
public boolean isVersioned() {
return version!=null;
public boolean isVersioned() {
return version != null;
}
@Override
public boolean isMutable() {
public boolean isMutable() {
return mutable;
}
@Override
public boolean hasEmbeddedIdentifier() {
public boolean hasEmbeddedIdentifier() {
return embeddedIdentifier;
}
@Override
public Class getEntityPersisterClass() {
public Class getEntityPersisterClass() {
return entityPersisterClass;
}
@Override
public Table getRootTable() {
public Table getRootTable() {
return getTable();
}
@Override
public void setEntityPersisterClass(Class persister) {
public void setEntityPersisterClass(Class persister) {
this.entityPersisterClass = persister;
}
@Override
public PersistentClass getSuperclass() {
public PersistentClass getSuperclass() {
return null;
}
@Override
public KeyValue getKey() {
public KeyValue getKey() {
return getIdentifier();
}

@@ -231,7 +237,7 @@ public class RootClass extends PersistentClass implements TableOwner {
public void setIdentifierProperty(Property identifierProperty) {
this.identifierProperty = identifierProperty;
identifierProperty.setPersistentClass(this);
identifierProperty.setPersistentClass( this );
}

@@ -240,7 +246,7 @@ public class RootClass extends PersistentClass implements TableOwner {
}
@Override
public boolean isDiscriminatorInsertable() {
public boolean isDiscriminatorInsertable() {
return discriminatorInsertable;
}

@@ -249,7 +255,7 @@ public class RootClass extends PersistentClass implements TableOwner {
}
@Override
public boolean isForceDiscriminator() {
public boolean isForceDiscriminator() {
return forceDiscriminator;
}

@@ -258,7 +264,7 @@ public class RootClass extends PersistentClass implements TableOwner {
}
@Override
public String getWhere() {
public String getWhere() {
return where;
}

@@ -267,14 +273,14 @@ public class RootClass extends PersistentClass implements TableOwner {
}
@Override
public void validate(Mapping mapping) throws MappingException {
super.validate(mapping);
if ( !getIdentifier().isValid(mapping) ) {
public void validate(Mapping mapping) throws MappingException {
super.validate( mapping );
if ( !getIdentifier().isValid( mapping ) ) {
throw new MappingException(
"identifier mapping has wrong number of columns: " +
getEntityName() +
" type: " +
getIdentifier().getType().getName()
"identifier mapping has wrong number of columns: " +
getEntityName() +
" type: " +
getIdentifier().getType().getName()
);
}
checkCompositeIdentifier();

@@ -304,7 +310,7 @@ public class RootClass extends PersistentClass implements TableOwner {
}
@Override
public String getCacheConcurrencyStrategy() {
public String getCacheConcurrencyStrategy() {
return cacheConcurrencyStrategy;
}

@@ -313,22 +319,24 @@ public class RootClass extends PersistentClass implements TableOwner {
}
public String getCacheRegionName() {
return cacheRegionName==null ? getEntityName() : cacheRegionName;
return cacheRegionName == null ? getEntityName() : cacheRegionName;
}
public void setCacheRegionName(String cacheRegionName) {
this.cacheRegionName = cacheRegionName;
}
@Override
public String getNaturalIdCacheRegionName() {
return naturalIdCacheRegionName;
}
public void setNaturalIdCacheRegionName(String naturalIdCacheRegionName) {
this.naturalIdCacheRegionName = naturalIdCacheRegionName;
}
@Override
public boolean isLazyPropertiesCacheable() {
public boolean isLazyPropertiesCacheable() {
return lazyPropertiesCacheable;
}

@@ -337,12 +345,12 @@ public class RootClass extends PersistentClass implements TableOwner {
}
@Override
public boolean isJoinedSubclass() {
public boolean isJoinedSubclass() {
return false;
}
@Override
public java.util.Set getSynchronizedTables() {
public java.util.Set getSynchronizedTables() {
return synchronizedTables;
}

@@ -360,8 +368,8 @@ public class RootClass extends PersistentClass implements TableOwner {
}
@Override
public Object accept(PersistentClassVisitor mv) {
return mv.accept(this);
public Object accept(PersistentClassVisitor mv) {
return mv.accept( this );
}
public void setCachingExplicitlyRequested(boolean explicitlyRequested) {
@@ -236,7 +236,9 @@ public class SimpleValue implements KeyValue {
while ( iter.hasNext() ) {
Table table= (Table) iter.next();
tables.append( table.getQuotedName(dialect) );
if ( iter.hasNext() ) tables.append(", ");
if ( iter.hasNext() ) {
tables.append(", ");
}
}
params.setProperty( PersistentIdentifierGenerator.TABLES, tables.toString() );
}

@@ -526,7 +528,7 @@ public class SimpleValue implements KeyValue {
}
@Override
public String toString() {
public String toString() {
return getClass().getName() + '(' + columns.toString() + ')';
}
@@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.mapping;
import java.util.Iterator;
import org.hibernate.MappingException;

@@ -32,18 +33,19 @@ import org.hibernate.internal.util.collections.JoinedIterator;
* @author Gavin King
*/
public class SingleTableSubclass extends Subclass {
public SingleTableSubclass(PersistentClass superclass) {
super(superclass);
super( superclass );
}
@SuppressWarnings("unchecked")
protected Iterator getNonDuplicatedPropertyIterator() {
return new JoinedIterator(
getSuperclass().getUnjoinedPropertyIterator(),
getUnjoinedPropertyIterator()
);
}
protected Iterator getDiscriminatorColumnIterator() {
if ( isDiscriminatorInsertable() && !getDiscriminator().hasFormula() ) {
return getDiscriminator().getColumnIterator();

@@ -54,13 +56,17 @@ public class SingleTableSubclass extends Subclass {
}
public Object accept(PersistentClassVisitor mv) {
return mv.accept(this);
return mv.accept( this );
}
public void validate(Mapping mapping) throws MappingException {
if ( getDiscriminator() == null ) {
throw new MappingException(
"No discriminator found for " + getEntityName()
+ ". Discriminator is needed when 'single-table-per-hierarchy' "
+ "is used and a class has subclasses"
);
}
super.validate( mapping );
}
public void validate(Mapping mapping) throws MappingException {
if(getDiscriminator()==null) {
throw new MappingException("No discriminator found for " + getEntityName() + ". Discriminator is needed when 'single-table-per-hierarchy' is used and a class has subclasses");
}
super.validate(mapping);
}
}
@@ -36,10 +36,10 @@ import org.hibernate.internal.util.StringHelper;
* @author Brett Meyer
*/
public class UniqueKey extends Constraint {
private java.util.Map<Column, String> columnOrderMap = new HashMap<Column, String>( );
private java.util.Map<Column, String> columnOrderMap = new HashMap<Column, String>();
@Override
public String sqlConstraintString(
public String sqlConstraintString(
Dialect dialect,
String constraintName,
String defaultCatalog,

@@ -50,8 +50,11 @@ public class UniqueKey extends Constraint {
}
@Override
public String sqlCreateString(Dialect dialect, Mapping p,
String defaultCatalog, String defaultSchema) {
public String sqlCreateString(
Dialect dialect,
Mapping p,
String defaultCatalog,
String defaultSchema) {
return null;
// return dialect.getUniqueDelegate().getAlterTableToAddUniqueKeyCommand(
// this, defaultCatalog, defaultSchema

@@ -59,8 +62,10 @@ public class UniqueKey extends Constraint {
}
@Override
public String sqlDropString(Dialect dialect, String defaultCatalog,
String defaultSchema) {
public String sqlDropString(
Dialect dialect,
String defaultCatalog,
String defaultSchema) {
return null;
// return dialect.getUniqueDelegate().getAlterTableToDropUniqueKeyCommand(
// this, defaultCatalog, defaultSchema

@@ -77,7 +82,7 @@ public class UniqueKey extends Constraint {
public Map<Column, String> getColumnOrderMap() {
return columnOrderMap;
}
public String generatedConstraintNamePrefix() {
return "UK_";
}
@@ -61,8 +61,11 @@ public class NamedParameterSpecification extends AbstractExplicitParameterSpecif
* @return The number of sql bind positions "eaten" by this bind operation.
*/
@Override
public int bind(PreparedStatement statement, QueryParameters qp, SessionImplementor session, int position)
throws SQLException {
public int bind(
PreparedStatement statement,
QueryParameters qp,
SessionImplementor session,
int position) throws SQLException {
TypedValue typedValue = qp.getNamedParameters().get( name );
typedValue.getType().nullSafeSet( statement, typedValue.getValue(), position, session );
return typedValue.getType().getColumnSpan( session.getFactory() );
@@ -49,8 +49,11 @@ public class VersionTypeSeedParameterSpecification implements ParameterSpecifica
}
@Override
public int bind(PreparedStatement statement, QueryParameters qp, SessionImplementor session, int position)
throws SQLException {
public int bind(
PreparedStatement statement,
QueryParameters qp,
SessionImplementor session,
int position) throws SQLException {
type.nullSafeSet( statement, type.seed( session ), position, session );
return 1;
}
@@ -864,7 +864,7 @@ public abstract class AbstractCollectionPersister
protected Object decrementIndexByBase(Object index) {
if ( baseIndex != 0 ) {
index = (Integer)index - baseIndex;
index = (Integer)index - baseIndex;
}
return index;
}

@@ -919,7 +919,7 @@ public abstract class AbstractCollectionPersister
protected Object incrementIndexByBase(Object index) {
if ( baseIndex != 0 ) {
index = (Integer)index + baseIndex;
index = (Integer)index + baseIndex;
}
return index;
}
@@ -79,19 +79,19 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
* Generate the SQL DELETE that deletes all rows
*/
@Override
protected String generateDeleteString() {
protected String generateDeleteString() {
final Delete delete = new Delete()
.setTableName( qualifiedTableName )
.addPrimaryKeyColumns( keyColumnNames );
if ( hasWhere ) {
delete.setWhere( sqlWhereString );
}
if ( getFactory().getSessionFactoryOptions().isCommentsEnabled() ) {
delete.setComment( "delete collection " + getRole() );
}
return delete.toStatementString();
}

@@ -99,27 +99,27 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
* Generate the SQL INSERT that creates a new row
*/
@Override
protected String generateInsertRowString() {
protected String generateInsertRowString() {
final Insert insert = new Insert( getDialect() )
.setTableName( qualifiedTableName )
.addColumns( keyColumnNames );
if ( hasIdentifier) {
if ( hasIdentifier ) {
insert.addColumn( identifierColumnName );
}
if ( hasIndex /*&& !indexIsFormula*/ ) {
insert.addColumns( indexColumnNames, indexColumnIsSettable );
}
if ( getFactory().getSessionFactoryOptions().isCommentsEnabled() ) {
insert.setComment( "insert collection row " + getRole() );
}
//if ( !elementIsFormula ) {
insert.addColumns( elementColumnNames, elementColumnIsSettable, elementColumnWriters );
insert.addColumns( elementColumnNames, elementColumnIsSettable, elementColumnWriters );
//}
return insert.toStatementString();
}

@@ -127,16 +127,16 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
* Generate the SQL UPDATE that updates a row
*/
@Override
protected String generateUpdateRowString() {
protected String generateUpdateRowString() {
final Update update = new Update( getDialect() )
.setTableName( qualifiedTableName );
.setTableName( qualifiedTableName );
//if ( !elementIsFormula ) {
update.addColumns( elementColumnNames, elementColumnIsSettable, elementColumnWriters );
update.addColumns( elementColumnNames, elementColumnIsSettable, elementColumnWriters );
//}
if ( hasIdentifier ) {
update.addPrimaryKeyColumns( new String[]{ identifierColumnName } );
update.addPrimaryKeyColumns( new String[] {identifierColumnName} );
}
else if ( hasIndex && !indexContainsFormula ) {
update.addPrimaryKeyColumns( ArrayHelper.join( keyColumnNames, indexColumnNames ) );

@@ -145,14 +145,14 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
update.addPrimaryKeyColumns( keyColumnNames );
update.addPrimaryKeyColumns( elementColumnNames, elementColumnIsInPrimaryKey, elementColumnWriters );
}
if ( getFactory().getSessionFactoryOptions().isCommentsEnabled() ) {
update.setComment( "update collection row " + getRole() );
}
return update.toStatementString();
}
@Override
protected void doProcessQueuedOps(PersistentCollection collection, Serializable id, SessionImplementor session)
throws HibernateException {

@@ -163,11 +163,11 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
* Generate the SQL DELETE that deletes a particular row
*/
@Override
protected String generateDeleteRowString() {
protected String generateDeleteRowString() {
final Delete delete = new Delete().setTableName( qualifiedTableName );
if ( hasIdentifier ) {
delete.addPrimaryKeyColumns( new String[]{ identifierColumnName } );
delete.addPrimaryKeyColumns( new String[] {identifierColumnName} );
}
else if ( hasIndex && !indexContainsFormula ) {
delete.addPrimaryKeyColumns( ArrayHelper.join( keyColumnNames, indexColumnNames ) );

@@ -176,11 +176,11 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
delete.addPrimaryKeyColumns( keyColumnNames );
delete.addPrimaryKeyColumns( elementColumnNames, elementColumnIsInPrimaryKey, elementColumnWriters );
}
if ( getFactory().getSessionFactoryOptions().isCommentsEnabled() ) {
delete.setComment( "delete collection row " + getRole() );
}
return delete.toStatementString();
}

@@ -198,15 +198,16 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
}
@Override
public boolean isManyToMany() {
public boolean isManyToMany() {
return elementType.isEntityType(); //instanceof AssociationType;
}
private BasicBatchKey updateBatchKey;
@Override
protected int doUpdateRows(Serializable id, PersistentCollection collection, SessionImplementor session) throws HibernateException {
if ( ArrayHelper.isAllFalse(elementColumnIsSettable) ) {
protected int doUpdateRows(Serializable id, PersistentCollection collection, SessionImplementor session)
throws HibernateException {
if ( ArrayHelper.isAllFalse( elementColumnIsSettable ) ) {
return 0;
}

@@ -244,7 +245,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
}
try {
offset+= expectation.prepare( st );
offset += expectation.prepare( st );
int loc = writeElement( st, collection.getElement( entry ), offset, session );
if ( hasIdentifier ) {
writeIdentifier( st, collection.getIdentifier( entry, i ), loc, session );

@@ -265,10 +266,14 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
.addToBatch();
}
else {
expectation.verifyOutcome( session.getJdbcCoordinator().getResultSetReturn().executeUpdate( st ), st, -1 );
expectation.verifyOutcome(
session.getJdbcCoordinator().getResultSetReturn().executeUpdate(
st
), st, -1
);
}
}
catch ( SQLException sqle ) {
catch (SQLException sqle) {
if ( useBatch ) {
session.getJdbcCoordinator().abortBatch();
}

@@ -286,26 +291,31 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
}
return count;
}
catch ( SQLException sqle ) {
catch (SQLException sqle) {
throw getSQLExceptionHelper().convert(
sqle,
"could not update collection rows: " + MessageHelper.collectionInfoString( this, collection, id, session ),
"could not update collection rows: " + MessageHelper.collectionInfoString(
this,
collection,
id,
session
),
getSQLUpdateRowString()
);
}
}
public String selectFragment(
Joinable rhs,
String rhsAlias,
String lhsAlias,
String entitySuffix,
String collectionSuffix,
boolean includeCollectionColumns) {
Joinable rhs,
String rhsAlias,
String lhsAlias,
String entitySuffix,
String collectionSuffix,
boolean includeCollectionColumns) {
// we need to determine the best way to know that two joinables
// represent a single many-to-many...
if ( rhs != null && isManyToMany() && !rhs.isCollection() ) {
AssociationType elementType = ( ( AssociationType ) getElementType() );
AssociationType elementType = ( (AssociationType) getElementType() );
if ( rhs.equals( elementType.getAssociatedJoinable( getFactory() ) ) ) {
return manyToManySelectFragment( rhs, rhsAlias, lhsAlias, collectionSuffix );
}

@@ -314,10 +324,10 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
}
private String manyToManySelectFragment(
Joinable rhs,
String rhsAlias,
String lhsAlias,
String collectionSuffix) {
Joinable rhs,
String rhsAlias,
String lhsAlias,
String collectionSuffix) {
SelectFragment frag = generateSelectFragment( lhsAlias, collectionSuffix );
String[] elementColumnNames = rhs.getKeyColumnNames();

@@ -335,7 +345,7 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
* @see org.hibernate.loader.collection.BasicCollectionLoader
*/
@Override
protected CollectionInitializer createCollectionInitializer(LoadQueryInfluencers loadQueryInfluencers)
protected CollectionInitializer createCollectionInitializer(LoadQueryInfluencers loadQueryInfluencers)
throws MappingException {
return BatchingCollectionInitializerBuilder.getBuilder( getFactory() )
.createBatchingCollectionInitializer( this, batchSize, getFactory(), loadQueryInfluencers );

@@ -347,7 +357,11 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
}
@Override
public String fromJoinFragment(String alias, boolean innerJoin, boolean includeSubclasses, Set<String> treatAsDeclarations) {
public String fromJoinFragment(
String alias,
boolean innerJoin,
boolean includeSubclasses,
Set<String> treatAsDeclarations) {
return "";
}

@@ -357,26 +371,30 @@ public class BasicCollectionPersister extends AbstractCollectionPersister {
}
@Override
public String whereJoinFragment(String alias, boolean innerJoin, boolean includeSubclasses, Set<String> treatAsDeclarations) {
public String whereJoinFragment(
String alias,
boolean innerJoin,
boolean includeSubclasses,
Set<String> treatAsDeclarations) {
return "";
}
@Override
protected CollectionInitializer createSubselectInitializer(SubselectFetch subselect, SessionImplementor session) {
return new SubselectCollectionLoader(
protected CollectionInitializer createSubselectInitializer(SubselectFetch subselect, SessionImplementor session) {
return new SubselectCollectionLoader(
this,
subselect.toSubselectString( getCollectionType().getLHSPropertyName() ),
subselect.getResult(),
subselect.getQueryParameters(),
subselect.getNamedParameterLocMap(),
session.getFactory(),
session.getLoadQueryInfluencers()
session.getLoadQueryInfluencers()
);
}
@Override
public FilterAliasGenerator getFilterAliasGenerator(String rootAlias) {
return new StaticFilterAliasGenerator(rootAlias);
return new StaticFilterAliasGenerator( rootAlias );
}
}
@@ -43,7 +43,9 @@ public class CollectionPropertyMapping implements PropertyMapping {
return memberPersister.getElementType();
}
else if ( propertyName.equals(CollectionPropertyNames.COLLECTION_INDICES) ) {
if ( !memberPersister.hasIndex() ) throw new QueryException("unindexed collection before indices()");
if ( !memberPersister.hasIndex() ) {
throw new QueryException("unindexed collection before indices()");
}
return memberPersister.getIndexType();
}
else if ( propertyName.equals(CollectionPropertyNames.COLLECTION_SIZE) ) {
@@ -30,21 +30,19 @@ import org.hibernate.FlushMode;
import org.hibernate.HibernateException;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.AbstractQueryImpl;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.loader.collection.CollectionInitializer;
import org.jboss.logging.Logger;
/**
* A wrapper around a named query.
*
* @author Gavin King
*/
public final class NamedQueryCollectionInitializer implements CollectionInitializer {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( NamedQueryCollectionInitializer.class );
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class,
NamedQueryCollectionInitializer.class.getName());
private final String queryName;
private final String queryName;
private final CollectionPersister persister;
public NamedQueryCollectionInitializer(String queryName, CollectionPersister persister) {

@@ -53,26 +51,22 @@ public final class NamedQueryCollectionInitializer implements CollectionInitiali
this.persister = persister;
}
public void initialize(Serializable key, SessionImplementor session)
throws HibernateException {
LOG.debugf("Initializing collection: %s using named query: %s", persister.getRole(), queryName);
public void initialize(Serializable key, SessionImplementor session) throws HibernateException {
LOG.debugf( "Initializing collection: %s using named query: %s", persister.getRole(), queryName );
//TODO: is there a more elegant way than downcasting?
AbstractQueryImpl query = (AbstractQueryImpl) session.getNamedSQLQuery(queryName);
if ( query.getNamedParameters().length>0 ) {
AbstractQueryImpl query = (AbstractQueryImpl) session.getNamedSQLQuery( queryName );
if ( query.getNamedParameters().length > 0 ) {
query.setParameter(
query.getNamedParameters()[0],
key,
persister.getKeyType()
);
);
}
else {
query.setParameter( 0, key, persister.getKeyType() );
}
query.setCollectionKey( key )
.setFlushMode( FlushMode.MANUAL )
.list();
query.setCollectionKey( key ).setFlushMode( FlushMode.MANUAL ).list();
}
}
@@ -67,12 +67,12 @@ public class OneToManyPersister extends AbstractCollectionPersister {
private final boolean keyIsUpdateable;
@Override
protected boolean isRowDeleteEnabled() {
protected boolean isRowDeleteEnabled() {
return keyIsUpdateable && keyIsNullable;
}
@Override
protected boolean isRowInsertEnabled() {
protected boolean isRowInsertEnabled() {
return keyIsUpdateable;
}

@@ -95,24 +95,24 @@ public class OneToManyPersister extends AbstractCollectionPersister {
* Generate the SQL UPDATE that updates all the foreign keys to null
*/
@Override
protected String generateDeleteString() {
protected String generateDeleteString() {
final Update update = new Update( getDialect() )
.setTableName( qualifiedTableName )
.addColumns( keyColumnNames, "null" )
.addPrimaryKeyColumns( keyColumnNames );
if ( hasIndex && !indexContainsFormula ) {
update.addColumns( indexColumnNames, "null" );
}
if ( hasWhere ) {
update.setWhere( sqlWhereString );
}
if ( getFactory().getSessionFactoryOptions().isCommentsEnabled() ) {
update.setComment( "delete one-to-many " + getRole() );
}
return update.toStatementString();
}

@@ -120,21 +120,21 @@ public class OneToManyPersister extends AbstractCollectionPersister {
* Generate the SQL UPDATE that updates a foreign key to a value
*/
@Override
protected String generateInsertRowString() {
protected String generateInsertRowString() {
final Update update = new Update( getDialect() )
.setTableName( qualifiedTableName )
.addColumns( keyColumnNames );
if ( hasIndex && !indexContainsFormula ) {
update.addColumns( indexColumnNames );
}
//identifier collections not supported for 1-to-many
if ( getFactory().getSessionFactoryOptions().isCommentsEnabled() ) {
update.setComment( "create one-to-many row " + getRole() );
}
return update.addPrimaryKeyColumns( elementColumnNames, elementColumnWriters )
.toStatementString();
}

@@ -143,16 +143,16 @@ public class OneToManyPersister extends AbstractCollectionPersister {
* Generate the SQL UPDATE that inserts a collection index
*/
@Override
protected String generateUpdateRowString() {
protected String generateUpdateRowString() {
final Update update = new Update( getDialect() ).setTableName( qualifiedTableName );
update.addPrimaryKeyColumns( elementColumnNames, elementColumnIsSettable, elementColumnWriters );
if ( hasIdentifier ) {
update.addPrimaryKeyColumns( new String[]{ identifierColumnName } );
update.addPrimaryKeyColumns( new String[] {identifierColumnName} );
}
if ( hasIndex && !indexContainsFormula ) {
update.addColumns( indexColumnNames );
}
return update.toStatementString();
}

@@ -161,19 +161,19 @@ public class OneToManyPersister extends AbstractCollectionPersister {
* key to null
*/
@Override
protected String generateDeleteRowString() {
protected String generateDeleteRowString() {
final Update update = new Update( getDialect() )
.setTableName( qualifiedTableName )
.addColumns( keyColumnNames, "null" );
if ( hasIndex && !indexContainsFormula ) {
update.addColumns( indexColumnNames, "null" );
}
if ( getFactory().getSessionFactoryOptions().isCommentsEnabled() ) {
update.setComment( "delete one-to-many row " + getRole() );
}
//use a combination of foreign key columns and pk columns, since
//the ordering of removal and addition is not guaranteed when
//a child moves from one parent to another

@@ -181,21 +181,21 @@ public class OneToManyPersister extends AbstractCollectionPersister {
return update.addPrimaryKeyColumns( rowSelectColumnNames )
.toStatementString();
}
@Override
public void recreate(PersistentCollection collection, Serializable id, SessionImplementor session)
throws HibernateException {
super.recreate( collection, id, session );
writeIndex( collection, collection.entries( this ), id, true, session );
}
@Override
public void insertRows(PersistentCollection collection, Serializable id, SessionImplementor session)
throws HibernateException {
super.insertRows( collection, id, session );
writeIndex( collection, collection.entries( this ), id, true, session );
}
@Override
protected void doProcessQueuedOps(PersistentCollection collection, Serializable id, SessionImplementor session)
throws HibernateException {

@@ -229,7 +229,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
recreateBatchKey = new BasicBatchKey(
getRole() + "#RECREATE",
expectation
);
);
}
st = session
.getJdbcCoordinator()

@@ -246,9 +246,19 @@ public class OneToManyPersister extends AbstractCollectionPersister {
try {
offset += expectation.prepare( st );
if ( hasIdentifier ) {
offset = writeIdentifier( st, collection.getIdentifier( entry, nextIndex ), offset, session );
offset = writeIdentifier(
st,
collection.getIdentifier( entry, nextIndex ),
offset,
session
);
}
offset = writeIndex( st, collection.getIndex( entry, nextIndex, this ), offset, session );
offset = writeIndex(
st,
collection.getIndex( entry, nextIndex, this ),
offset,
session
);
offset = writeElement( st, collection.getElement( entry ), offset, session );
if ( useBatch ) {

@@ -257,10 +267,14 @@ public class OneToManyPersister extends AbstractCollectionPersister {
.addToBatch();
}
else {
expectation.verifyOutcome( session.getJdbcCoordinator().getResultSetReturn().executeUpdate( st ), st, -1 );
expectation.verifyOutcome(
session.getJdbcCoordinator()
.getResultSetReturn()
.executeUpdate( st ), st, -1
);
}
}
catch ( SQLException sqle ) {
catch (SQLException sqle) {
if ( useBatch ) {
session.getJdbcCoordinator().abortBatch();
}

@@ -278,7 +292,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
}
}
}
catch ( SQLException sqle ) {
catch (SQLException sqle) {
throw sqlExceptionHelper.convert(
sqle,
"could not update collection: " +

@@ -292,6 +306,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
public boolean consumesEntityAlias() {
return true;
}
public boolean consumesCollectionAlias() {
return true;
}

@@ -301,7 +316,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
}
@Override
public boolean isManyToMany() {
public boolean isManyToMany() {
return false;
}

@@ -309,11 +324,11 @@ public class OneToManyPersister extends AbstractCollectionPersister {
private BasicBatchKey insertRowBatchKey;
@Override
protected int doUpdateRows(Serializable id, PersistentCollection collection, SessionImplementor session) {
protected int doUpdateRows(Serializable id, PersistentCollection collection, SessionImplementor session) {
// we finish all the "removes" first to take care of possible unique
// constraints and so that we can take better advantage of batching
try {
int count = 0;
if ( isRowDeleteEnabled() ) {

@@ -335,7 +350,11 @@ public class OneToManyPersister extends AbstractCollectionPersister {
int offset = 1;
while ( entries.hasNext() ) {
Object entry = entries.next();
if ( collection.needsUpdating( entry, i, elementType ) ) { // will still be issued when it used to be null
if ( collection.needsUpdating(
entry,
i,
elementType
) ) { // will still be issued when it used to be null
if ( useBatch ) {
st = session
.getJdbcCoordinator()

@@ -349,7 +368,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
.prepareStatement( sql, isDeleteCallable() );
}
int loc = writeKey( st, id, offset, session );
writeElementToWhere( st, collection.getSnapshotElement(entry, i), loc, session );
writeElementToWhere( st, collection.getSnapshotElement( entry, i ), loc, session );
if ( useBatch ) {
session
.getJdbcCoordinator()

@@ -357,14 +376,18 @@ public class OneToManyPersister extends AbstractCollectionPersister {
.addToBatch();
}
else {
deleteExpectation.verifyOutcome( session.getJdbcCoordinator().getResultSetReturn().executeUpdate( st ), st, -1 );
deleteExpectation.verifyOutcome(
session.getJdbcCoordinator()
.getResultSetReturn()
.executeUpdate( st ), st, -1
);
}
count++;
}
i++;
}
}
catch ( SQLException e ) {
catch (SQLException e) {
if ( useBatch ) {
session.getJdbcCoordinator().abortBatch();
}

@@ -377,7 +400,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
}
}
}
if ( isRowInsertEnabled() ) {
final Expectation insertExpectation = Expectations.appropriateExpectation( getInsertCheckStyle() );
boolean useBatch = insertExpectation.canBeBatched();

@@ -425,14 +448,18 @@ public class OneToManyPersister extends AbstractCollectionPersister {
session.getJdbcCoordinator().getBatch( insertRowBatchKey ).addToBatch();
}
else {
insertExpectation.verifyOutcome( session.getJdbcCoordinator().getResultSetReturn().executeUpdate( st ), st, -1 );
insertExpectation.verifyOutcome(
session.getJdbcCoordinator()
.getResultSetReturn()
.executeUpdate( st ), st, -1
);
}
count++;
}
i++;
}
}
catch ( SQLException sqle ) {
catch (SQLException sqle) {
if ( useBatch ) {
session.getJdbcCoordinator().abortBatch();
}

@@ -448,30 +475,30 @@ public class OneToManyPersister extends AbstractCollectionPersister {
return count;
}
catch ( SQLException sqle ) {
catch (SQLException sqle) {
throw getFactory().getSQLExceptionHelper().convert(
sqle,
"could not update collection rows: " +
MessageHelper.collectionInfoString( this, collection, id, session ),
"could not update collection rows: " +
MessageHelper.collectionInfoString( this, collection, id, session ),
getSQLInsertRowString()
);
}
}
public String selectFragment(
Joinable rhs,
String rhsAlias,
String lhsAlias,
String entitySuffix,
String collectionSuffix,
boolean includeCollectionColumns) {
Joinable rhs,
String rhsAlias,
String lhsAlias,
String entitySuffix,
String collectionSuffix,
boolean includeCollectionColumns) {
StringBuilder buf = new StringBuilder();
if ( includeCollectionColumns ) {
// buf.append( selectFragment( lhsAlias, "" ) )//ignore suffix for collection columns!
buf.append( selectFragment( lhsAlias, collectionSuffix ) )
.append( ", " );
}
OuterJoinLoadable ojl = ( OuterJoinLoadable ) getElementPersister();
OuterJoinLoadable ojl = (OuterJoinLoadable) getElementPersister();
return buf.append( ojl.selectFragment( lhsAlias, entitySuffix ) )//use suffix for the entity columns
.toString();
}

@@ -482,7 +509,7 @@ public class OneToManyPersister extends AbstractCollectionPersister {
* @see org.hibernate.loader.collection.OneToManyLoader
*/
@Override
protected CollectionInitializer createCollectionInitializer(LoadQueryInfluencers loadQueryInfluencers)
protected CollectionInitializer createCollectionInitializer(LoadQueryInfluencers loadQueryInfluencers)
throws MappingException {
return BatchingCollectionInitializerBuilder.getBuilder( getFactory() )
.createBatchingOneToManyInitializer( this, batchSize, getFactory(), loadQueryInfluencers );

@@ -499,7 +526,12 @@ public class OneToManyPersister extends AbstractCollectionPersister {
boolean innerJoin,
boolean includeSubclasses,
Set<String> treatAsDeclarations) {
return ( (Joinable) getElementPersister() ).fromJoinFragment( alias, innerJoin, includeSubclasses, treatAsDeclarations );
return ( (Joinable) getElementPersister() ).fromJoinFragment(
alias,
innerJoin,
includeSubclasses,
treatAsDeclarations
);
}
@Override

@@ -513,19 +545,24 @@ public class OneToManyPersister extends AbstractCollectionPersister {
boolean innerJoin,
boolean includeSubclasses,
Set<String> treatAsDeclarations) {
return ( (Joinable) getElementPersister() ).whereJoinFragment( alias, innerJoin, includeSubclasses, treatAsDeclarations );
return ( (Joinable) getElementPersister() ).whereJoinFragment(
alias,
innerJoin,
includeSubclasses,
treatAsDeclarations
);
}
@Override
public String getTableName() {
public String getTableName() {
return ( (Joinable) getElementPersister() ).getTableName();
}
@Override
public String filterFragment(String alias) throws MappingException {
public String filterFragment(String alias) throws MappingException {
String result = super.filterFragment( alias );
if ( getElementPersister() instanceof Joinable ) {
result += ( ( Joinable ) getElementPersister() ).oneToManyFilterFragment( alias );
result += ( (Joinable) getElementPersister() ).oneToManyFilterFragment( alias );
}
return result;

@@ -535,14 +572,14 @@ public class OneToManyPersister extends AbstractCollectionPersister {
protected String filterFragment(String alias, Set<String> treatAsDeclarations) throws MappingException {
String result = super.filterFragment( alias );
if ( getElementPersister() instanceof Joinable ) {
result += ( ( Joinable ) getElementPersister() ).oneToManyFilterFragment( alias, treatAsDeclarations );
result += ( (Joinable) getElementPersister() ).oneToManyFilterFragment( alias, treatAsDeclarations );
}
return result;
}
@Override
protected CollectionInitializer createSubselectInitializer(SubselectFetch subselect, SessionImplementor session) {
return new SubselectOneToManyLoader(
protected CollectionInitializer createSubselectInitializer(SubselectFetch subselect, SessionImplementor session) {
return new SubselectOneToManyLoader(
this,
subselect.toSubselectString( getCollectionType().getLHSPropertyName() ),
subselect.getResult(),

@@ -550,18 +587,18 @@ public class OneToManyPersister extends AbstractCollectionPersister {
subselect.getNamedParameterLocMap(),
session.getFactory(),
session.getLoadQueryInfluencers()
);
);
}
@Override
public Object getElementByIndex(Serializable key, Object index, SessionImplementor session, Object owner) {
public Object getElementByIndex(Serializable key, Object index, SessionImplementor session, Object owner) {
return new CollectionElementLoader( this, getFactory(), session.getLoadQueryInfluencers() )
.loadElement( session, key, incrementIndexByBase(index) );
.loadElement( session, key, incrementIndexByBase( index ) );
}
@Override
public FilterAliasGenerator getFilterAliasGenerator(String rootAlias) {
return getElementPersister().getFilterAliasGenerator(rootAlias);
return getElementPersister().getFilterAliasGenerator( rootAlias );
}
}
File diff suppressed because it is too large
@ -40,38 +40,36 @@ import org.hibernate.type.CompositeType;
|
|||
import org.hibernate.type.EntityType;
|
||||
import org.hibernate.type.Type;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
/**
|
||||
* Basic implementation of the {@link PropertyMapping} contract.
|
||||
*
|
||||
* @author Gavin King
|
||||
*/
|
||||
public abstract class AbstractPropertyMapping implements PropertyMapping {
|
||||
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( AbstractPropertyMapping.class );
|
||||
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( AbstractPropertyMapping.class );
|
||||
|
||||
private final Map typesByPropertyPath = new HashMap();
|
||||
private final Map columnsByPropertyPath = new HashMap();
|
||||
private final Map columnReadersByPropertyPath = new HashMap();
|
||||
private final Map columnReaderTemplatesByPropertyPath = new HashMap();
|
||||
private final Map formulaTemplatesByPropertyPath = new HashMap();
|
||||
private final Map<String, Type> typesByPropertyPath = new HashMap<String, Type>();
|
||||
private final Map<String, String[]> columnsByPropertyPath = new HashMap<String, String[]>();
|
||||
private final Map<String, String[]> columnReadersByPropertyPath = new HashMap<String, String[]>();
|
||||
private final Map<String, String[]> columnReaderTemplatesByPropertyPath = new HashMap<String, String[]>();
|
||||
private final Map<String, String[]> formulaTemplatesByPropertyPath = new HashMap<String, String[]>();
|
||||
|
||||
public String[] getIdentifierColumnNames() {
|
||||
throw new UnsupportedOperationException("one-to-one is not supported here");
|
||||
throw new UnsupportedOperationException( "one-to-one is not supported here" );
|
||||
}
|
||||
|
||||
public String[] getIdentifierColumnReaderTemplates() {
|
||||
throw new UnsupportedOperationException("one-to-one is not supported here");
|
||||
throw new UnsupportedOperationException( "one-to-one is not supported here" );
|
||||
}
|
||||
|
||||
public String[] getIdentifierColumnReaders() {
|
||||
throw new UnsupportedOperationException("one-to-one is not supported here");
|
||||
throw new UnsupportedOperationException( "one-to-one is not supported here" );
|
||||
}
|
||||
|
||||
protected abstract String getEntityName();
|
||||
|
||||
public Type toType(String propertyName) throws QueryException {
|
||||
Type type = (Type) typesByPropertyPath.get(propertyName);
|
||||
Type type = typesByPropertyPath.get( propertyName );
|
||||
if ( type == null ) {
|
||||
throw propertyException( propertyName );
|
||||
}
|
||||
|
@ -83,24 +81,24 @@ public abstract class AbstractPropertyMapping implements PropertyMapping {
|
|||
}
|
||||
|
||||
public String[] getColumnNames(String propertyName) {
|
||||
String[] cols = (String[]) columnsByPropertyPath.get(propertyName);
|
||||
if (cols==null) {
|
||||
throw new MappingException("unknown property: " + propertyName);
|
||||
String[] cols = columnsByPropertyPath.get( propertyName );
|
||||
if ( cols == null ) {
|
||||
throw new MappingException( "unknown property: " + propertyName );
|
||||
}
|
||||
return cols;
|
||||
}
|
||||
|
||||
public String[] toColumns(String alias, String propertyName) throws QueryException {
|
||||
//TODO: *two* hashmap lookups here is one too many...
|
||||
String[] columns = (String[]) columnsByPropertyPath.get(propertyName);
|
||||
String[] columns = columnsByPropertyPath.get( propertyName );
|
||||
if ( columns == null ) {
|
||||
throw propertyException( propertyName );
|
||||
}
|
||||
String[] formulaTemplates = (String[]) formulaTemplatesByPropertyPath.get(propertyName);
|
||||
String[] columnReaderTemplates = (String[]) columnReaderTemplatesByPropertyPath.get(propertyName);
|
||||
String[] formulaTemplates = formulaTemplatesByPropertyPath.get( propertyName );
|
||||
String[] columnReaderTemplates = columnReaderTemplatesByPropertyPath.get( propertyName );
|
||||
String[] result = new String[columns.length];
|
||||
for ( int i=0; i<columns.length; i++ ) {
|
||||
if ( columnReaderTemplates[i]==null ) {
|
||||
for ( int i = 0; i < columns.length; i++ ) {
|
||||
if ( columnReaderTemplates[i] == null ) {
|
||||
result[i] = StringHelper.replace( formulaTemplates[i], Template.TEMPLATE, alias );
|
||||
}
|
||||
else {
|
||||
|
@ -111,15 +109,15 @@ public abstract class AbstractPropertyMapping implements PropertyMapping {
|
|||
}
|
||||
|
||||
public String[] toColumns(String propertyName) throws QueryException {
|
||||
String[] columns = (String[]) columnsByPropertyPath.get(propertyName);
|
||||
String[] columns = columnsByPropertyPath.get( propertyName );
|
||||
if ( columns == null ) {
|
||||
throw propertyException( propertyName );
|
||||
}
|
||||
String[] formulaTemplates = (String[]) formulaTemplatesByPropertyPath.get(propertyName);
|
||||
String[] columnReaders = (String[]) columnReadersByPropertyPath.get(propertyName);
|
||||
String[] formulaTemplates = formulaTemplatesByPropertyPath.get( propertyName );
|
||||
String[] columnReaders = columnReadersByPropertyPath.get( propertyName );
|
||||
String[] result = new String[columns.length];
|
||||
for ( int i=0; i<columns.length; i++ ) {
|
||||
if ( columnReaders[i]==null ) {
|
||||
for ( int i = 0; i < columns.length; i++ ) {
|
||||
if ( columnReaders[i] == null ) {
|
||||
result[i] = StringHelper.replace( formulaTemplates[i], Template.TEMPLATE, "" );
|
||||
}
|
||||
else {
|
||||
|
@ -139,16 +137,21 @@ public abstract class AbstractPropertyMapping implements PropertyMapping {
|
|||
// TODO : not quite sure yet of the difference, but this is only needed from annotations for @Id @ManyToOne support
|
||||
if ( typesByPropertyPath.containsKey( path ) ) {
|
||||
if ( LOG.isTraceEnabled() ) {
|
||||
LOG.tracev( "Skipping duplicate registration of path [{0}], existing type = [{1}], incoming type = [{2}]", path, typesByPropertyPath.get( path ), type );
|
||||
LOG.tracev(
|
||||
"Skipping duplicate registration of path [{0}], existing type = [{1}], incoming type = [{2}]",
|
||||
path,
|
||||
typesByPropertyPath.get( path ),
|
||||
type
|
||||
);
|
||||
}
|
||||
return;
|
||||
}
|
||||
typesByPropertyPath.put(path, type);
|
||||
columnsByPropertyPath.put(path, columns);
|
||||
columnReadersByPropertyPath.put(path, columnReaders);
|
||||
columnReaderTemplatesByPropertyPath.put(path, columnReaderTemplates);
|
||||
if (formulaTemplates!=null) {
|
||||
formulaTemplatesByPropertyPath.put(path, formulaTemplates);
|
||||
typesByPropertyPath.put( path, type );
|
||||
columnsByPropertyPath.put( path, columns );
|
||||
columnReadersByPropertyPath.put( path, columnReaders );
|
||||
columnReaderTemplatesByPropertyPath.put( path, columnReaderTemplates );
|
||||
if ( formulaTemplates != null ) {
|
||||
formulaTemplatesByPropertyPath.put( path, formulaTemplates );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -174,11 +177,11 @@ public abstract class AbstractPropertyMapping implements PropertyMapping {
|
|||
assert columns != null : "Incoming columns should not be null : " + path;
|
||||
assert type != null : "Incoming type should not be null : " + path;
|
||||
|
||||
if ( columns.length!=type.getColumnSpan(factory) ) {
|
||||
if ( columns.length != type.getColumnSpan( factory ) ) {
|
||||
throw new MappingException(
|
||||
"broken column mapping for: " + path +
|
||||
" of: " + getEntityName()
|
||||
);
|
||||
" of: " + getEntityName()
|
||||
);
|
||||
}
|
||||
|
||||
if ( type.isAssociationType() ) {
|
||||
|
@@ -190,38 +193,55 @@ public abstract class AbstractPropertyMapping implements PropertyMapping {
|
|||
}
|
||||
else {
|
||||
String foreignKeyProperty = actype.getLHSPropertyName();
|
||||
if ( foreignKeyProperty!=null && !path.equals(foreignKeyProperty) ) {
|
||||
if ( foreignKeyProperty != null && !path.equals( foreignKeyProperty ) ) {
|
||||
//TODO: this requires that the collection is defined after the
|
||||
// referenced property in the mapping file (ok?)
|
||||
columns = (String[]) columnsByPropertyPath.get(foreignKeyProperty);
|
||||
if (columns==null) return; //get em on the second pass!
|
||||
columnReaders = (String[]) columnReadersByPropertyPath.get(foreignKeyProperty);
|
||||
columnReaderTemplates = (String[]) columnReaderTemplatesByPropertyPath.get(foreignKeyProperty);
|
||||
columns = columnsByPropertyPath.get( foreignKeyProperty );
|
||||
if ( columns == null ) {
|
||||
return; //get em on the second pass!
|
||||
}
|
||||
columnReaders = columnReadersByPropertyPath.get( foreignKeyProperty );
|
||||
columnReaderTemplates = columnReaderTemplatesByPropertyPath.get( foreignKeyProperty );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (path!=null) {
|
||||
addPropertyPath(path, type, columns, columnReaders, columnReaderTemplates, formulaTemplates);
|
||||
if ( path != null ) {
|
||||
addPropertyPath( path, type, columns, columnReaders, columnReaderTemplates, formulaTemplates );
|
||||
}
|
||||
|
||||
if ( type.isComponentType() ) {
|
||||
CompositeType actype = (CompositeType) type;
|
||||
initComponentPropertyPaths( path, actype, columns, columnReaders, columnReaderTemplates, formulaTemplates, factory );
|
||||
initComponentPropertyPaths(
|
||||
path,
|
||||
actype,
|
||||
columns,
|
||||
columnReaders,
|
||||
columnReaderTemplates,
|
||||
formulaTemplates,
|
||||
factory
|
||||
);
|
||||
if ( actype.isEmbedded() ) {
|
||||
initComponentPropertyPaths(
|
||||
path==null ? null : StringHelper.qualifier(path),
|
||||
path == null ? null : StringHelper.qualifier( path ),
|
||||
actype,
|
||||
columns,
|
||||
columnReaders,
|
||||
columnReaderTemplates,
|
||||
formulaTemplates,
|
||||
factory
|
||||
);
|
||||
);
|
||||
}
|
||||
}
|
||||
else if ( type.isEntityType() ) {
|
||||
initIdentifierPropertyPaths( path, (EntityType) type, columns, columnReaders, columnReaderTemplates, factory );
|
||||
initIdentifierPropertyPaths(
|
||||
path,
|
||||
(EntityType) type,
|
||||
columns,
|
||||
columnReaders,
|
||||
columnReaderTemplates,
|
||||
factory
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -234,21 +254,21 @@ public abstract class AbstractPropertyMapping implements PropertyMapping {
|
|||
final Mapping factory) throws MappingException {
|
||||
|
||||
Type idtype = etype.getIdentifierOrUniqueKeyType( factory );
|
||||
String idPropName = etype.getIdentifierOrUniqueKeyPropertyName(factory);
|
||||
String idPropName = etype.getIdentifierOrUniqueKeyPropertyName( factory );
|
||||
boolean hasNonIdentifierPropertyNamedId = hasNonIdentifierPropertyNamedId( etype, factory );
|
||||
|
||||
if ( etype.isReferenceToPrimaryKey() ) {
|
||||
if ( !hasNonIdentifierPropertyNamedId ) {
|
||||
String idpath1 = extendPath(path, EntityPersister.ENTITY_ID);
|
||||
addPropertyPath(idpath1, idtype, columns, columnReaders, columnReaderTemplates, null);
|
||||
initPropertyPaths(idpath1, idtype, columns, columnReaders, columnReaderTemplates, null, factory);
|
||||
String idpath1 = extendPath( path, EntityPersister.ENTITY_ID );
|
||||
addPropertyPath( idpath1, idtype, columns, columnReaders, columnReaderTemplates, null );
|
||||
initPropertyPaths( idpath1, idtype, columns, columnReaders, columnReaderTemplates, null, factory );
|
||||
}
|
||||
}
|
||||
|
||||
if (idPropName!=null) {
|
||||
String idpath2 = extendPath(path, idPropName);
|
||||
addPropertyPath(idpath2, idtype, columns, columnReaders, columnReaderTemplates, null);
|
||||
initPropertyPaths(idpath2, idtype, columns, columnReaders, columnReaderTemplates, null, factory);
|
||||
if ( idPropName != null ) {
|
||||
String idpath2 = extendPath( path, idPropName );
|
||||
addPropertyPath( idpath2, idtype, columns, columnReaders, columnReaderTemplates, null );
|
||||
initPropertyPaths( idpath2, idtype, columns, columnReaders, columnReaderTemplates, null, factory );
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -257,9 +277,12 @@ public abstract class AbstractPropertyMapping implements PropertyMapping {
|
|||
// I don't believe that Mapping#getReferencedPropertyType accounts for the identifier property; so
|
||||
// if it returns for a property named 'id', then we should have a non-id field named id
|
||||
try {
|
||||
return factory.getReferencedPropertyType( entityType.getAssociatedEntityName(), EntityPersister.ENTITY_ID ) != null;
|
||||
return factory.getReferencedPropertyType(
|
||||
entityType.getAssociatedEntityName(),
|
||||
EntityPersister.ENTITY_ID
|
||||
) != null;
|
||||
}
|
||||
catch( MappingException e ) {
|
||||
catch (MappingException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@@ -274,21 +297,29 @@ public abstract class AbstractPropertyMapping implements PropertyMapping {
|
|||
|
||||
Type[] types = type.getSubtypes();
|
||||
String[] properties = type.getPropertyNames();
|
||||
int begin=0;
|
||||
for ( int i=0; i<properties.length; i++ ) {
|
||||
int begin = 0;
|
||||
for ( int i = 0; i < properties.length; i++ ) {
|
||||
String subpath = extendPath( path, properties[i] );
|
||||
try {
|
||||
int length = types[i].getColumnSpan(factory);
|
||||
String[] columnSlice = ArrayHelper.slice(columns, begin, length);
|
||||
String[] columnReaderSlice = ArrayHelper.slice(columnReaders, begin, length);
|
||||
int length = types[i].getColumnSpan( factory );
|
||||
String[] columnSlice = ArrayHelper.slice( columns, begin, length );
|
||||
String[] columnReaderSlice = ArrayHelper.slice( columnReaders, begin, length );
|
||||
String[] columnReaderTemplateSlice = ArrayHelper.slice( columnReaderTemplates, begin, length );
|
||||
String[] formulaSlice = formulaTemplates==null ?
|
||||
null : ArrayHelper.slice(formulaTemplates, begin, length);
|
||||
initPropertyPaths(subpath, types[i], columnSlice, columnReaderSlice, columnReaderTemplateSlice, formulaSlice, factory);
|
||||
begin+=length;
|
||||
String[] formulaSlice = formulaTemplates == null ?
|
||||
null : ArrayHelper.slice( formulaTemplates, begin, length );
|
||||
initPropertyPaths(
|
||||
subpath,
|
||||
types[i],
|
||||
columnSlice,
|
||||
columnReaderSlice,
|
||||
columnReaderTemplateSlice,
|
||||
formulaSlice,
|
||||
factory
|
||||
);
|
||||
begin += length;
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new MappingException("bug in initComponentPropertyPaths", e);
|
||||
throw new MappingException( "bug in initComponentPropertyPaths", e );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -31,12 +31,12 @@ import java.util.Map;
|
|||
import org.hibernate.EntityMode;
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.MappingException;
|
||||
import org.hibernate.engine.jdbc.Size;
|
||||
import org.hibernate.engine.spi.Mapping;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
import org.hibernate.internal.util.collections.ArrayHelper;
|
||||
import org.hibernate.internal.util.compare.EqualsHelper;
|
||||
import org.hibernate.engine.jdbc.Size;
|
||||
import org.hibernate.type.AbstractType;
|
||||
import org.hibernate.type.Type;
|
||||
|
||||
|
@@ -87,7 +87,7 @@ public class DiscriminatorType extends AbstractType {
|
|||
throw new HibernateException( "Unable to resolve discriminator value [" + discriminatorValue + "] to entity name" );
|
||||
}
|
||||
final EntityPersister entityPersister = session.getEntityPersister( entityName, null );
|
||||
return ( EntityMode.POJO == entityPersister.getEntityMode() ) ? entityPersister.getMappedClass() : entityName;
|
||||
return ( EntityMode.POJO == entityPersister.getEntityMode() ) ? entityPersister.getMappedClass() : entityName;
|
||||
}
|
||||
|
||||
public void nullSafeSet(
|
||||
|
|
|
@@ -79,7 +79,7 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
private final String[][] keyColumnNames;
|
||||
private final boolean[] cascadeDeleteEnabled;
|
||||
private final boolean hasSequentialSelects;
|
||||
|
||||
|
||||
private final String[] spaces;
|
||||
|
||||
private final String[] subclassClosure;
|
||||
|
@@ -103,7 +103,7 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
private final int[] subclassFormulaTableNumberClosure;
|
||||
|
||||
// discriminator column
|
||||
private final Map subclassesByDiscriminatorValue = new HashMap();
|
||||
private final Map<Object, String> subclassesByDiscriminatorValue = new HashMap<Object, String>();
|
||||
private final boolean forceDiscriminator;
|
||||
private final String discriminatorColumnName;
|
||||
private final String discriminatorColumnReaders;
|
||||
|
@@ -120,19 +120,19 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
private final String[][] constraintOrderedKeyColumnNames;
|
||||
|
||||
//private final Map propertyTableNumbersByName = new HashMap();
|
||||
private final Map propertyTableNumbersByNameAndSubclass = new HashMap();
|
||||
|
||||
private final Map sequentialSelectStringsByEntityName = new HashMap();
|
||||
private final Map<String, Integer> propertyTableNumbersByNameAndSubclass = new HashMap<String, Integer>();
|
||||
|
||||
private static final Object NULL_DISCRIMINATOR = new MarkerObject("<null discriminator>");
|
||||
private static final Object NOT_NULL_DISCRIMINATOR = new MarkerObject("<not null discriminator>");
|
||||
private final Map<String, String> sequentialSelectStringsByEntityName = new HashMap<String, String>();
|
||||
|
||||
private static final Object NULL_DISCRIMINATOR = new MarkerObject( "<null discriminator>" );
|
||||
private static final Object NOT_NULL_DISCRIMINATOR = new MarkerObject( "<not null discriminator>" );
|
||||
private static final String NULL_STRING = "null";
|
||||
private static final String NOT_NULL_STRING = "not null";
|
||||
|
||||
//INITIALIZATION:
|
||||
|
||||
public SingleTableEntityPersister(
|
||||
final PersistentClass persistentClass,
|
||||
final PersistentClass persistentClass,
|
||||
final EntityRegionAccessStrategy cacheAccessStrategy,
|
||||
final NaturalIdRegionAccessStrategy naturalIdRegionAccessStrategy,
|
||||
final PersisterCreationContext creationContext) throws HibernateException {
|
||||
|
@@ -143,16 +143,16 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
|
||||
// CLASS + TABLE
|
||||
|
||||
joinSpan = persistentClass.getJoinClosureSpan()+1;
|
||||
joinSpan = persistentClass.getJoinClosureSpan() + 1;
|
||||
qualifiedTableNames = new String[joinSpan];
|
||||
isInverseTable = new boolean[joinSpan];
|
||||
isNullableTable = new boolean[joinSpan];
|
||||
keyColumnNames = new String[joinSpan][];
|
||||
final Table table = persistentClass.getRootTable();
|
||||
qualifiedTableNames[0] = table.getQualifiedName(
|
||||
factory.getDialect(),
|
||||
factory.getSettings().getDefaultCatalogName(),
|
||||
factory.getSettings().getDefaultSchemaName()
|
||||
qualifiedTableNames[0] = table.getQualifiedName(
|
||||
factory.getDialect(),
|
||||
factory.getSettings().getDefaultCatalogName(),
|
||||
factory.getSettings().getDefaultSchemaName()
|
||||
);
|
||||
isInverseTable[0] = false;
|
||||
isNullableTable[0] = false;
|
||||
|
@@ -173,18 +173,18 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
customSQLInsert[0] = persistentClass.getCustomSQLInsert();
|
||||
insertCallable[0] = customSQLInsert[0] != null && persistentClass.isCustomInsertCallable();
|
||||
insertResultCheckStyles[0] = persistentClass.getCustomSQLInsertCheckStyle() == null
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( customSQLInsert[0], insertCallable[0] )
|
||||
: persistentClass.getCustomSQLInsertCheckStyle();
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( customSQLInsert[0], insertCallable[0] )
|
||||
: persistentClass.getCustomSQLInsertCheckStyle();
|
||||
customSQLUpdate[0] = persistentClass.getCustomSQLUpdate();
|
||||
updateCallable[0] = customSQLUpdate[0] != null && persistentClass.isCustomUpdateCallable();
|
||||
updateResultCheckStyles[0] = persistentClass.getCustomSQLUpdateCheckStyle() == null
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( customSQLUpdate[0], updateCallable[0] )
|
||||
: persistentClass.getCustomSQLUpdateCheckStyle();
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( customSQLUpdate[0], updateCallable[0] )
|
||||
: persistentClass.getCustomSQLUpdateCheckStyle();
|
||||
customSQLDelete[0] = persistentClass.getCustomSQLDelete();
|
||||
deleteCallable[0] = customSQLDelete[0] != null && persistentClass.isCustomDeleteCallable();
|
||||
deleteResultCheckStyles[0] = persistentClass.getCustomSQLDeleteCheckStyle() == null
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( customSQLDelete[0], deleteCallable[0] )
|
||||
: persistentClass.getCustomSQLDeleteCheckStyle();
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( customSQLDelete[0], deleteCallable[0] )
|
||||
: persistentClass.getCustomSQLDeleteCheckStyle();
|
||||
|
||||
// JOINS
|
||||
|
||||
|
@@ -192,34 +192,34 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
int j = 1;
|
||||
while ( joinIter.hasNext() ) {
|
||||
Join join = (Join) joinIter.next();
|
||||
qualifiedTableNames[j] = join.getTable().getQualifiedName(
|
||||
factory.getDialect(),
|
||||
factory.getSettings().getDefaultCatalogName(),
|
||||
factory.getSettings().getDefaultSchemaName()
|
||||
qualifiedTableNames[j] = join.getTable().getQualifiedName(
|
||||
factory.getDialect(),
|
||||
factory.getSettings().getDefaultCatalogName(),
|
||||
factory.getSettings().getDefaultSchemaName()
|
||||
);
|
||||
isInverseTable[j] = join.isInverse();
|
||||
isNullableTable[j] = join.isOptional();
|
||||
cascadeDeleteEnabled[j] = join.getKey().isCascadeDeleteEnabled() &&
|
||||
factory.getDialect().supportsCascadeDelete();
|
||||
cascadeDeleteEnabled[j] = join.getKey().isCascadeDeleteEnabled() &&
|
||||
factory.getDialect().supportsCascadeDelete();
|
||||
|
||||
customSQLInsert[j] = join.getCustomSQLInsert();
|
||||
insertCallable[j] = customSQLInsert[j] != null && join.isCustomInsertCallable();
|
||||
insertResultCheckStyles[j] = join.getCustomSQLInsertCheckStyle() == null
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( customSQLInsert[j], insertCallable[j] )
|
||||
: join.getCustomSQLInsertCheckStyle();
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( customSQLInsert[j], insertCallable[j] )
|
||||
: join.getCustomSQLInsertCheckStyle();
|
||||
customSQLUpdate[j] = join.getCustomSQLUpdate();
|
||||
updateCallable[j] = customSQLUpdate[j] != null && join.isCustomUpdateCallable();
|
||||
updateResultCheckStyles[j] = join.getCustomSQLUpdateCheckStyle() == null
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( customSQLUpdate[j], updateCallable[j] )
|
||||
: join.getCustomSQLUpdateCheckStyle();
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( customSQLUpdate[j], updateCallable[j] )
|
||||
: join.getCustomSQLUpdateCheckStyle();
|
||||
customSQLDelete[j] = join.getCustomSQLDelete();
|
||||
deleteCallable[j] = customSQLDelete[j] != null && join.isCustomDeleteCallable();
|
||||
deleteResultCheckStyles[j] = join.getCustomSQLDeleteCheckStyle() == null
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( customSQLDelete[j], deleteCallable[j] )
|
||||
: join.getCustomSQLDeleteCheckStyle();
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( customSQLDelete[j], deleteCallable[j] )
|
||||
: join.getCustomSQLDeleteCheckStyle();
|
||||
|
||||
Iterator iter = join.getKey().getColumnIterator();
|
||||
keyColumnNames[j] = new String[ join.getKey().getColumnSpan() ];
|
||||
keyColumnNames[j] = new String[join.getKey().getColumnSpan()];
|
||||
int i = 0;
|
||||
while ( iter.hasNext() ) {
|
||||
Column col = (Column) iter.next();
|
||||
|
@@ -237,15 +237,15 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
}
|
||||
|
||||
spaces = ArrayHelper.join(
|
||||
qualifiedTableNames,
|
||||
qualifiedTableNames,
|
||||
ArrayHelper.toStringArray( persistentClass.getSynchronizedTables() )
|
||||
);
|
||||
|
||||
|
||||
final boolean lazyAvailable = isInstrumented();
|
||||
|
||||
boolean hasDeferred = false;
|
||||
ArrayList subclassTables = new ArrayList();
|
||||
ArrayList joinKeyColumns = new ArrayList();
|
||||
ArrayList<String> subclassTables = new ArrayList<String>();
|
||||
ArrayList<String[]> joinKeyColumns = new ArrayList<String[]>();
|
||||
ArrayList<Boolean> isConcretes = new ArrayList<Boolean>();
|
||||
ArrayList<Boolean> isDeferreds = new ArrayList<Boolean>();
|
||||
ArrayList<Boolean> isInverses = new ArrayList<Boolean>();
|
||||
|
@@ -253,59 +253,64 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
ArrayList<Boolean> isLazies = new ArrayList<Boolean>();
|
||||
subclassTables.add( qualifiedTableNames[0] );
|
||||
joinKeyColumns.add( getIdentifierColumnNames() );
|
||||
isConcretes.add(Boolean.TRUE);
|
||||
isDeferreds.add(Boolean.FALSE);
|
||||
isInverses.add(Boolean.FALSE);
|
||||
isNullables.add(Boolean.FALSE);
|
||||
isLazies.add(Boolean.FALSE);
|
||||
isConcretes.add( Boolean.TRUE );
|
||||
isDeferreds.add( Boolean.FALSE );
|
||||
isInverses.add( Boolean.FALSE );
|
||||
isNullables.add( Boolean.FALSE );
|
||||
isLazies.add( Boolean.FALSE );
|
||||
joinIter = persistentClass.getSubclassJoinClosureIterator();
|
||||
while ( joinIter.hasNext() ) {
|
||||
Join join = (Join) joinIter.next();
|
||||
isConcretes.add( persistentClass.isClassOrSuperclassJoin(join) );
|
||||
isConcretes.add( persistentClass.isClassOrSuperclassJoin( join ) );
|
||||
isDeferreds.add( join.isSequentialSelect() );
|
||||
isInverses.add( join.isInverse() );
|
||||
isNullables.add( join.isOptional() );
|
||||
isLazies.add( lazyAvailable && join.isLazy() );
|
||||
if ( join.isSequentialSelect() && !persistentClass.isClassOrSuperclassJoin(join) ) {
|
||||
if ( join.isSequentialSelect() && !persistentClass.isClassOrSuperclassJoin( join ) ) {
|
||||
hasDeferred = true;
|
||||
}
|
||||
subclassTables.add( join.getTable().getQualifiedName(
|
||||
factory.getDialect(),
|
||||
factory.getSettings().getDefaultCatalogName(),
|
||||
factory.getSettings().getDefaultSchemaName()
|
||||
) );
|
||||
subclassTables.add(
|
||||
join.getTable().getQualifiedName(
|
||||
factory.getDialect(),
|
||||
factory.getSettings().getDefaultCatalogName(),
|
||||
factory.getSettings().getDefaultSchemaName()
|
||||
)
|
||||
);
|
||||
Iterator iter = join.getKey().getColumnIterator();
|
||||
String[] keyCols = new String[ join.getKey().getColumnSpan() ];
|
||||
String[] keyCols = new String[join.getKey().getColumnSpan()];
|
||||
int i = 0;
|
||||
while ( iter.hasNext() ) {
|
||||
Column col = (Column) iter.next();
|
||||
keyCols[i++] = col.getQuotedName( factory.getDialect() );
|
||||
}
|
||||
joinKeyColumns.add(keyCols);
|
||||
joinKeyColumns.add( keyCols );
|
||||
}
|
||||
|
||||
subclassTableSequentialSelect = ArrayHelper.toBooleanArray(isDeferreds);
|
||||
subclassTableNameClosure = ArrayHelper.toStringArray(subclassTables);
|
||||
subclassTableIsLazyClosure = ArrayHelper.toBooleanArray(isLazies);
|
||||
|
||||
subclassTableSequentialSelect = ArrayHelper.toBooleanArray( isDeferreds );
|
||||
subclassTableNameClosure = ArrayHelper.toStringArray( subclassTables );
|
||||
subclassTableIsLazyClosure = ArrayHelper.toBooleanArray( isLazies );
|
||||
subclassTableKeyColumnClosure = ArrayHelper.to2DStringArray( joinKeyColumns );
|
||||
isClassOrSuperclassTable = ArrayHelper.toBooleanArray(isConcretes);
|
||||
isInverseSubclassTable = ArrayHelper.toBooleanArray(isInverses);
|
||||
isNullableSubclassTable = ArrayHelper.toBooleanArray(isNullables);
|
||||
isClassOrSuperclassTable = ArrayHelper.toBooleanArray( isConcretes );
|
||||
isInverseSubclassTable = ArrayHelper.toBooleanArray( isInverses );
|
||||
isNullableSubclassTable = ArrayHelper.toBooleanArray( isNullables );
|
||||
hasSequentialSelects = hasDeferred;
|
||||
|
||||
// DISCRIMINATOR
|
||||
|
||||
if ( persistentClass.isPolymorphic() ) {
|
||||
Value discrimValue = persistentClass.getDiscriminator();
|
||||
if (discrimValue==null) {
|
||||
throw new MappingException("discriminator mapping required for single table polymorphic persistence");
|
||||
if ( discrimValue == null ) {
|
||||
throw new MappingException( "discriminator mapping required for single table polymorphic persistence" );
|
||||
}
|
||||
forceDiscriminator = persistentClass.isForceDiscriminator();
|
||||
Selectable selectable = (Selectable) discrimValue.getColumnIterator().next();
|
||||
if ( discrimValue.hasFormula() ) {
|
||||
Formula formula = (Formula) selectable;
|
||||
discriminatorFormula = formula.getFormula();
|
||||
discriminatorFormulaTemplate = formula.getTemplate( factory.getDialect(), factory.getSqlFunctionRegistry() );
|
||||
discriminatorFormulaTemplate = formula.getTemplate(
|
||||
factory.getDialect(),
|
||||
factory.getSqlFunctionRegistry()
|
||||
);
|
||||
discriminatorColumnName = null;
|
||||
discriminatorColumnReaders = null;
|
||||
discriminatorColumnReaderTemplate = null;
|
||||
|
@@ -315,7 +320,10 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
Column column = (Column) selectable;
|
||||
discriminatorColumnName = column.getQuotedName( factory.getDialect() );
|
||||
discriminatorColumnReaders = column.getReadExpr( factory.getDialect() );
|
||||
discriminatorColumnReaderTemplate = column.getTemplate( factory.getDialect(), factory.getSqlFunctionRegistry() );
|
||||
discriminatorColumnReaderTemplate = column.getTemplate(
|
||||
factory.getDialect(),
|
||||
factory.getSqlFunctionRegistry()
|
||||
);
|
||||
discriminatorAlias = column.getAlias( factory.getDialect(), persistentClass.getRootTable() );
|
||||
discriminatorFormula = null;
|
||||
discriminatorFormulaTemplate = null;
|
||||
|
@@ -339,10 +347,10 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
discriminatorSQLValue = dtype.objectToSQLString( discriminatorValue, factory.getDialect() );
|
||||
}
|
||||
catch (ClassCastException cce) {
|
||||
throw new MappingException("Illegal discriminator type: " + discriminatorType.getName() );
|
||||
throw new MappingException( "Illegal discriminator type: " + discriminatorType.getName() );
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new MappingException("Could not format discriminator value to SQL string", e);
|
||||
throw new MappingException( "Could not format discriminator value to SQL string", e );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -362,47 +370,47 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
|
||||
// PROPERTIES
|
||||
|
||||
propertyTableNumbers = new int[ getPropertySpan() ];
|
||||
propertyTableNumbers = new int[getPropertySpan()];
|
||||
Iterator iter = persistentClass.getPropertyClosureIterator();
|
||||
int i=0;
|
||||
while( iter.hasNext() ) {
|
||||
int i = 0;
|
||||
while ( iter.hasNext() ) {
|
||||
Property prop = (Property) iter.next();
|
||||
propertyTableNumbers[i++] = persistentClass.getJoinNumber(prop);
|
||||
propertyTableNumbers[i++] = persistentClass.getJoinNumber( prop );
|
||||
|
||||
}
|
||||
|
||||
//TODO: code duplication with JoinedSubclassEntityPersister
|
||||
|
||||
ArrayList columnJoinNumbers = new ArrayList();
|
||||
ArrayList formulaJoinedNumbers = new ArrayList();
|
||||
ArrayList propertyJoinNumbers = new ArrayList();
|
||||
|
||||
|
||||
ArrayList<Integer> columnJoinNumbers = new ArrayList<Integer>();
|
||||
ArrayList<Integer> formulaJoinedNumbers = new ArrayList<Integer>();
|
||||
ArrayList<Integer> propertyJoinNumbers = new ArrayList<Integer>();
|
||||
|
||||
iter = persistentClass.getSubclassPropertyClosureIterator();
|
||||
while ( iter.hasNext() ) {
|
||||
Property prop = (Property) iter.next();
|
||||
Integer join = persistentClass.getJoinNumber(prop);
|
||||
propertyJoinNumbers.add(join);
|
||||
Integer join = persistentClass.getJoinNumber( prop );
|
||||
propertyJoinNumbers.add( join );
|
||||
|
||||
//propertyTableNumbersByName.put( prop.getName(), join );
|
||||
propertyTableNumbersByNameAndSubclass.put(
|
||||
prop.getPersistentClass().getEntityName() + '.' + prop.getName(),
|
||||
join
|
||||
propertyTableNumbersByNameAndSubclass.put(
|
||||
prop.getPersistentClass().getEntityName() + '.' + prop.getName(),
|
||||
join
|
||||
);
|
||||
|
||||
Iterator citer = prop.getColumnIterator();
|
||||
while ( citer.hasNext() ) {
|
||||
Selectable thing = (Selectable) citer.next();
|
||||
if ( thing.isFormula() ) {
|
||||
formulaJoinedNumbers.add(join);
|
||||
formulaJoinedNumbers.add( join );
|
||||
}
|
||||
else {
|
||||
columnJoinNumbers.add(join);
|
||||
columnJoinNumbers.add( join );
|
||||
}
|
||||
}
|
||||
}
|
||||
subclassColumnTableNumberClosure = ArrayHelper.toIntArray(columnJoinNumbers);
|
||||
subclassFormulaTableNumberClosure = ArrayHelper.toIntArray(formulaJoinedNumbers);
|
||||
subclassPropertyTableNumberClosure = ArrayHelper.toIntArray(propertyJoinNumbers);
|
||||
subclassColumnTableNumberClosure = ArrayHelper.toIntArray( columnJoinNumbers );
|
||||
subclassFormulaTableNumberClosure = ArrayHelper.toIntArray( formulaJoinedNumbers );
|
||||
subclassPropertyTableNumberClosure = ArrayHelper.toIntArray( propertyJoinNumbers );
|
||||
|
||||
int subclassSpan = persistentClass.getSubclassSpan() + 1;
|
||||
subclassClosure = new String[subclassSpan];
|
||||
|
@@ -414,7 +422,7 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
// SUBCLASSES
|
||||
if ( persistentClass.isPolymorphic() ) {
|
||||
iter = persistentClass.getSubclassIterator();
|
||||
int k=1;
|
||||
int k = 1;
|
||||
while ( iter.hasNext() ) {
|
||||
Subclass sc = (Subclass) iter.next();
|
||||
subclassClosure[k++] = sc.getEntityName();
|
||||
|
@@ -428,15 +436,15 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
try {
|
||||
DiscriminatorType dtype = (DiscriminatorType) discriminatorType;
|
||||
addSubclassByDiscriminatorValue(
|
||||
dtype.stringToObject( sc.getDiscriminatorValue() ),
|
||||
sc.getEntityName()
|
||||
dtype.stringToObject( sc.getDiscriminatorValue() ),
|
||||
sc.getEntityName()
|
||||
);
|
||||
}
|
||||
catch (ClassCastException cce) {
|
||||
throw new MappingException("Illegal discriminator type: " + discriminatorType.getName() );
|
||||
throw new MappingException( "Illegal discriminator type: " + discriminatorType.getName() );
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new MappingException("Error parsing discriminator value", e);
|
||||
throw new MappingException( "Error parsing discriminator value", e );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -445,13 +453,13 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
initLockers();
|
||||
|
||||
initSubclassPropertyAliasesMap( persistentClass );
|
||||
|
||||
|
||||
postConstruct( creationContext.getMetadata() );
|
||||
|
||||
}
|
||||
|
||||
private void addSubclassByDiscriminatorValue(Object discriminatorValue, String entityName) {
|
||||
String mappedEntityName = (String) subclassesByDiscriminatorValue.put( discriminatorValue, entityName );
|
||||
String mappedEntityName = subclassesByDiscriminatorValue.put( discriminatorValue, entityName );
|
||||
if ( mappedEntityName != null ) {
|
||||
throw new MappingException(
|
||||
"Entities [" + entityName + "] and [" + mappedEntityName
|
||||
|
@@ -474,12 +482,12 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
|
||||
public String getDiscriminatorColumnReaders() {
|
||||
return discriminatorColumnReaders;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public String getDiscriminatorColumnReaderTemplate() {
|
||||
return discriminatorColumnReaderTemplate;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected String getDiscriminatorAlias() {
|
||||
return discriminatorAlias;
|
||||
}
|
||||
|
@@ -509,13 +517,13 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
}
|
||||
|
||||
public String getSubclassForDiscriminatorValue(Object value) {
|
||||
if (value==null) {
|
||||
return (String) subclassesByDiscriminatorValue.get(NULL_DISCRIMINATOR);
|
||||
if ( value == null ) {
|
||||
return subclassesByDiscriminatorValue.get( NULL_DISCRIMINATOR );
|
||||
}
|
||||
else {
|
||||
String result = (String) subclassesByDiscriminatorValue.get(value);
|
||||
if (result==null) {
|
||||
result = (String) subclassesByDiscriminatorValue.get(NOT_NULL_DISCRIMINATOR);
|
||||
String result = subclassesByDiscriminatorValue.get( value );
|
||||
if ( result == null ) {
|
||||
result = subclassesByDiscriminatorValue.get( NOT_NULL_DISCRIMINATOR );
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
@@ -528,7 +536,7 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
//Access cached SQL
|
||||
|
||||
protected boolean isDiscriminatorFormula() {
|
||||
return discriminatorColumnName==null;
|
||||
return discriminatorColumnName == null;
|
||||
}
|
||||
|
||||
protected String getDiscriminatorFormula() {
|
||||
|
@@ -538,23 +546,23 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
protected String getTableName(int j) {
|
||||
return qualifiedTableNames[j];
|
||||
}
|
||||
|
||||
|
||||
protected String[] getKeyColumns(int j) {
|
||||
return keyColumnNames[j];
|
||||
}
|
||||
|
||||
|
||||
protected boolean isTableCascadeDeleteEnabled(int j) {
|
||||
return cascadeDeleteEnabled[j];
|
||||
}
|
||||
|
||||
|
||||
protected boolean isPropertyOfTable(int property, int j) {
|
||||
return propertyTableNumbers[property]==j;
|
||||
return propertyTableNumbers[property] == j;
|
||||
}
|
||||
|
||||
protected boolean isSubclassTableSequentialSelect(int j) {
|
||||
return subclassTableSequentialSelect[j] && !isClassOrSuperclassTable[j];
|
||||
}
|
||||
|
||||
|
||||
// Execute the SQL:
|
||||
|
||||
public String fromTableFragment(String name) {
|
||||
|
@@ -563,9 +571,9 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
|
||||
@Override
|
||||
public String filterFragment(String alias) throws MappingException {
|
||||
String result = discriminatorFilterFragment(alias);
|
||||
String result = discriminatorFilterFragment( alias );
|
||||
if ( hasWhere() ) {
|
||||
result += " and " + getSQLWhereString(alias);
|
||||
result += " and " + getSQLWhereString( alias );
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
@@ -573,7 +581,7 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
private String discriminatorFilterFragment(String alias) throws MappingException {
|
||||
return discriminatorFilterFragment( alias, null );
|
||||
}
|
||||
|
||||
|
||||
public String oneToManyFilterFragment(String alias) throws MappingException {
|
||||
return forceDiscriminator
|
||||
? discriminatorFilterFragment( alias, null )
|
||||
|
@@ -596,10 +604,10 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
return result;
|
||||
}
|
||||
|
||||
private String discriminatorFilterFragment(String alias, Set<String> treatAsDeclarations) {
|
||||
private String discriminatorFilterFragment(String alias, Set<String> treatAsDeclarations) {
|
||||
final boolean hasTreatAs = treatAsDeclarations != null && !treatAsDeclarations.isEmpty();
|
||||
|
||||
if ( !needsDiscriminator() && !hasTreatAs) {
|
||||
if ( !needsDiscriminator() && !hasTreatAs ) {
|
||||
return "";
|
||||
}
|
||||
|
||||
|
@@ -633,7 +641,7 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
values.add( queryable.getDiscriminatorSQLValue() );
|
||||
}
|
||||
}
|
||||
return values.toArray( new String[ values.size() ] );
|
||||
return values.toArray( new String[values.size()] );
|
||||
}
|
||||
|
||||
private String[] fullDiscriminatorValues;
|
||||
|
@@ -648,14 +656,14 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
values.add( queryable.getDiscriminatorSQLValue() );
|
||||
}
|
||||
}
|
||||
fullDiscriminatorValues = values.toArray( new String[values.size() ] );
|
||||
fullDiscriminatorValues = values.toArray( new String[values.size()] );
|
||||
}
|
||||
|
||||
return fullDiscriminatorValues;
|
||||
}
|
||||
|
||||
public String getSubclassPropertyTableName(int i) {
|
||||
return subclassTableNameClosure[ subclassPropertyTableNumberClosure[i] ];
|
||||
return subclassTableNameClosure[subclassPropertyTableNumberClosure[i]];
|
||||
}
|
||||
|
||||
protected void addDiscriminatorToSelect(SelectFragment select, String name, String suffix) {
|
||||
|
@@ -663,10 +671,10 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
select.addFormula( name, getDiscriminatorFormulaTemplate(), getDiscriminatorAlias() );
|
||||
}
|
||||
else {
|
||||
select.addColumn( name, getDiscriminatorColumnName(), getDiscriminatorAlias() );
|
||||
select.addColumn( name, getDiscriminatorColumnName(), getDiscriminatorAlias() );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
protected int[] getPropertyTableNumbersInSelect() {
|
||||
return propertyTableNumbers;
|
||||
}
|
||||
|
@@ -681,7 +689,7 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
|
||||
protected void addDiscriminatorToInsert(Insert insert) {
|
||||
|
||||
if (discriminatorInsertable) {
|
||||
if ( discriminatorInsertable ) {
|
||||
insert.addColumn( getDiscriminatorColumnName(), discriminatorSQLValue );
|
||||
}
|
||||
|
||||
|
@@ -698,27 +706,27 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
protected int[] getPropertyTableNumbers() {
|
||||
return propertyTableNumbers;
|
||||
}
|
||||
|
||||
|
||||
protected boolean isSubclassPropertyDeferred(String propertyName, String entityName) {
|
||||
return hasSequentialSelects &&
|
||||
isSubclassTableSequentialSelect( getSubclassPropertyTableNumber(propertyName, entityName) );
|
||||
return hasSequentialSelects &&
|
||||
isSubclassTableSequentialSelect( getSubclassPropertyTableNumber( propertyName, entityName ) );
|
||||
}
|
||||
|
||||
|
||||
public boolean hasSequentialSelect() {
|
||||
return hasSequentialSelects;
|
||||
}
|
||||
|
||||
|
||||
private int getSubclassPropertyTableNumber(String propertyName, String entityName) {
|
||||
Type type = propertyMapping.toType(propertyName);
|
||||
Type type = propertyMapping.toType( propertyName );
|
||||
if ( type.isAssociationType() && ( (AssociationType) type ).useLHSPrimaryKey() ) {
|
||||
return 0;
|
||||
}
|
||||
final Integer tabnum = (Integer) propertyTableNumbersByNameAndSubclass.get(entityName + '.' + propertyName);
|
||||
return tabnum==null ? 0 : tabnum;
|
||||
final Integer tabnum = propertyTableNumbersByNameAndSubclass.get( entityName + '.' + propertyName );
|
||||
return tabnum == null ? 0 : tabnum;
|
||||
}
|
||||
|
||||
|
||||
protected String getSequentialSelect(String entityName) {
|
||||
return (String) sequentialSelectStringsByEntityName.get(entityName);
|
||||
return sequentialSelectStringsByEntityName.get( entityName );
|
||||
}
|
||||
|
||||
private String generateSequentialSelect(Loadable persister) {
|
||||
|
@@ -726,49 +734,49 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
|
||||
//note that this method could easily be moved up to BasicEntityPersister,
|
||||
//if we ever needed to reuse it from other subclasses
|
||||
|
||||
|
||||
//figure out which tables need to be fetched
|
||||
AbstractEntityPersister subclassPersister = (AbstractEntityPersister) persister;
|
||||
HashSet tableNumbers = new HashSet();
|
||||
HashSet<Integer> tableNumbers = new HashSet<Integer>();
|
||||
String[] props = subclassPersister.getPropertyNames();
|
||||
String[] classes = subclassPersister.getPropertySubclassNames();
|
||||
for ( int i=0; i<props.length; i++ ) {
|
||||
for ( int i = 0; i < props.length; i++ ) {
|
||||
int propTableNumber = getSubclassPropertyTableNumber( props[i], classes[i] );
|
||||
if ( isSubclassTableSequentialSelect(propTableNumber) && !isSubclassTableLazy(propTableNumber) ) {
|
||||
tableNumbers.add( propTableNumber);
|
||||
if ( isSubclassTableSequentialSelect( propTableNumber ) && !isSubclassTableLazy( propTableNumber ) ) {
|
||||
tableNumbers.add( propTableNumber );
|
||||
}
|
||||
}
|
||||
if ( tableNumbers.isEmpty() ) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
//figure out which columns are needed
|
||||
ArrayList columnNumbers = new ArrayList();
|
||||
ArrayList<Integer> columnNumbers = new ArrayList<Integer>();
|
||||
final int[] columnTableNumbers = getSubclassColumnTableNumberClosure();
|
||||
for ( int i=0; i<getSubclassColumnClosure().length; i++ ) {
|
||||
for ( int i = 0; i < getSubclassColumnClosure().length; i++ ) {
|
||||
if ( tableNumbers.contains( columnTableNumbers[i] ) ) {
|
||||
columnNumbers.add( i );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//figure out which formulas are needed
|
||||
ArrayList formulaNumbers = new ArrayList();
|
||||
ArrayList<Integer> formulaNumbers = new ArrayList<Integer>();
|
||||
final int[] formulaTableNumbers = getSubclassColumnTableNumberClosure();
|
||||
for ( int i=0; i<getSubclassFormulaTemplateClosure().length; i++ ) {
|
||||
for ( int i = 0; i < getSubclassFormulaTemplateClosure().length; i++ ) {
|
||||
if ( tableNumbers.contains( formulaTableNumbers[i] ) ) {
|
||||
formulaNumbers.add( i );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//render the SQL
|
||||
return renderSelect(
|
||||
ArrayHelper.toIntArray(tableNumbers),
|
||||
ArrayHelper.toIntArray(columnNumbers),
|
||||
ArrayHelper.toIntArray(formulaNumbers)
|
||||
return renderSelect(
|
||||
ArrayHelper.toIntArray( tableNumbers ),
|
||||
ArrayHelper.toIntArray( columnNumbers ),
|
||||
ArrayHelper.toIntArray( formulaNumbers )
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
protected String[] getSubclassTableKeyColumns(int j) {
|
||||
return subclassTableKeyColumnClosure[j];
|
||||
}
|
||||
|
@@ -788,30 +796,30 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
protected boolean isSubclassTableLazy(int j) {
|
||||
return subclassTableIsLazyClosure[j];
|
||||
}
|
||||
|
||||
|
||||
protected boolean isNullableTable(int j) {
|
||||
return isNullableTable[j];
|
||||
}
|
||||
|
||||
|
||||
protected boolean isNullableSubclassTable(int j) {
|
||||
return isNullableSubclassTable[j];
|
||||
}
|
||||
|
||||
public String getPropertyTableName(String propertyName) {
|
||||
Integer index = getEntityMetamodel().getPropertyIndexOrNull(propertyName);
|
||||
if (index==null) {
|
||||
Integer index = getEntityMetamodel().getPropertyIndexOrNull( propertyName );
|
||||
if ( index == null ) {
|
||||
return null;
|
||||
}
|
||||
return qualifiedTableNames[ propertyTableNumbers[index] ];
|
||||
return qualifiedTableNames[propertyTableNumbers[index]];
|
||||
}
|
||||
|
||||
|
||||
protected void doPostInstantiate() {
|
||||
if (hasSequentialSelects) {
|
||||
if ( hasSequentialSelects ) {
|
||||
String[] entityNames = getSubclassClosure();
|
||||
for ( int i=1; i<entityNames.length; i++ ) {
|
||||
for ( int i = 1; i < entityNames.length; i++ ) {
|
||||
Loadable loadable = (Loadable) getFactory().getEntityPersister( entityNames[i] );
|
||||
if ( !loadable.isAbstract() ) { //perhaps not really necessary...
|
||||
String sequentialSelect = generateSequentialSelect(loadable);
|
||||
String sequentialSelect = generateSequentialSelect( loadable );
|
||||
sequentialSelectStringsByEntityName.put( entityNames[i], sequentialSelect );
|
||||
}
|
||||
}
|
||||
|
@@ -832,6 +840,6 @@ public class SingleTableEntityPersister extends AbstractEntityPersister {
|
|||
|
||||
@Override
|
||||
public FilterAliasGenerator getFilterAliasGenerator(String rootAlias) {
|
||||
return new DynamicFilterAliasGenerator(qualifiedTableNames, rootAlias);
|
||||
return new DynamicFilterAliasGenerator( qualifiedTableNames, rootAlias );
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -60,7 +60,7 @@ import org.hibernate.type.StandardBasicTypes;
|
|||
import org.hibernate.type.Type;
|
||||
|
||||
/**
|
||||
* Implementation of the "table-per-concrete-class" or "roll-down" mapping
|
||||
* Implementation of the "table-per-concrete-class" or "roll-down" mapping
|
||||
 * strategy for an entity and its inheritance hierarchy.
|
||||
*
|
||||
* @author Gavin King
|
||||
|
@@ -84,7 +84,7 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
//INITIALIZATION:
|
||||
|
||||
public UnionSubclassEntityPersister(
|
||||
final PersistentClass persistentClass,
|
||||
final PersistentClass persistentClass,
|
||||
final EntityRegionAccessStrategy cacheAccessStrategy,
|
||||
final NaturalIdRegionAccessStrategy naturalIdRegionAccessStrategy,
|
||||
final PersisterCreationContext creationContext) throws HibernateException {
|
||||
|
@@ -95,17 +95,17 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
|
||||
if ( getIdentifierGenerator() instanceof IdentityGenerator ) {
|
||||
throw new MappingException(
|
||||
"Cannot use identity column key generation with <union-subclass> mapping for: " +
|
||||
getEntityName()
|
||||
"Cannot use identity column key generation with <union-subclass> mapping for: " +
|
||||
getEntityName()
|
||||
);
|
||||
}
|
||||
|
||||
// TABLE
|
||||
|
||||
tableName = persistentClass.getTable().getQualifiedName(
|
||||
factory.getDialect(),
|
||||
factory.getSettings().getDefaultCatalogName(),
|
||||
factory.getSettings().getDefaultSchemaName()
|
||||
tableName = persistentClass.getTable().getQualifiedName(
|
||||
factory.getDialect(),
|
||||
factory.getSettings().getDefaultCatalogName(),
|
||||
factory.getSettings().getDefaultSchemaName()
|
||||
);
|
||||
/*rootTableName = persistentClass.getRootTable().getQualifiedName(
|
||||
factory.getDialect(),
|
||||
|
@@ -122,34 +122,34 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
callable = sql != null && persistentClass.isCustomInsertCallable();
|
||||
checkStyle = sql == null
|
||||
? ExecuteUpdateResultCheckStyle.COUNT
|
||||
: persistentClass.getCustomSQLInsertCheckStyle() == null
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( sql, callable )
|
||||
: persistentClass.getCustomSQLInsertCheckStyle();
|
||||
customSQLInsert = new String[] { sql };
|
||||
insertCallable = new boolean[] { callable };
|
||||
insertResultCheckStyles = new ExecuteUpdateResultCheckStyle[] { checkStyle };
|
||||
: persistentClass.getCustomSQLInsertCheckStyle() == null
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( sql, callable )
|
||||
: persistentClass.getCustomSQLInsertCheckStyle();
|
||||
customSQLInsert = new String[] {sql};
|
||||
insertCallable = new boolean[] {callable};
|
||||
insertResultCheckStyles = new ExecuteUpdateResultCheckStyle[] {checkStyle};
|
||||
|
||||
sql = persistentClass.getCustomSQLUpdate();
|
||||
callable = sql != null && persistentClass.isCustomUpdateCallable();
|
||||
checkStyle = sql == null
|
||||
? ExecuteUpdateResultCheckStyle.COUNT
|
||||
: persistentClass.getCustomSQLUpdateCheckStyle() == null
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( sql, callable )
|
||||
: persistentClass.getCustomSQLUpdateCheckStyle();
|
||||
customSQLUpdate = new String[] { sql };
|
||||
updateCallable = new boolean[] { callable };
|
||||
updateResultCheckStyles = new ExecuteUpdateResultCheckStyle[] { checkStyle };
|
||||
: persistentClass.getCustomSQLUpdateCheckStyle() == null
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( sql, callable )
|
||||
: persistentClass.getCustomSQLUpdateCheckStyle();
|
||||
customSQLUpdate = new String[] {sql};
|
||||
updateCallable = new boolean[] {callable};
|
||||
updateResultCheckStyles = new ExecuteUpdateResultCheckStyle[] {checkStyle};
|
||||
|
||||
sql = persistentClass.getCustomSQLDelete();
|
||||
callable = sql != null && persistentClass.isCustomDeleteCallable();
|
||||
checkStyle = sql == null
|
||||
? ExecuteUpdateResultCheckStyle.COUNT
|
||||
: persistentClass.getCustomSQLDeleteCheckStyle() == null
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( sql, callable )
|
||||
: persistentClass.getCustomSQLDeleteCheckStyle();
|
||||
customSQLDelete = new String[] { sql };
|
||||
deleteCallable = new boolean[] { callable };
|
||||
deleteResultCheckStyles = new ExecuteUpdateResultCheckStyle[] { checkStyle };
|
||||
: persistentClass.getCustomSQLDeleteCheckStyle() == null
|
||||
? ExecuteUpdateResultCheckStyle.determineDefault( sql, callable )
|
||||
: persistentClass.getCustomSQLDeleteCheckStyle();
|
||||
customSQLDelete = new String[] {sql};
|
||||
deleteCallable = new boolean[] {callable};
|
||||
deleteResultCheckStyles = new ExecuteUpdateResultCheckStyle[] {checkStyle};
|
||||
|
||||
discriminatorValue = persistentClass.getSubclassId();
|
||||
discriminatorSQLValue = String.valueOf( persistentClass.getSubclassId() );
|
||||
|
@@ -161,43 +161,45 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
subclassClosure[0] = getEntityName();
|
||||
|
||||
// SUBCLASSES
|
||||
subclassByDiscriminatorValue.put(
|
||||
subclassByDiscriminatorValue.put(
|
||||
persistentClass.getSubclassId(),
|
||||
persistentClass.getEntityName()
|
||||
persistentClass.getEntityName()
|
||||
);
|
||||
if ( persistentClass.isPolymorphic() ) {
|
||||
Iterator iter = persistentClass.getSubclassIterator();
|
||||
int k=1;
|
||||
int k = 1;
|
||||
while ( iter.hasNext() ) {
|
||||
Subclass sc = (Subclass) iter.next();
|
||||
subclassClosure[k++] = sc.getEntityName();
|
||||
subclassByDiscriminatorValue.put( sc.getSubclassId(), sc.getEntityName() );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//SPACES
|
||||
//TODO: i'm not sure, but perhaps we should exclude
|
||||
// abstract denormalized tables?
|
||||
|
||||
|
||||
int spacesSize = 1 + persistentClass.getSynchronizedTables().size();
|
||||
spaces = new String[spacesSize];
|
||||
spaces[0] = tableName;
|
||||
Iterator iter = persistentClass.getSynchronizedTables().iterator();
|
||||
for ( int i=1; i<spacesSize; i++ ) {
|
||||
for ( int i = 1; i < spacesSize; i++ ) {
|
||||
spaces[i] = (String) iter.next();
|
||||
}
|
||||
|
||||
|
||||
HashSet subclassTables = new HashSet();
|
||||
iter = persistentClass.getSubclassTableClosureIterator();
|
||||
while ( iter.hasNext() ) {
|
||||
Table table = (Table) iter.next();
|
||||
subclassTables.add( table.getQualifiedName(
|
||||
factory.getDialect(),
|
||||
factory.getSettings().getDefaultCatalogName(),
|
||||
factory.getSettings().getDefaultSchemaName()
|
||||
) );
|
||||
subclassTables.add(
|
||||
table.getQualifiedName(
|
||||
factory.getDialect(),
|
||||
factory.getSettings().getDefaultCatalogName(),
|
||||
factory.getSettings().getDefaultSchemaName()
|
||||
)
|
||||
);
|
||||
}
|
||||
subclassSpaces = ArrayHelper.toStringArray(subclassTables);
|
||||
subclassSpaces = ArrayHelper.toStringArray( subclassTables );
|
||||
|
||||
subquery = generateSubquery( persistentClass, creationContext.getMetadata() );
|
||||
|
||||
|
@@ -211,7 +213,7 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
}
|
||||
iter = persistentClass.getSubclassTableClosureIterator();
|
||||
while ( iter.hasNext() ) {
|
||||
Table tab = ( Table ) iter.next();
|
||||
Table tab = (Table) iter.next();
|
||||
if ( !tab.isAbstractUnionTable() ) {
|
||||
String tableName = tab.getQualifiedName(
|
||||
factory.getDialect(),
|
||||
|
@@ -221,8 +223,8 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
tableNames.add( tableName );
|
||||
String[] key = new String[idColumnSpan];
|
||||
Iterator citer = tab.getPrimaryKey().getColumnIterator();
|
||||
for ( int k=0; k<idColumnSpan; k++ ) {
|
||||
key[k] = ( ( Column ) citer.next() ).getQuotedName( factory.getDialect() );
|
||||
for ( int k = 0; k < idColumnSpan; k++ ) {
|
||||
key[k] = ( (Column) citer.next() ).getQuotedName( factory.getDialect() );
|
||||
}
|
||||
keyColumns.add( key );
|
||||
}
|
||||
|
@@ -232,14 +234,14 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
constraintOrderedKeyColumnNames = ArrayHelper.to2DStringArray( keyColumns );
|
||||
}
|
||||
else {
|
||||
constraintOrderedTableNames = new String[] { tableName };
|
||||
constraintOrderedKeyColumnNames = new String[][] { getIdentifierColumnNames() };
|
||||
constraintOrderedTableNames = new String[] {tableName};
|
||||
constraintOrderedKeyColumnNames = new String[][] {getIdentifierColumnNames()};
|
||||
}
|
||||
|
||||
initLockers();
|
||||
|
||||
initSubclassPropertyAliasesMap( persistentClass );
|
||||
|
||||
|
||||
postConstruct( creationContext.getMetadata() );
|
||||
|
||||
}
|
||||
|
@@ -247,7 +249,7 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
public Serializable[] getQuerySpaces() {
|
||||
return subclassSpaces;
|
||||
}
|
||||
|
||||
|
||||
public String getTableName() {
|
||||
return subquery;
|
||||
}
|
||||
|
@@ -269,7 +271,7 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
}
|
||||
|
||||
public String getSubclassForDiscriminatorValue(Object value) {
|
||||
return (String) subclassByDiscriminatorValue.get(value);
|
||||
return (String) subclassByDiscriminatorValue.get( value );
|
||||
}
|
||||
|
||||
public Serializable[] getPropertySpaces() {
|
||||
|
@@ -285,19 +287,19 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
*/
|
||||
protected String generateSelectString(LockMode lockMode) {
|
||||
SimpleSelect select = new SimpleSelect( getFactory().getDialect() )
|
||||
.setLockMode(lockMode)
|
||||
.setTableName( getTableName() )
|
||||
.addColumns( getIdentifierColumnNames() )
|
||||
.addColumns(
|
||||
getSubclassColumnClosure(),
|
||||
getSubclassColumnAliasClosure(),
|
||||
getSubclassColumnLazyiness()
|
||||
)
|
||||
.addColumns(
|
||||
getSubclassFormulaClosure(),
|
||||
getSubclassFormulaAliasClosure(),
|
||||
getSubclassFormulaLazyiness()
|
||||
);
|
||||
.setLockMode( lockMode )
|
||||
.setTableName( getTableName() )
|
||||
.addColumns( getIdentifierColumnNames() )
|
||||
.addColumns(
|
||||
getSubclassColumnClosure(),
|
||||
getSubclassColumnAliasClosure(),
|
||||
getSubclassColumnLazyiness()
|
||||
)
|
||||
.addColumns(
|
||||
getSubclassFormulaClosure(),
|
||||
getSubclassFormulaAliasClosure(),
|
||||
getSubclassFormulaLazyiness()
|
||||
);
|
||||
//TODO: include the rowids!!!!
|
||||
if ( hasSubclasses() ) {
|
||||
if ( isDiscriminatorFormula() ) {
|
||||
|
@@ -324,11 +326,11 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
protected String[] getKeyColumns(int j) {
|
||||
return getIdentifierColumnNames();
|
||||
}
|
||||
|
||||
|
||||
protected boolean isTableCascadeDeleteEnabled(int j) {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
protected boolean isPropertyOfTable(int property, int j) {
|
||||
return true;
|
||||
}
|
||||
|
@@ -336,7 +338,7 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
// Execute the SQL:
|
||||
|
||||
public String fromTableFragment(String name) {
|
||||
return getTableName() + ' ' + name;
|
||||
return getTableName() + ' ' + name;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -356,11 +358,11 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
}
|
||||
|
||||
protected void addDiscriminatorToSelect(SelectFragment select, String name, String suffix) {
|
||||
select.addColumn( name, getDiscriminatorColumnName(), getDiscriminatorAlias() );
|
||||
select.addColumn( name, getDiscriminatorColumnName(), getDiscriminatorAlias() );
|
||||
}
|
||||
|
||||
|
||||
protected int[] getPropertyTableNumbersInSelect() {
|
||||
return new int[ getPropertySpan() ];
|
||||
return new int[getPropertySpan()];
|
||||
}
|
||||
|
||||
protected int getSubclassPropertyTableNumber(int i) {
|
||||
|
@@ -381,32 +383,32 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
}
|
||||
|
||||
protected int[] getSubclassColumnTableNumberClosure() {
|
||||
return new int[ getSubclassColumnClosure().length ];
|
||||
return new int[getSubclassColumnClosure().length];
|
||||
}
|
||||
|
||||
protected int[] getSubclassFormulaTableNumberClosure() {
|
||||
return new int[ getSubclassFormulaClosure().length ];
|
||||
return new int[getSubclassFormulaClosure().length];
|
||||
}
|
||||
|
||||
protected boolean[] getTableHasColumns() {
|
||||
return new boolean[] { true };
|
||||
return new boolean[] {true};
|
||||
}
|
||||
|
||||
protected int[] getPropertyTableNumbers() {
|
||||
return new int[ getPropertySpan() ];
|
||||
return new int[getPropertySpan()];
|
||||
}
|
||||
|
||||
protected String generateSubquery(PersistentClass model, Mapping mapping) {
|
||||
|
||||
Dialect dialect = getFactory().getDialect();
|
||||
Settings settings = getFactory().getSettings();
|
||||
|
||||
|
||||
if ( !model.hasSubclasses() ) {
|
||||
return model.getTable().getQualifiedName(
|
||||
dialect,
|
||||
settings.getDefaultCatalogName(),
|
||||
settings.getDefaultSchemaName()
|
||||
);
|
||||
);
|
||||
}
|
||||
|
||||
HashSet columns = new LinkedHashSet();
|
||||
|
@@ -422,11 +424,11 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
}
|
||||
|
||||
StringBuilder buf = new StringBuilder()
|
||||
.append("( ");
|
||||
.append( "( " );
|
||||
|
||||
Iterator siter = new JoinedIterator(
|
||||
new SingletonIterator(model),
|
||||
model.getSubclassIterator()
|
||||
new SingletonIterator( model ),
|
||||
model.getSubclassIterator()
|
||||
);
|
||||
|
||||
while ( siter.hasNext() ) {
|
||||
|
@ -434,51 +436,53 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
Table table = clazz.getTable();
|
||||
if ( !table.isAbstractUnionTable() ) {
|
||||
//TODO: move to .sql package!!
|
||||
buf.append("select ");
|
||||
buf.append( "select " );
|
||||
Iterator citer = columns.iterator();
|
||||
while ( citer.hasNext() ) {
|
||||
Column col = (Column) citer.next();
|
||||
if ( !table.containsColumn(col) ) {
|
||||
int sqlType = col.getSqlTypeCode(mapping);
|
||||
buf.append( dialect.getSelectClauseNullString(sqlType) )
|
||||
.append(" as ");
|
||||
if ( !table.containsColumn( col ) ) {
|
||||
int sqlType = col.getSqlTypeCode( mapping );
|
||||
buf.append( dialect.getSelectClauseNullString( sqlType ) )
|
||||
.append( " as " );
|
||||
}
|
||||
buf.append( col.getQuotedName(dialect) );
|
||||
buf.append(", ");
|
||||
buf.append( col.getQuotedName( dialect ) );
|
||||
buf.append( ", " );
|
||||
}
|
||||
buf.append( clazz.getSubclassId() )
|
||||
.append(" as clazz_");
|
||||
buf.append(" from ")
|
||||
.append( table.getQualifiedName(
|
||||
dialect,
|
||||
settings.getDefaultCatalogName(),
|
||||
settings.getDefaultSchemaName()
|
||||
) );
|
||||
buf.append(" union ");
|
||||
.append( " as clazz_" );
|
||||
buf.append( " from " )
|
||||
.append(
|
||||
table.getQualifiedName(
|
||||
dialect,
|
||||
settings.getDefaultCatalogName(),
|
||||
settings.getDefaultSchemaName()
|
||||
)
|
||||
);
|
||||
buf.append( " union " );
|
||||
if ( dialect.supportsUnionAll() ) {
|
||||
buf.append("all ");
|
||||
buf.append( "all " );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if ( buf.length() > 2 ) {
|
||||
//chop the last union (all)
|
||||
buf.setLength( buf.length() - ( dialect.supportsUnionAll() ? 11 : 7 ) );
|
||||
}
|
||||
|
||||
return buf.append(" )").toString();
|
||||
return buf.append( " )" ).toString();
|
||||
}
|
||||
|
||||
protected String[] getSubclassTableKeyColumns(int j) {
|
||||
if (j!=0) {
|
||||
throw new AssertionFailure("only one table");
|
||||
if ( j != 0 ) {
|
||||
throw new AssertionFailure( "only one table" );
|
||||
}
|
||||
return getIdentifierColumnNames();
|
||||
}
|
||||
|
||||
public String getSubclassTableName(int j) {
|
||||
if (j!=0) {
|
||||
throw new AssertionFailure("only one table");
|
||||
if ( j != 0 ) {
|
||||
throw new AssertionFailure( "only one table" );
|
||||
}
|
||||
return tableName;
|
||||
}
|
||||
|
@@ -488,8 +492,8 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
}
|
||||
|
||||
protected boolean isClassOrSuperclassTable(int j) {
|
||||
if (j!=0) {
|
||||
throw new AssertionFailure("only one table");
|
||||
if ( j != 0 ) {
|
||||
throw new AssertionFailure( "only one table" );
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
@ -509,6 +513,6 @@ public class UnionSubclassEntityPersister extends AbstractEntityPersister {
|
|||
|
||||
@Override
|
||||
public FilterAliasGenerator getFilterAliasGenerator(String rootAlias) {
|
||||
return new StaticFilterAliasGenerator(rootAlias);
|
||||
return new StaticFilterAliasGenerator( rootAlias );
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -48,13 +48,13 @@ public class StandardPersisterClassResolver implements PersisterClassResolver {
	public Class<? extends EntityPersister> getEntityPersisterClass(PersistentClass metadata) {
		// todo : make sure this is based on an attribute kept on the metamodel in the new code, not the concrete PersistentClass impl found!
		if ( RootClass.class.isInstance( metadata ) ) {
		if ( metadata.hasSubclasses() ) {
			//If the class has children, we need to find of which kind
			metadata = (PersistentClass) metadata.getDirectSubclasses().next();
		}
		else {
			return singleTableEntityPersister();
		}
			if ( metadata.hasSubclasses() ) {
				//If the class has children, we need to find of which kind
				metadata = (PersistentClass) metadata.getDirectSubclasses().next();
			}
			else {
				return singleTableEntityPersister();
			}
		}
		if ( JoinedSubclass.class.isInstance( metadata ) ) {
			return joinedSubclassEntityPersister();

@@ -62,7 +62,7 @@ public class StandardPersisterClassResolver implements PersisterClassResolver {
		else if ( UnionSubclass.class.isInstance( metadata ) ) {
			return unionSubclassEntityPersister();
		}
		else if ( SingleTableSubclass.class.isInstance( metadata ) ) {
		else if ( SingleTableSubclass.class.isInstance( metadata ) ) {
			return singleTableEntityPersister();
		}
		else {

@@ -72,7 +72,7 @@ public class StandardPersisterClassResolver implements PersisterClassResolver {
		}
	}

	public Class<? extends EntityPersister> singleTableEntityPersister() {
	public Class<? extends EntityPersister> singleTableEntityPersister() {
		return SingleTableEntityPersister.class;
	}

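The resolver above picks a persister type from the shape of the mapped hierarchy: a root without children maps to the single-table persister, a root with children is classified by its first subclass, and joined, union and single-table subclasses each map to their own persister. A hedged sketch of that dispatch with stand-in types; none of these are Hibernate classes:

// Stand-in model types; the real resolver inspects Hibernate's PersistentClass hierarchy.
sealed interface Mapping permits Root, JoinedSub, UnionSub, SingleTableSub {}
record Root(boolean hasSubclasses, Mapping firstSubclass) implements Mapping {}
record JoinedSub() implements Mapping {}
record UnionSub() implements Mapping {}
record SingleTableSub() implements Mapping {}

final class PersisterResolverSketch {
	static String resolve(Mapping m) {
		if ( m instanceof Root root ) {
			// a root with children is classified by the kind of its first subclass
			return root.hasSubclasses() ? resolve( root.firstSubclass() ) : "SingleTableEntityPersister";
		}
		if ( m instanceof JoinedSub ) {
			return "JoinedSubclassEntityPersister";
		}
		if ( m instanceof UnionSub ) {
			return "UnionSubclassEntityPersister";
		}
		return "SingleTableEntityPersister";
	}
}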
@@ -276,7 +276,8 @@ public final class MessageHelper {
		if ( collectionKey.getClass().isAssignableFrom(
				ownerIdentifierType.getReturnedClass() ) ) {
			ownerKey = collectionKey;
		} else {
		}
		else {
			ownerKey = session.getPersistenceContext()
					.getEntry( collection.getOwner() ).getId();
		}

@@ -372,7 +373,8 @@ public final class MessageHelper {
		if ( id.getClass().isAssignableFrom(
				ownerIdentifierType.getReturnedClass() ) ) {
			s.append( ownerIdentifierType.toLoggableString( id, factory ) );
		} else {
		}
		else {
			// TODO: This is a crappy backup if a property-ref is used.
			// If the reference is an object w/o toString(), this isn't going to work.
			s.append( id.toString() );

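Both hunks guard the same way: the supplied key is used directly only when its runtime class is compatible with the owner's identifier type, otherwise a fallback value is used. A tiny illustration of that guard, with placeholder names:

final class LoggableKeySketch {
	// Mirrors the isAssignableFrom() guard: print 'key' as the identifier only when the types line up.
	static Object chooseLoggableKey(Object key, Class<?> identifierClass, Object fallback) {
		return key != null && key.getClass().isAssignableFrom( identifierClass ) ? key : fallback;
	}
}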
@ -37,9 +37,6 @@ import org.hibernate.engine.spi.SessionFactoryImplementor;
|
|||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
import org.hibernate.internal.CoreLogging;
|
||||
import org.hibernate.internal.CoreMessageLogger;
|
||||
import org.hibernate.internal.util.ReflectHelper;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
/**
|
||||
* Accesses property values via a get/set pair, which may be nonpublic.
|
||||
|
@ -56,19 +53,19 @@ public class BasicPropertyAccessor implements PropertyAccessor {
|
|||
private final String propertyName;
|
||||
|
||||
private BasicSetter(Class clazz, Method method, String propertyName) {
|
||||
this.clazz=clazz;
|
||||
this.method=method;
|
||||
this.propertyName=propertyName;
|
||||
this.clazz = clazz;
|
||||
this.method = method;
|
||||
this.propertyName = propertyName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void set(Object target, Object value, SessionFactoryImplementor factory)
|
||||
throws HibernateException {
|
||||
throws HibernateException {
|
||||
try {
|
||||
method.invoke( target, value );
|
||||
}
|
||||
catch (NullPointerException npe) {
|
||||
if ( value==null && method.getParameterTypes()[0].isPrimitive() ) {
|
||||
if ( value == null && method.getParameterTypes()[0].isPrimitive() ) {
|
||||
throw new PropertyAccessException(
|
||||
npe,
|
||||
"Null value was assigned to a property of primitive type",
|
||||
|
@ -107,7 +104,7 @@ public class BasicPropertyAccessor implements PropertyAccessor {
|
|||
//cannot occur
|
||||
}
|
||||
catch (IllegalArgumentException iae) {
|
||||
if ( value==null && method.getParameterTypes()[0].isPrimitive() ) {
|
||||
if ( value == null && method.getParameterTypes()[0].isPrimitive() ) {
|
||||
throw new PropertyAccessException(
|
||||
iae,
|
||||
"Null value was assigned to a property of primitive type",
|
||||
|
@ -143,11 +140,11 @@ public class BasicPropertyAccessor implements PropertyAccessor {
|
|||
}
|
||||
|
||||
Object readResolve() {
|
||||
return createSetter(clazz, propertyName);
|
||||
return createSetter( clazz, propertyName );
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
public String toString() {
|
||||
return "BasicSetter(" + clazz.getName() + '.' + propertyName + ')';
|
||||
}
|
||||
}
|
||||
|
@ -158,9 +155,9 @@ public class BasicPropertyAccessor implements PropertyAccessor {
|
|||
private final String propertyName;
|
||||
|
||||
private BasicGetter(Class clazz, Method method, String propertyName) {
|
||||
this.clazz=clazz;
|
||||
this.method=method;
|
||||
this.propertyName=propertyName;
|
||||
this.clazz = clazz;
|
||||
this.method = method;
|
||||
this.propertyName = propertyName;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -188,7 +185,7 @@ public class BasicPropertyAccessor implements PropertyAccessor {
|
|||
//cannot occur
|
||||
}
|
||||
catch (IllegalArgumentException iae) {
|
||||
LOG.illegalPropertyGetterArgument(clazz.getName(), propertyName);
|
||||
LOG.illegalPropertyGetterArgument( clazz.getName(), propertyName );
|
||||
throw new PropertyAccessException(
|
||||
iae,
|
||||
"IllegalArgumentException occurred calling",
|
||||
|
@ -225,51 +222,51 @@ public class BasicPropertyAccessor implements PropertyAccessor {
|
|||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
public String toString() {
|
||||
return "BasicGetter(" + clazz.getName() + '.' + propertyName + ')';
|
||||
}
|
||||
|
||||
Object readResolve() {
|
||||
return createGetter(clazz, propertyName);
|
||||
return createGetter( clazz, propertyName );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Setter getSetter(Class theClass, String propertyName) throws PropertyNotFoundException {
|
||||
return createSetter(theClass, propertyName);
|
||||
return createSetter( theClass, propertyName );
|
||||
}
|
||||
|
||||
private static Setter createSetter(Class theClass, String propertyName) throws PropertyNotFoundException {
|
||||
BasicSetter result = getSetterOrNull(theClass, propertyName);
|
||||
if (result==null) {
|
||||
BasicSetter result = getSetterOrNull( theClass, propertyName );
|
||||
if ( result == null ) {
|
||||
throw new PropertyNotFoundException(
|
||||
"Could not find a setter for property " +
|
||||
propertyName +
|
||||
" in class " +
|
||||
theClass.getName()
|
||||
propertyName +
|
||||
" in class " +
|
||||
theClass.getName()
|
||||
);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private static BasicSetter getSetterOrNull(Class theClass, String propertyName) {
|
||||
if (theClass==Object.class || theClass==null) {
|
||||
if ( theClass == Object.class || theClass == null ) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Method method = setterMethod(theClass, propertyName);
|
||||
Method method = setterMethod( theClass, propertyName );
|
||||
|
||||
if (method!=null) {
|
||||
method.setAccessible(true);
|
||||
return new BasicSetter(theClass, method, propertyName);
|
||||
if ( method != null ) {
|
||||
method.setAccessible( true );
|
||||
return new BasicSetter( theClass, method, propertyName );
|
||||
}
|
||||
else {
|
||||
BasicSetter setter = getSetterOrNull( theClass.getSuperclass(), propertyName );
|
||||
if (setter==null) {
|
||||
if ( setter == null ) {
|
||||
Class[] interfaces = theClass.getInterfaces();
|
||||
for ( int i=0; setter==null && i<interfaces.length; i++ ) {
|
||||
setter=getSetterOrNull( interfaces[i], propertyName );
|
||||
for ( int i = 0; setter == null && i < interfaces.length; i++ ) {
|
||||
setter = getSetterOrNull( interfaces[i], propertyName );
|
||||
}
|
||||
}
|
||||
return setter;
|
||||
|
@ -278,8 +275,8 @@ public class BasicPropertyAccessor implements PropertyAccessor {
|
|||
}
|
||||
|
||||
private static Method setterMethod(Class theClass, String propertyName) {
|
||||
BasicGetter getter = getGetterOrNull(theClass, propertyName);
|
||||
Class returnType = (getter==null) ? null : getter.getReturnType();
|
||||
BasicGetter getter = getGetterOrNull( theClass, propertyName );
|
||||
Class returnType = ( getter == null ) ? null : getter.getReturnType();
|
||||
|
||||
Method[] methods = theClass.getDeclaredMethods();
|
||||
Method potentialSetter = null;
|
||||
|
@ -301,39 +298,39 @@ public class BasicPropertyAccessor implements PropertyAccessor {
|
|||
|
||||
@Override
|
||||
public Getter getGetter(Class theClass, String propertyName) throws PropertyNotFoundException {
|
||||
return createGetter(theClass, propertyName);
|
||||
return createGetter( theClass, propertyName );
|
||||
}
|
||||
|
||||
public static Getter createGetter(Class theClass, String propertyName) throws PropertyNotFoundException {
|
||||
BasicGetter result = getGetterOrNull(theClass, propertyName);
|
||||
if (result==null) {
|
||||
BasicGetter result = getGetterOrNull( theClass, propertyName );
|
||||
if ( result == null ) {
|
||||
throw new PropertyNotFoundException(
|
||||
"Could not find a getter for " +
|
||||
propertyName +
|
||||
" in class " +
|
||||
theClass.getName()
|
||||
propertyName +
|
||||
" in class " +
|
||||
theClass.getName()
|
||||
);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private static BasicGetter getGetterOrNull(Class theClass, String propertyName) {
|
||||
if (theClass==Object.class || theClass==null) {
|
||||
if ( theClass == Object.class || theClass == null ) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Method method = getterMethod(theClass, propertyName);
|
||||
Method method = getterMethod( theClass, propertyName );
|
||||
|
||||
if (method!=null) {
|
||||
method.setAccessible(true);
|
||||
return new BasicGetter(theClass, method, propertyName);
|
||||
if ( method != null ) {
|
||||
method.setAccessible( true );
|
||||
return new BasicGetter( theClass, method, propertyName );
|
||||
}
|
||||
else {
|
||||
BasicGetter getter = getGetterOrNull( theClass.getSuperclass(), propertyName );
|
||||
if (getter==null) {
|
||||
if ( getter == null ) {
|
||||
Class[] interfaces = theClass.getInterfaces();
|
||||
for ( int i=0; getter==null && i<interfaces.length; i++ ) {
|
||||
getter=getGetterOrNull( interfaces[i], propertyName );
|
||||
for ( int i = 0; getter == null && i < interfaces.length; i++ ) {
|
||||
getter = getGetterOrNull( interfaces[i], propertyName );
|
||||
}
|
||||
}
|
||||
return getter;
|
||||
|
|
|
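The BasicPropertyAccessor hunks above revolve around getGetterOrNull()/getSetterOrNull(), which search the declared methods of a class, then its superclasses, then its interfaces, and call setAccessible(true) on whatever they find. A condensed, hedged sketch of that lookup for getters only, not the Hibernate implementation:

import java.lang.reflect.Method;

final class GetterLookupSketch {
	// Find a no-arg getX()/isX() accessor for 'property', searching the class, its superclasses, then its interfaces.
	static Method find(Class<?> type, String property) {
		if ( type == null || type == Object.class ) {
			return null;
		}
		String suffix = Character.toUpperCase( property.charAt( 0 ) ) + property.substring( 1 );
		for ( Method m : type.getDeclaredMethods() ) {
			if ( m.getParameterCount() == 0
					&& ( m.getName().equals( "get" + suffix ) || m.getName().equals( "is" + suffix ) ) ) {
				m.setAccessible( true );
				return m;
			}
		}
		Method fromSuper = find( type.getSuperclass(), property );
		if ( fromSuper != null ) {
			return fromSuper;
		}
		for ( Class<?> iface : type.getInterfaces() ) {
			Method fromIface = find( iface, property );
			if ( fromIface != null ) {
				return fromIface;
			}
		}
		return null;
	}
}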
@@ -22,48 +22,48 @@
 * Boston, MA 02110-1301 USA
 */
package org.hibernate.property;

import org.hibernate.PropertyNotFoundException;

/**
 * @author max
 *
 */
public class ChainedPropertyAccessor implements PropertyAccessor {

	final PropertyAccessor[] chain;

	public ChainedPropertyAccessor(PropertyAccessor[] chain) {
		this.chain = chain;
	}

	public Getter getGetter(Class theClass, String propertyName)
			throws PropertyNotFoundException {
		Getter result = null;
		for (int i = 0; i < chain.length; i++) {
			PropertyAccessor candidate = chain[i];
		for ( PropertyAccessor candidate : chain ) {
			try {
				result = candidate.getGetter(theClass, propertyName);
				result = candidate.getGetter( theClass, propertyName );
				return result;
			} catch (PropertyNotFoundException pnfe) {
			}
			catch (PropertyNotFoundException pnfe) {
				// ignore
			}
		}
		throw new PropertyNotFoundException("Could not find getter for " + propertyName + " on " + theClass);
		throw new PropertyNotFoundException( "Could not find getter for " + propertyName + " on " + theClass );
	}

	public Setter getSetter(Class theClass, String propertyName)
			throws PropertyNotFoundException {
		Setter result = null;
		for (int i = 0; i < chain.length; i++) {
			PropertyAccessor candidate = chain[i];
		for ( PropertyAccessor candidate : chain ) {
			try {
				result = candidate.getSetter(theClass, propertyName);
				result = candidate.getSetter( theClass, propertyName );
				return result;
			} catch (PropertyNotFoundException pnfe) {
				//
			}
			catch (PropertyNotFoundException pnfe) {
				// ignore
			}
		}
		throw new PropertyNotFoundException("Could not find setter for " + propertyName + " on " + theClass);
		throw new PropertyNotFoundException( "Could not find setter for " + propertyName + " on " + theClass );
	}

}

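ChainedPropertyAccessor tries each accessor in order and treats PropertyNotFoundException as "keep looking". The same first-match-wins pattern in a generic form; the names here are illustrative:

import java.util.List;
import java.util.Optional;
import java.util.function.Function;

final class FirstMatchSketch {
	// Ask each resolver in turn; a resolver signals "not found" by returning an empty Optional.
	static <T, R> R resolve(List<Function<T, Optional<R>>> chain, T input, String what) {
		for ( Function<T, Optional<R>> candidate : chain ) {
			Optional<R> result = candidate.apply( input );
			if ( result.isPresent() ) {
				return result.get();
			}
		}
		throw new IllegalArgumentException( "Could not find " + what + " for " + input );
	}
}

Returning Optional instead of throwing keeps the "not found" signal off the exception path, which is cheaper when misses are common.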
@ -22,6 +22,7 @@
|
|||
* Boston, MA 02110-1301 USA
|
||||
*/
|
||||
package org.hibernate.property;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Member;
|
||||
import java.lang.reflect.Method;
|
||||
|
@ -32,7 +33,6 @@ import org.hibernate.PropertyAccessException;
|
|||
import org.hibernate.PropertyNotFoundException;
|
||||
import org.hibernate.engine.spi.SessionFactoryImplementor;
|
||||
import org.hibernate.engine.spi.SessionImplementor;
|
||||
import org.hibernate.internal.util.ReflectHelper;
|
||||
|
||||
/**
|
||||
* Accesses fields directly.
|
||||
|
@ -130,7 +130,8 @@ public class DirectPropertyAccessor implements PropertyAccessor {
|
|||
clazz,
|
||||
name
|
||||
);
|
||||
} else {
|
||||
}
|
||||
else {
|
||||
throw new PropertyAccessException(e, "could not set a field value by reflection", true, clazz, name);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -57,26 +57,28 @@ public final class PropertyAccessorFactory {
|
|||
// 3) Code can then simply call PropertyAccess.getGetter() with no parameters; likewise with
|
||||
// PropertyAccessor.getSetter()
|
||||
|
||||
/**
|
||||
* Retrieves a PropertyAccessor instance based on the given property definition and
|
||||
* entity mode.
|
||||
*
|
||||
* @param property The property for which to retrieve an accessor.
|
||||
* @param mode The mode for the resulting entity.
|
||||
* @return An appropriate accessor.
|
||||
* @throws MappingException
|
||||
*/
|
||||
/**
|
||||
* Retrieves a PropertyAccessor instance based on the given property definition and
|
||||
* entity mode.
|
||||
*
|
||||
* @param property The property for which to retrieve an accessor.
|
||||
* @param mode The mode for the resulting entity.
|
||||
*
|
||||
* @return An appropriate accessor.
|
||||
*
|
||||
* @throws MappingException
|
||||
*/
|
||||
public static PropertyAccessor getPropertyAccessor(Property property, EntityMode mode) throws MappingException {
|
||||
//TODO: this is temporary in that the end result will probably not take a Property reference per-se.
|
||||
if ( null == mode || EntityMode.POJO.equals( mode ) ) {
|
||||
return getPojoPropertyAccessor( property.getPropertyAccessorName() );
|
||||
}
|
||||
else if ( EntityMode.MAP.equals( mode ) ) {
|
||||
return getDynamicMapPropertyAccessor();
|
||||
}
|
||||
else {
|
||||
throw new MappingException( "Unknown entity mode [" + mode + "]" );
|
||||
}
|
||||
if ( null == mode || EntityMode.POJO.equals( mode ) ) {
|
||||
return getPojoPropertyAccessor( property.getPropertyAccessorName() );
|
||||
}
|
||||
else if ( EntityMode.MAP.equals( mode ) ) {
|
||||
return getDynamicMapPropertyAccessor();
|
||||
}
|
||||
else {
|
||||
throw new MappingException( "Unknown entity mode [" + mode + "]" );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
@ -84,6 +86,7 @@ public final class PropertyAccessorFactory {
|
|||
* Retreives a PropertyAccessor specific for a PojoRepresentation with the given access strategy.
|
||||
*
|
||||
* @param pojoAccessorStrategy The access strategy.
|
||||
*
|
||||
* @return An appropriate accessor.
|
||||
*/
|
||||
private static PropertyAccessor getPojoPropertyAccessor(String pojoAccessorStrategy) {
|
||||
|
@ -96,7 +99,7 @@ public final class PropertyAccessorFactory {
|
|||
else if ( "embedded".equals( pojoAccessorStrategy ) ) {
|
||||
return EMBEDDED_PROPERTY_ACCESSOR;
|
||||
}
|
||||
else if ( "noop".equals(pojoAccessorStrategy) ) {
|
||||
else if ( "noop".equals( pojoAccessorStrategy ) ) {
|
||||
return NOOP_ACCESSOR;
|
||||
}
|
||||
else {
|
||||
|
@ -114,44 +117,45 @@ public final class PropertyAccessorFactory {
|
|||
accessorClass = ReflectHelper.classForName( accessorName );
|
||||
}
|
||||
catch (ClassNotFoundException cnfe) {
|
||||
throw new MappingException("could not find PropertyAccessor class: " + accessorName, cnfe);
|
||||
throw new MappingException( "could not find PropertyAccessor class: " + accessorName, cnfe );
|
||||
}
|
||||
try {
|
||||
return (PropertyAccessor) accessorClass.newInstance();
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new MappingException("could not instantiate PropertyAccessor class: " + accessorName, e);
|
||||
throw new MappingException( "could not instantiate PropertyAccessor class: " + accessorName, e );
|
||||
}
|
||||
}
|
||||
|
||||
private PropertyAccessorFactory() {}
|
||||
private PropertyAccessorFactory() {
|
||||
}
|
||||
|
||||
// todo : this eventually needs to be removed
|
||||
public static PropertyAccessor getPropertyAccessor(Class optionalClass, String type) throws MappingException {
|
||||
if ( type==null ) {
|
||||
type = optionalClass==null || optionalClass==Map.class ? "map" : "property";
|
||||
if ( type == null ) {
|
||||
type = optionalClass == null || optionalClass == Map.class ? "map" : "property";
|
||||
}
|
||||
return getPropertyAccessor(type);
|
||||
return getPropertyAccessor( type );
|
||||
}
|
||||
|
||||
// todo : this eventually needs to be removed
|
||||
public static PropertyAccessor getPropertyAccessor(String type) throws MappingException {
|
||||
if ( type==null || "property".equals(type) ) {
|
||||
if ( type == null || "property".equals( type ) ) {
|
||||
return BASIC_PROPERTY_ACCESSOR;
|
||||
}
|
||||
if ( "field".equals(type) ) {
|
||||
if ( "field".equals( type ) ) {
|
||||
return DIRECT_PROPERTY_ACCESSOR;
|
||||
}
|
||||
if ( "map".equals(type) ) {
|
||||
if ( "map".equals( type ) ) {
|
||||
return MAP_ACCESSOR;
|
||||
}
|
||||
if ( "embedded".equals(type) ) {
|
||||
if ( "embedded".equals( type ) ) {
|
||||
return EMBEDDED_PROPERTY_ACCESSOR;
|
||||
}
|
||||
if ( "noop".equals(type)) {
|
||||
if ( "noop".equals( type ) ) {
|
||||
return NOOP_ACCESSOR;
|
||||
}
|
||||
|
||||
return resolveCustomAccessor(type);
|
||||
return resolveCustomAccessor( type );
|
||||
}
|
||||
}
|
||||
|
|
|
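PropertyAccessorFactory above maps the well-known strategy names ("property", "field", "map", "embedded", "noop") to shared accessor instances and treats any other name as the class name of a custom accessor to instantiate reflectively. A hedged sketch of that resolution shape with placeholder values:

import java.util.Map;

final class StrategyLookupSketch {
	// Named singletons; unknown names are assumed to be class names of custom strategies.
	static final Map<String, Object> BUILT_IN = Map.of(
			"property", new Object(),
			"field", new Object(),
			"noop", new Object()
	);

	static Object resolve(String name) {
		Object builtIn = BUILT_IN.get( name == null ? "property" : name );
		if ( builtIn != null ) {
			return builtIn;
		}
		try {
			// fall back to reflective instantiation of a user-supplied strategy class
			return Class.forName( name ).getDeclaredConstructor().newInstance();
		}
		catch (Exception e) {
			throw new IllegalArgumentException( "could not instantiate strategy: " + name, e );
		}
	}
}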
@ -39,7 +39,7 @@ import org.hibernate.type.CompositeType;
|
|||
*/
|
||||
public abstract class BasicLazyInitializer extends AbstractLazyInitializer {
|
||||
|
||||
protected static final Object INVOKE_IMPLEMENTATION = new MarkerObject("INVOKE_IMPLEMENTATION");
|
||||
protected static final Object INVOKE_IMPLEMENTATION = new MarkerObject( "INVOKE_IMPLEMENTATION" );
|
||||
|
||||
protected final Class persistentClass;
|
||||
protected final Method getIdentifierMethod;
|
||||
|
@ -51,14 +51,14 @@ public abstract class BasicLazyInitializer extends AbstractLazyInitializer {
|
|||
|
||||
protected BasicLazyInitializer(
|
||||
String entityName,
|
||||
Class persistentClass,
|
||||
Serializable id,
|
||||
Method getIdentifierMethod,
|
||||
Method setIdentifierMethod,
|
||||
CompositeType componentIdType,
|
||||
SessionImplementor session,
|
||||
boolean overridesEquals) {
|
||||
super(entityName, id, session);
|
||||
Class persistentClass,
|
||||
Serializable id,
|
||||
Method getIdentifierMethod,
|
||||
Method setIdentifierMethod,
|
||||
CompositeType componentIdType,
|
||||
SessionImplementor session,
|
||||
boolean overridesEquals) {
|
||||
super( entityName, id, session );
|
||||
this.persistentClass = persistentClass;
|
||||
this.getIdentifierMethod = getIdentifierMethod;
|
||||
this.setIdentifierMethod = setIdentifierMethod;
|
||||
|
@ -72,25 +72,25 @@ public abstract class BasicLazyInitializer extends AbstractLazyInitializer {
|
|||
String methodName = method.getName();
|
||||
int params = args.length;
|
||||
|
||||
if ( params==0 ) {
|
||||
if ( "writeReplace".equals(methodName) ) {
|
||||
if ( params == 0 ) {
|
||||
if ( "writeReplace".equals( methodName ) ) {
|
||||
return getReplacement();
|
||||
}
|
||||
else if ( !overridesEquals && "hashCode".equals(methodName) ) {
|
||||
return System.identityHashCode(proxy);
|
||||
else if ( !overridesEquals && "hashCode".equals( methodName ) ) {
|
||||
return System.identityHashCode( proxy );
|
||||
}
|
||||
else if ( isUninitialized() && method.equals(getIdentifierMethod) ) {
|
||||
else if ( isUninitialized() && method.equals( getIdentifierMethod ) ) {
|
||||
return getIdentifier();
|
||||
}
|
||||
else if ( "getHibernateLazyInitializer".equals(methodName) ) {
|
||||
else if ( "getHibernateLazyInitializer".equals( methodName ) ) {
|
||||
return this;
|
||||
}
|
||||
}
|
||||
else if ( params==1 ) {
|
||||
if ( !overridesEquals && "equals".equals(methodName) ) {
|
||||
return args[0]==proxy;
|
||||
else if ( params == 1 ) {
|
||||
if ( !overridesEquals && "equals".equals( methodName ) ) {
|
||||
return args[0] == proxy;
|
||||
}
|
||||
else if ( method.equals(setIdentifierMethod) ) {
|
||||
else if ( method.equals( setIdentifierMethod ) ) {
|
||||
initialize();
|
||||
setIdentifier( (Serializable) args[0] );
|
||||
return INVOKE_IMPLEMENTATION;
|
||||
|
@ -98,7 +98,7 @@ public abstract class BasicLazyInitializer extends AbstractLazyInitializer {
|
|||
}
|
||||
|
||||
//if it is a property of an embedded component, invoke on the "identifier"
|
||||
if ( componentIdType!=null && componentIdType.isMethodOf(method) ) {
|
||||
if ( componentIdType != null && componentIdType.isMethodOf( method ) ) {
|
||||
return method.invoke( getIdentifier(), args );
|
||||
}
|
||||
|
||||
|
@ -109,19 +109,19 @@ public abstract class BasicLazyInitializer extends AbstractLazyInitializer {
|
|||
|
||||
private Object getReplacement() {
|
||||
final SessionImplementor session = getSession();
|
||||
if ( isUninitialized() && session != null && session.isOpen()) {
|
||||
if ( isUninitialized() && session != null && session.isOpen() ) {
|
||||
final EntityKey key = session.generateEntityKey(
|
||||
getIdentifier(),
|
||||
session.getFactory().getEntityPersister( getEntityName() )
|
||||
);
|
||||
final Object entity = session.getPersistenceContext().getEntity(key);
|
||||
if (entity!=null) {
|
||||
final Object entity = session.getPersistenceContext().getEntity( key );
|
||||
if ( entity != null ) {
|
||||
setImplementation( entity );
|
||||
}
|
||||
}
|
||||
|
||||
if ( isUninitialized() ) {
|
||||
if (replacement==null) {
|
||||
if ( replacement == null ) {
|
||||
replacement = serializableProxy();
|
||||
}
|
||||
return replacement;
|
||||
|
|
|
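BasicLazyInitializer.invoke() answers a handful of calls itself (hashCode and equals when not overridden, the identifier getter while uninitialized, writeReplace) and only touches the real entity for everything else. The same interception shape with a plain java.lang.reflect.Proxy handler; getId is a hypothetical identifier getter, and none of this is Hibernate's proxy code:

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.util.function.Supplier;

final class LazyHandlerSketch implements InvocationHandler {
	private final Supplier<Object> loader;   // loads the real target on first real access
	private final Object id;                 // identifier answered without loading
	private Object target;

	LazyHandlerSketch(Object id, Supplier<Object> loader) {
		this.id = id;
		this.loader = loader;
	}

	@Override
	public Object invoke(Object proxy, Method method, Object[] args) throws Exception {
		// answer identity-style calls without touching the target
		if ( args == null && "hashCode".equals( method.getName() ) ) {
			return System.identityHashCode( proxy );
		}
		if ( args != null && args.length == 1 && "equals".equals( method.getName() ) ) {
			return args[0] == proxy;
		}
		if ( args == null && "getId".equals( method.getName() ) ) {
			return id;   // hypothetical identifier getter; no initialization needed
		}
		if ( target == null ) {
			target = loader.get();
		}
		return method.invoke( target, args );
	}
}

Such a handler would be installed with Proxy.newProxyInstance(loader, interfaces, handler).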
@@ -28,10 +28,8 @@ import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.sql.Connection;

import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.resource.jdbc.LogicalConnection;
import org.hibernate.resource.jdbc.ResourceRegistry;
import org.hibernate.resource.jdbc.spi.JdbcSessionContext;
import org.hibernate.resource.jdbc.spi.LogicalConnectionImplementor;

import org.jboss.logging.Logger;

@ -27,27 +27,34 @@ import java.io.IOException;
|
|||
import java.io.ObjectOutputStream;
|
||||
import java.sql.Connection;
|
||||
|
||||
import org.hibernate.engine.jdbc.spi.ConnectionObserver;
|
||||
import org.hibernate.resource.jdbc.LogicalConnection;
|
||||
|
||||
/**
|
||||
* SPI contract for LogicalConnection
|
||||
*
|
||||
* @author Steve Ebersole
|
||||
*/
|
||||
public interface LogicalConnectionImplementor extends LogicalConnection {
|
||||
// todo : expose the Connection as below? or accept(WorkInConnection) where WorkInConnection is given access to Connection?
|
||||
public Connection getPhysicalConnection();
|
||||
/**
|
||||
* Exposes access to the "real" Connection.
|
||||
*
|
||||
* @todo : expose Connection as here? or accept(WorkInConnection) where WorkInConnection is given access to Connection?
|
||||
*
|
||||
* @return The connection
|
||||
*/
|
||||
Connection getPhysicalConnection();
|
||||
|
||||
/**
|
||||
* Notification indicating a JDBC statement has been executed to trigger
|
||||
* {@link org.hibernate.ConnectionReleaseMode#AFTER_STATEMENT} releasing if needed
|
||||
*/
|
||||
public void afterStatement();
|
||||
void afterStatement();
|
||||
|
||||
/**
|
||||
* Notification indicating a transaction has completed to trigger
|
||||
* {@link org.hibernate.ConnectionReleaseMode#AFTER_TRANSACTION} releasing if needed
|
||||
*/
|
||||
public void afterTransaction();
|
||||
void afterTransaction();
|
||||
|
||||
/**
|
||||
* Manually disconnect the underlying JDBC Connection. The assumption here
|
||||
|
@ -56,7 +63,7 @@ public interface LogicalConnectionImplementor extends LogicalConnection {
|
|||
* @return The connection maintained here at time of disconnect. {@code null} if
|
||||
* there was no connection cached internally.
|
||||
*/
|
||||
public Connection manualDisconnect();
|
||||
Connection manualDisconnect();
|
||||
|
||||
/**
|
||||
* Manually reconnect the underlying JDBC Connection. Should be called at some point after manualDisconnect().
|
||||
|
@ -64,16 +71,16 @@ public interface LogicalConnectionImplementor extends LogicalConnection {
|
|||
* @param suppliedConnection For user supplied connection strategy the user needs to hand us the connection
|
||||
* with which to reconnect. It is an error to pass a connection in the other strategies.
|
||||
*/
|
||||
public void manualReconnect(Connection suppliedConnection);
|
||||
void manualReconnect(Connection suppliedConnection);
|
||||
|
||||
/**
|
||||
* Creates a shareable copy of itself for use in "shared sessions"
|
||||
*
|
||||
* @return The shareable copy.
|
||||
*/
|
||||
public LogicalConnectionImplementor makeShareableCopy();
|
||||
LogicalConnectionImplementor makeShareableCopy();
|
||||
|
||||
public PhysicalJdbcTransaction getPhysicalJdbcTransaction();
|
||||
PhysicalJdbcTransaction getPhysicalJdbcTransaction();
|
||||
|
||||
/**
|
||||
* Serialization hook
|
||||
|
@ -82,5 +89,5 @@ public interface LogicalConnectionImplementor extends LogicalConnection {
|
|||
*
|
||||
* @throws java.io.IOException Problem accessing stream
|
||||
*/
|
||||
public void serialize(ObjectOutputStream oos) throws IOException;
|
||||
void serialize(ObjectOutputStream oos) throws IOException;
|
||||
}
|
||||
|
|
|
@ -23,10 +23,9 @@
|
|||
*/
|
||||
package org.hibernate.resource.transaction.backend.jdbc.internal;
|
||||
|
||||
import javax.transaction.Status;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import javax.transaction.Status;
|
||||
|
||||
import org.hibernate.engine.jdbc.spi.JdbcServices;
|
||||
import org.hibernate.engine.transaction.spi.IsolationDelegate;
|
||||
|
@ -38,7 +37,6 @@ import org.hibernate.resource.transaction.TransactionCoordinator;
|
|||
import org.hibernate.resource.transaction.TransactionCoordinatorBuilder;
|
||||
import org.hibernate.resource.transaction.backend.jdbc.spi.JdbcResourceTransaction;
|
||||
import org.hibernate.resource.transaction.backend.jdbc.spi.JdbcResourceTransactionAccess;
|
||||
import org.hibernate.resource.transaction.backend.jta.internal.synchronization.ExceptionMapper;
|
||||
import org.hibernate.resource.transaction.internal.SynchronizationRegistryStandardImpl;
|
||||
import org.hibernate.resource.transaction.spi.TransactionCoordinatorOwner;
|
||||
import org.hibernate.resource.transaction.spi.TransactionStatus;
|
||||
|
|
|
@@ -23,11 +23,8 @@
 */
package org.hibernate.resource.transaction.backend.jta.internal.synchronization;

import javax.persistence.spi.PersistenceUnitTransactionType;
import java.io.Serializable;

import org.hibernate.engine.spi.SessionImplementor;

/**
 * A pluggable strategy for defining any actions to be performed during
 * {@link javax.transaction.Synchronization#afterCompletion} processing from the the

@@ -36,5 +33,5 @@ import org.hibernate.engine.spi.SessionImplementor;
 * @author Steve Ebersole
 */
public interface AfterCompletionAction extends Serializable {
	public void doAction(boolean successful);
	void doAction(boolean successful);
}

@@ -63,5 +63,5 @@ public enum TransactionStatus {
	 * Status code indicating a transaction that is in the process of
	 * rolling back.
	 */
	ROLLING_BACK
	ROLLING_BACK
}

@ -285,7 +285,7 @@ public abstract class AbstractServiceRegistryImpl
|
|||
}
|
||||
}
|
||||
catch (NullPointerException e) {
|
||||
log.error( "NPE injecting service deps : " + service.getClass().getName() );
|
||||
log.error( "NPE injecting service deps : " + service.getClass().getName() );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -344,7 +344,7 @@ public abstract class AbstractServiceRegistryImpl
|
|||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings( {"unchecked"})
|
||||
@SuppressWarnings( {"unchecked"})
|
||||
public void destroy() {
|
||||
if ( !active ) {
|
||||
return;
|
||||
|
|
|
@@ -29,7 +29,7 @@ import java.util.List;

/**
 * Since Service lookup is a very hot operation and essentially it's a read only
 * data structure, to achieve threadsafety we can use immutability.
 * data structure, to achieve thread-safety we can use immutability.
 * For our use case we just need reference equality, and the expectation is that a limited
 * number of elements will be contained in this custom collection (<32).
 * So the following structure is functionally equivalent to an Identity based ConcurrentMap,

@@ -123,6 +123,12 @@ public class ConcurrentServiceBinding<K,V> {
	}

	@Override
	public int hashCode() {
		return hash;
	}

	@Override
	@SuppressWarnings({"unchecked", "EqualsWhichDoesntCheckParameterClass"})
	public boolean equals(Object obj) {
		//A ClassCastException is really not expected here,
		//as it's an internal private class,

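The comment block above spells out the design constraints: reads vastly outnumber writes, keys only need reference equality, and the collection stays small, so readers should never lock. A hedged sketch of that idea as a copy-on-write linked list of bindings, not the real ConcurrentServiceBinding:

final class IdentityBindings<K, V> {
	private record Node<K, V>(K key, V value, Node<K, V> next) {}

	// volatile head: readers always see a fully built, never-mutated chain
	private volatile Node<K, V> head;

	V get(K key) {
		for ( Node<K, V> n = head; n != null; n = n.next() ) {
			if ( n.key() == key ) {   // reference equality is all we need here
				return n.value();
			}
		}
		return null;
	}

	synchronized void put(K key, V value) {
		// writes are rare: prepend a new immutable node
		head = new Node<>( key, value, head );
	}
}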
@@ -25,13 +25,12 @@ package org.hibernate.service.internal;

import org.hibernate.boot.spi.SessionFactoryOptions;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.service.Service;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.hibernate.service.spi.SessionFactoryServiceRegistryFactory;

/**
 * Acts as a {@link Service} in the {@link org.hibernate.boot.registry.internal.StandardServiceRegistryImpl} whose function is as a factory for
 * {@link SessionFactoryServiceRegistryImpl} implementations.
 * Acts as a service in the {@link org.hibernate.boot.registry.internal.StandardServiceRegistryImpl} whose
 * function is to act as a factory for {@link SessionFactoryServiceRegistryImpl} implementations.
 *
 * @author Steve Ebersole
 */

@ -23,6 +23,7 @@
|
|||
*
|
||||
*/
|
||||
package org.hibernate.sql;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
|
||||
|
@ -30,6 +31,7 @@ import org.hibernate.dialect.Dialect;
|
|||
|
||||
/**
|
||||
* A translated HQL query
|
||||
*
|
||||
* @author Gavin King
|
||||
*/
|
||||
public class QuerySelect {
|
||||
|
@ -44,37 +46,38 @@ public class QuerySelect {
|
|||
private boolean distinct;
|
||||
|
||||
private static final HashSet<String> DONT_SPACE_TOKENS = new HashSet<String>();
|
||||
|
||||
static {
|
||||
//dontSpace.add("'");
|
||||
DONT_SPACE_TOKENS.add(".");
|
||||
DONT_SPACE_TOKENS.add("+");
|
||||
DONT_SPACE_TOKENS.add("-");
|
||||
DONT_SPACE_TOKENS.add("/");
|
||||
DONT_SPACE_TOKENS.add("*");
|
||||
DONT_SPACE_TOKENS.add("<");
|
||||
DONT_SPACE_TOKENS.add(">");
|
||||
DONT_SPACE_TOKENS.add("=");
|
||||
DONT_SPACE_TOKENS.add("#");
|
||||
DONT_SPACE_TOKENS.add("~");
|
||||
DONT_SPACE_TOKENS.add("|");
|
||||
DONT_SPACE_TOKENS.add("&");
|
||||
DONT_SPACE_TOKENS.add("<=");
|
||||
DONT_SPACE_TOKENS.add(">=");
|
||||
DONT_SPACE_TOKENS.add("=>");
|
||||
DONT_SPACE_TOKENS.add("=<");
|
||||
DONT_SPACE_TOKENS.add("!=");
|
||||
DONT_SPACE_TOKENS.add("<>");
|
||||
DONT_SPACE_TOKENS.add("!#");
|
||||
DONT_SPACE_TOKENS.add("!~");
|
||||
DONT_SPACE_TOKENS.add("!<");
|
||||
DONT_SPACE_TOKENS.add("!>");
|
||||
DONT_SPACE_TOKENS.add("("); //for MySQL
|
||||
DONT_SPACE_TOKENS.add(")");
|
||||
DONT_SPACE_TOKENS.add( "." );
|
||||
DONT_SPACE_TOKENS.add( "+" );
|
||||
DONT_SPACE_TOKENS.add( "-" );
|
||||
DONT_SPACE_TOKENS.add( "/" );
|
||||
DONT_SPACE_TOKENS.add( "*" );
|
||||
DONT_SPACE_TOKENS.add( "<" );
|
||||
DONT_SPACE_TOKENS.add( ">" );
|
||||
DONT_SPACE_TOKENS.add( "=" );
|
||||
DONT_SPACE_TOKENS.add( "#" );
|
||||
DONT_SPACE_TOKENS.add( "~" );
|
||||
DONT_SPACE_TOKENS.add( "|" );
|
||||
DONT_SPACE_TOKENS.add( "&" );
|
||||
DONT_SPACE_TOKENS.add( "<=" );
|
||||
DONT_SPACE_TOKENS.add( ">=" );
|
||||
DONT_SPACE_TOKENS.add( "=>" );
|
||||
DONT_SPACE_TOKENS.add( "=<" );
|
||||
DONT_SPACE_TOKENS.add( "!=" );
|
||||
DONT_SPACE_TOKENS.add( "<>" );
|
||||
DONT_SPACE_TOKENS.add( "!#" );
|
||||
DONT_SPACE_TOKENS.add( "!~" );
|
||||
DONT_SPACE_TOKENS.add( "!<" );
|
||||
DONT_SPACE_TOKENS.add( "!>" );
|
||||
DONT_SPACE_TOKENS.add( "(" ); //for MySQL
|
||||
DONT_SPACE_TOKENS.add( ")" );
|
||||
}
|
||||
|
||||
public QuerySelect(Dialect dialect) {
|
||||
this.dialect = dialect;
|
||||
joins = new QueryJoinFragment(dialect, false);
|
||||
joins = new QueryJoinFragment( dialect, false );
|
||||
}
|
||||
|
||||
public JoinFragment getJoinFragment() {
|
||||
|
@ -82,20 +85,20 @@ public class QuerySelect {
|
|||
}
|
||||
|
||||
public void addSelectFragmentString(String fragment) {
|
||||
if ( fragment.length()>0 && fragment.charAt(0)==',' ) {
|
||||
fragment = fragment.substring(1);
|
||||
if ( fragment.length() > 0 && fragment.charAt( 0 ) == ',' ) {
|
||||
fragment = fragment.substring( 1 );
|
||||
}
|
||||
fragment = fragment.trim();
|
||||
if ( fragment.length()>0 ) {
|
||||
if ( select.length()>0 ) {
|
||||
select.append(", ");
|
||||
if ( fragment.length() > 0 ) {
|
||||
if ( select.length() > 0 ) {
|
||||
select.append( ", " );
|
||||
}
|
||||
select.append(fragment);
|
||||
select.append( fragment );
|
||||
}
|
||||
}
|
||||
|
||||
public void addSelectColumn(String columnName, String alias) {
|
||||
addSelectFragmentString(columnName + ' ' + alias);
|
||||
addSelectFragmentString( columnName + ' ' + alias );
|
||||
}
|
||||
|
||||
public void setDistinct(boolean distinct) {
|
||||
|
@ -104,107 +107,109 @@ public class QuerySelect {
|
|||
|
||||
public void setWhereTokens(Iterator tokens) {
|
||||
//if ( conjunctiveWhere.length()>0 ) conjunctiveWhere.append(" and ");
|
||||
appendTokens(where, tokens);
|
||||
appendTokens( where, tokens );
|
||||
}
|
||||
|
||||
public void prependWhereConditions(String conditions) {
|
||||
if (where.length() > 0) {
|
||||
where.insert(0, conditions + " and ");
|
||||
if ( where.length() > 0 ) {
|
||||
where.insert( 0, conditions + " and " );
|
||||
}
|
||||
else {
|
||||
where.append(conditions);
|
||||
where.append( conditions );
|
||||
}
|
||||
}
|
||||
|
||||
public void setGroupByTokens(Iterator tokens) {
|
||||
//if ( groupBy.length()>0 ) groupBy.append(" and ");
|
||||
appendTokens(groupBy, tokens);
|
||||
appendTokens( groupBy, tokens );
|
||||
}
|
||||
|
||||
public void setOrderByTokens(Iterator tokens) {
|
||||
//if ( orderBy.length()>0 ) orderBy.append(" and ");
|
||||
appendTokens(orderBy, tokens);
|
||||
appendTokens( orderBy, tokens );
|
||||
}
|
||||
|
||||
public void setHavingTokens(Iterator tokens) {
|
||||
//if ( having.length()>0 ) having.append(" and ");
|
||||
appendTokens(having, tokens);
|
||||
appendTokens( having, tokens );
|
||||
}
|
||||
|
||||
public void addOrderBy(String orderByString) {
|
||||
if ( orderBy.length() > 0 ) {
|
||||
orderBy.append(", ");
|
||||
orderBy.append( ", " );
|
||||
}
|
||||
orderBy.append(orderByString);
|
||||
orderBy.append( orderByString );
|
||||
}
|
||||
|
||||
public String toQueryString() {
|
||||
StringBuilder buf = new StringBuilder(50);
|
||||
if (comment!=null) {
|
||||
buf.append("/* ").append(comment).append(" */ ");
|
||||
StringBuilder buf = new StringBuilder( 50 );
|
||||
if ( comment != null ) {
|
||||
buf.append( "/* " ).append( comment ).append( " */ " );
|
||||
}
|
||||
buf.append("select ");
|
||||
if (distinct) {
|
||||
buf.append("distinct ");
|
||||
buf.append( "select " );
|
||||
if ( distinct ) {
|
||||
buf.append( "distinct " );
|
||||
}
|
||||
String from = joins.toFromFragmentString();
|
||||
if ( from.startsWith(",") ) {
|
||||
from = from.substring(1);
|
||||
if ( from.startsWith( "," ) ) {
|
||||
from = from.substring( 1 );
|
||||
}
|
||||
else if ( from.startsWith(" inner join") ){
|
||||
from = from.substring(11);
|
||||
else if ( from.startsWith( " inner join" ) ) {
|
||||
from = from.substring( 11 );
|
||||
}
|
||||
|
||||
buf.append( select.toString() )
|
||||
.append(" from")
|
||||
.append(from);
|
||||
.append( " from" )
|
||||
.append( from );
|
||||
|
||||
String outerJoinsAfterWhere = joins.toWhereFragmentString().trim();
|
||||
String whereConditions = where.toString().trim();
|
||||
boolean hasOuterJoinsAfterWhere = outerJoinsAfterWhere.length() > 0;
|
||||
boolean hasWhereConditions = whereConditions.length() > 0;
|
||||
if (hasOuterJoinsAfterWhere || hasWhereConditions) {
|
||||
buf.append(" where ");
|
||||
if (hasOuterJoinsAfterWhere) {
|
||||
buf.append( outerJoinsAfterWhere.substring(4) );
|
||||
if ( hasOuterJoinsAfterWhere || hasWhereConditions ) {
|
||||
buf.append( " where " );
|
||||
if ( hasOuterJoinsAfterWhere ) {
|
||||
buf.append( outerJoinsAfterWhere.substring( 4 ) );
|
||||
}
|
||||
if (hasWhereConditions) {
|
||||
if (hasOuterJoinsAfterWhere) {
|
||||
buf.append(" and (");
|
||||
if ( hasWhereConditions ) {
|
||||
if ( hasOuterJoinsAfterWhere ) {
|
||||
buf.append( " and (" );
|
||||
}
|
||||
buf.append(whereConditions);
|
||||
if (hasOuterJoinsAfterWhere) {
|
||||
buf.append(")");
|
||||
buf.append( whereConditions );
|
||||
if ( hasOuterJoinsAfterWhere ) {
|
||||
buf.append( ")" );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ( groupBy.length() > 0 ) {
|
||||
buf.append(" group by ").append( groupBy.toString() );
|
||||
buf.append( " group by " ).append( groupBy.toString() );
|
||||
}
|
||||
if ( having.length() > 0 ) {
|
||||
buf.append(" having ").append( having.toString() );
|
||||
buf.append( " having " ).append( having.toString() );
|
||||
}
|
||||
if ( orderBy.length() > 0 ) {
|
||||
buf.append(" order by ").append( orderBy.toString() );
|
||||
buf.append( " order by " ).append( orderBy.toString() );
|
||||
}
|
||||
|
||||
return dialect.transformSelectString( buf.toString() );
|
||||
}
|
||||
|
||||
private static void appendTokens(StringBuilder buf, Iterator iter) {
|
||||
boolean lastSpaceable=true;
|
||||
boolean lastQuoted=false;
|
||||
boolean lastSpaceable = true;
|
||||
boolean lastQuoted = false;
|
||||
while ( iter.hasNext() ) {
|
||||
String token = (String) iter.next();
|
||||
boolean spaceable = !DONT_SPACE_TOKENS.contains(token);
|
||||
boolean quoted = token.startsWith("'");
|
||||
if (spaceable && lastSpaceable) {
|
||||
if ( !quoted || !lastQuoted ) buf.append(' ');
|
||||
boolean spaceable = !DONT_SPACE_TOKENS.contains( token );
|
||||
boolean quoted = token.startsWith( "'" );
|
||||
if ( spaceable && lastSpaceable ) {
|
||||
if ( !quoted || !lastQuoted ) {
|
||||
buf.append( ' ' );
|
||||
}
|
||||
}
|
||||
lastSpaceable = spaceable;
|
||||
buf.append(token);
|
||||
lastQuoted = token.endsWith("'");
|
||||
buf.append( token );
|
||||
lastQuoted = token.endsWith( "'" );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -213,7 +218,7 @@ public class QuerySelect {
|
|||
}
|
||||
|
||||
public QuerySelect copy() {
|
||||
QuerySelect copy = new QuerySelect(dialect);
|
||||
QuerySelect copy = new QuerySelect( dialect );
|
||||
copy.joins = this.joins.copy();
|
||||
copy.select.append( this.select.toString() );
|
||||
copy.where.append( this.where.toString() );
|
||||
|
|
|
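QuerySelect.appendTokens() above only inserts a space between two ordinary tokens and consults DONT_SPACE_TOKENS so operators, dots and parentheses stay attached to their neighbours. A minimal hedged version of that joining rule:

import java.util.List;
import java.util.Set;

final class TokenJoinSketch {
	private static final Set<String> DONT_SPACE = Set.of( ".", "(", ")", "=", "<", ">", "+", "-", "*", "/" );

	static String join(List<String> tokens) {
		StringBuilder buf = new StringBuilder();
		boolean lastSpaceable = true;
		for ( String token : tokens ) {
			boolean spaceable = !DONT_SPACE.contains( token );
			// only two consecutive "ordinary" tokens get a separating space
			if ( spaceable && lastSpaceable && buf.length() > 0 ) {
				buf.append( ' ' );
			}
			buf.append( token );
			lastSpaceable = spaceable;
		}
		return buf.toString();
	}
}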
@ -39,15 +39,15 @@ import org.hibernate.internal.util.StringHelper;
|
|||
*/
|
||||
public class SelectFragment {
|
||||
private String suffix;
|
||||
private List columns = new ArrayList();
|
||||
private List<String> columns = new ArrayList<String>();
|
||||
//private List aliases = new ArrayList();
|
||||
private List columnAliases = new ArrayList();
|
||||
private List<String> columnAliases = new ArrayList<String>();
|
||||
private String extraSelectList;
|
||||
private String[] usedAliases;
|
||||
|
||||
public SelectFragment() {}
|
||||
|
||||
public List getColumns() {
|
||||
public List<String> getColumns() {
|
||||
return columns;
|
||||
}
|
||||
|
||||
|
@ -117,7 +117,9 @@ public class SelectFragment {
|
|||
|
||||
public SelectFragment addFormulas(String tableAlias, String[] formulas, String[] formulaAliases) {
|
||||
for ( int i=0; i<formulas.length; i++ ) {
|
||||
if ( formulas[i]!=null ) addFormula( tableAlias, formulas[i], formulaAliases[i] );
|
||||
if ( formulas[i]!=null ) {
|
||||
addFormula( tableAlias, formulas[i], formulaAliases[i] );
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
@ -140,16 +142,16 @@ public class SelectFragment {
|
|||
|
||||
public String toFragmentString() {
|
||||
StringBuilder buf = new StringBuilder( columns.size() * 10 );
|
||||
Iterator iter = columns.iterator();
|
||||
Iterator columnAliasIter = columnAliases.iterator();
|
||||
Iterator<String> iter = columns.iterator();
|
||||
Iterator<String> columnAliasIter = columnAliases.iterator();
|
||||
//HashMap columnsUnique = new HashMap();
|
||||
HashSet columnsUnique = new HashSet();
|
||||
HashSet<String> columnsUnique = new HashSet<String>();
|
||||
if (usedAliases!=null) {
|
||||
columnsUnique.addAll( Arrays.asList(usedAliases) );
|
||||
}
|
||||
while ( iter.hasNext() ) {
|
||||
String column = (String) iter.next();
|
||||
String columnAlias = (String) columnAliasIter.next();
|
||||
String column = iter.next();
|
||||
String columnAlias = columnAliasIter.next();
|
||||
//TODO: eventually put this back in, once we think all is fixed
|
||||
//Object otherAlias = columnsUnique.put(qualifiedColumn, columnAlias);
|
||||
/*if ( otherAlias!=null && !columnAlias.equals(otherAlias) ) {
|
||||
|
|
|
@ -23,9 +23,6 @@
|
|||
*
|
||||
*/
|
||||
package org.hibernate.sql;
|
||||
import org.hibernate.LockMode;
|
||||
import org.hibernate.LockOptions;
|
||||
import org.hibernate.dialect.Dialect;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
|
@ -35,6 +32,10 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.hibernate.LockMode;
|
||||
import org.hibernate.LockOptions;
|
||||
import org.hibernate.dialect.Dialect;
|
||||
|
||||
/**
|
||||
* An SQL <tt>SELECT</tt> statement with no table joins
|
||||
*
|
||||
|
@ -51,16 +52,16 @@ public class SimpleSelect {
|
|||
private String tableName;
|
||||
private String orderBy;
|
||||
private Dialect dialect;
|
||||
private LockOptions lockOptions = new LockOptions( LockMode.READ);
|
||||
private LockOptions lockOptions = new LockOptions( LockMode.READ );
|
||||
private String comment;
|
||||
|
||||
private List columns = new ArrayList();
|
||||
private Map aliases = new HashMap();
|
||||
private List whereTokens = new ArrayList();
|
||||
private List<String> columns = new ArrayList<String>();
|
||||
private Map<String, String> aliases = new HashMap<String, String>();
|
||||
private List<String> whereTokens = new ArrayList<String>();
|
||||
|
||||
public SimpleSelect addColumns(String[] columnNames, String[] columnAliases) {
|
||||
for ( int i=0; i<columnNames.length; i++ ) {
|
||||
if ( columnNames[i]!=null ) {
|
||||
for ( int i = 0; i < columnNames.length; i++ ) {
|
||||
if ( columnNames[i] != null ) {
|
||||
addColumn( columnNames[i], columnAliases[i] );
|
||||
}
|
||||
}
|
||||
|
@ -68,8 +69,8 @@ public class SimpleSelect {
|
|||
}
|
||||
|
||||
public SimpleSelect addColumns(String[] columns, String[] aliases, boolean[] ignore) {
|
||||
for ( int i=0; i<ignore.length; i++ ) {
|
||||
if ( !ignore[i] && columns[i]!=null ) {
|
||||
for ( int i = 0; i < ignore.length; i++ ) {
|
||||
if ( !ignore[i] && columns[i] != null ) {
|
||||
addColumn( columns[i], aliases[i] );
|
||||
}
|
||||
}
|
||||
|
@ -84,15 +85,16 @@ public class SimpleSelect {
|
|||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public SimpleSelect addColumn(String columnName) {
|
||||
columns.add(columnName);
|
||||
columns.add( columnName );
|
||||
//aliases.put( columnName, DEFAULT_ALIAS.toAliasString(columnName) );
|
||||
return this;
|
||||
}
|
||||
|
||||
public SimpleSelect addColumn(String columnName, String alias) {
|
||||
columns.add(columnName);
|
||||
aliases.put(columnName, alias);
|
||||
columns.add( columnName );
|
||||
aliases.put( columnName, alias );
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -101,8 +103,8 @@ public class SimpleSelect {
|
|||
return this;
|
||||
}
|
||||
|
||||
public SimpleSelect setLockOptions( LockOptions lockOptions ) {
|
||||
LockOptions.copy(lockOptions, this.lockOptions);
|
||||
public SimpleSelect setLockOptions(LockOptions lockOptions) {
|
||||
LockOptions.copy( lockOptions, this.lockOptions );
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -112,13 +114,13 @@ public class SimpleSelect {
|
|||
}
|
||||
|
||||
public SimpleSelect addWhereToken(String token) {
|
||||
whereTokens.add(token);
|
||||
whereTokens.add( token );
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
private void and() {
|
||||
if ( whereTokens.size()>0 ) {
|
||||
whereTokens.add("and");
|
||||
if ( whereTokens.size() > 0 ) {
|
||||
whereTokens.add( "and" );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -135,7 +137,7 @@ public class SimpleSelect {
|
|||
}
|
||||
|
||||
public SimpleSelect addCondition(String[] lhs, String op, String[] rhs) {
|
||||
for ( int i=0; i<lhs.length; i++ ) {
|
||||
for ( int i = 0; i < lhs.length; i++ ) {
|
||||
addCondition( lhs[i], op, rhs[i] );
|
||||
}
|
||||
return this;
|
||||
|
@ -151,49 +153,51 @@ public class SimpleSelect {
|
|||
}
|
||||
|
||||
public String toStatementString() {
|
||||
StringBuilder buf = new StringBuilder(
|
||||
columns.size()*10 +
|
||||
tableName.length() +
|
||||
whereTokens.size() * 10 +
|
||||
10
|
||||
StringBuilder buf = new StringBuilder(
|
||||
columns.size() * 10 +
|
||||
tableName.length() +
|
||||
whereTokens.size() * 10 +
|
||||
10
|
||||
);
|
||||
|
||||
if ( comment!=null ) {
|
||||
buf.append("/* ").append(comment).append(" */ ");
|
||||
|
||||
if ( comment != null ) {
|
||||
buf.append( "/* " ).append( comment ).append( " */ " );
|
||||
}
|
||||
|
||||
buf.append("select ");
|
||||
Set uniqueColumns = new HashSet();
|
||||
Iterator iter = columns.iterator();
|
||||
|
||||
buf.append( "select " );
|
||||
Set<String> uniqueColumns = new HashSet<String>();
|
||||
Iterator<String> iter = columns.iterator();
|
||||
boolean appendComma = false;
|
||||
while ( iter.hasNext() ) {
|
||||
String col = (String) iter.next();
|
||||
String alias = (String) aliases.get(col);
|
||||
if ( uniqueColumns.add(alias==null ? col : alias) ) {
|
||||
if (appendComma) buf.append(", ");
|
||||
buf.append(col);
|
||||
if ( alias!=null && !alias.equals(col) ) {
|
||||
buf.append(" as ")
|
||||
.append(alias);
|
||||
String col = iter.next();
|
||||
String alias = aliases.get( col );
|
||||
if ( uniqueColumns.add( alias == null ? col : alias ) ) {
|
||||
if ( appendComma ) {
|
||||
buf.append( ", " );
|
||||
}
|
||||
buf.append( col );
|
||||
if ( alias != null && !alias.equals( col ) ) {
|
||||
buf.append( " as " )
|
||||
.append( alias );
|
||||
}
|
||||
appendComma = true;
|
||||
}
|
||||
}
|
||||
|
||||
buf.append(" from ")
|
||||
.append( dialect.appendLockHint(lockOptions, tableName) );
|
||||
|
||||
|
||||
buf.append( " from " )
|
||||
.append( dialect.appendLockHint( lockOptions, tableName ) );
|
||||
|
||||
if ( whereTokens.size() > 0 ) {
|
||||
buf.append(" where ")
|
||||
.append( toWhereClause() );
|
||||
buf.append( " where " )
|
||||
.append( toWhereClause() );
|
||||
}
|
||||
|
||||
if (orderBy!=null) {
|
||||
buf.append(orderBy);
|
||||
|
||||
if ( orderBy != null ) {
|
||||
buf.append( orderBy );
|
||||
}
|
||||
|
||||
if (lockOptions!=null) {
|
||||
buf.append( dialect.getForUpdateString(lockOptions) );
|
||||
|
||||
if ( lockOptions != null ) {
|
||||
buf.append( dialect.getForUpdateString( lockOptions ) );
|
||||
}
|
||||
|
||||
return dialect.transformSelectString( buf.toString() );
|
||||
|
@ -201,11 +205,11 @@ public class SimpleSelect {
|
|||
|
||||
public String toWhereClause() {
|
||||
StringBuilder buf = new StringBuilder( whereTokens.size() * 5 );
|
||||
Iterator iter = whereTokens.iterator();
|
||||
Iterator<String> iter = whereTokens.iterator();
|
||||
while ( iter.hasNext() ) {
|
||||
buf.append( iter.next() );
|
||||
if ( iter.hasNext() ) {
|
||||
buf.append(' ');
|
||||
buf.append( ' ' );
|
||||
}
|
||||
}
|
||||
return buf.toString();
|
||||
|
|
|
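toStatementString() above renders the column list while a Set drops columns whose alias was already emitted, then appends the from, where and order by pieces only when they are present. A hedged sketch of the column-list part:

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringJoiner;

final class SelectListSketch {
	// Render "col as alias" items, skipping columns whose alias (or name) was already emitted.
	static String columnList(List<String> columns, Map<String, String> aliases) {
		Set<String> seen = new LinkedHashSet<>();
		StringJoiner list = new StringJoiner( ", " );
		for ( String col : columns ) {
			String alias = aliases.get( col );
			if ( seen.add( alias == null ? col : alias ) ) {
				list.add( alias == null || alias.equals( col ) ? col : col + " as " + alias );
			}
		}
		return "select " + list;
	}
}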
@ -119,7 +119,7 @@ public final class Template {
|
|||
* @deprecated Only intended for annotations usage; use {@link #renderWhereStringTemplate(String, String, Dialect, SQLFunctionRegistry)} instead
|
||||
*/
|
||||
@Deprecated
|
||||
@SuppressWarnings({ "JavaDoc" })
|
||||
@SuppressWarnings({ "JavaDoc" })
|
||||
public static String renderWhereStringTemplate(String sqlWhereString, String placeholder, Dialect dialect) {
|
||||
return renderWhereStringTemplate(
|
||||
sqlWhereString,
|
||||
|
@ -654,7 +654,7 @@ public final class Template {
|
|||
* @deprecated Use {@link #translateOrderBy} instead
|
||||
*/
|
||||
@Deprecated
|
||||
public static String renderOrderByStringTemplate(
|
||||
public static String renderOrderByStringTemplate(
|
||||
String orderByFragment,
|
||||
Dialect dialect,
|
||||
SQLFunctionRegistry functionRegistry) {
|
||||
|
|
|
@ -46,7 +46,7 @@ import antlr.collections.AST;
|
|||
* @author Steve Ebersole
|
||||
*/
|
||||
public class OrderByFragmentParser extends GeneratedOrderByFragmentParser {
|
||||
private static final Logger LOG = Logger.getLogger(OrderByFragmentParser.class.getName());
|
||||
private static final Logger LOG = Logger.getLogger( OrderByFragmentParser.class.getName() );
|
||||
|
||||
private final TranslationContext context;
|
||||
|
||||
|
@ -63,7 +63,7 @@ public class OrderByFragmentParser extends GeneratedOrderByFragmentParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected AST quotedIdentifier(AST ident) {
|
||||
protected AST quotedIdentifier(AST ident) {
|
||||
/*
|
||||
* Semantic action used during recognition of quoted identifiers (quoted column names)
|
||||
*/
|
||||
|
@ -74,7 +74,7 @@ public class OrderByFragmentParser extends GeneratedOrderByFragmentParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected AST quotedString(AST ident) {
|
||||
protected AST quotedString(AST ident) {
|
||||
/*
|
||||
* Semantic action used during recognition of quoted strings (string literals)
|
||||
*/
|
||||
|
@ -103,19 +103,19 @@ public class OrderByFragmentParser extends GeneratedOrderByFragmentParser {
|
|||
}
|
||||
else {
|
||||
// if function.hasParenthesesIfNoArguments() is true, then assume the node is not a function
|
||||
return ! function.hasParenthesesIfNoArguments();
|
||||
return !function.hasParenthesesIfNoArguments();
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
protected AST resolveFunction(AST ast) {
|
||||
protected AST resolveFunction(AST ast) {
|
||||
/*
|
||||
* Semantic action used during recognition of a *known* function
|
||||
*/
|
||||
AST child = ast.getFirstChild();
|
||||
if ( child != null ) {
|
||||
assert "{param list}".equals( child.getText() );
|
||||
assert "{param list}".equals( child.getText() );
|
||||
child = child.getFirstChild();
|
||||
}
|
||||
|
||||
|
@ -173,7 +173,7 @@ public class OrderByFragmentParser extends GeneratedOrderByFragmentParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected AST resolveIdent(AST ident) {
|
||||
protected AST resolveIdent(AST ident) {
|
||||
/*
|
||||
* Semantic action used during recognition of an identifier. This identifier might be a column name, it might
|
||||
* be a property name.
|
||||
|
@ -183,7 +183,7 @@ public class OrderByFragmentParser extends GeneratedOrderByFragmentParser {
|
|||
try {
|
||||
sqlValueReferences = context.getColumnMapper().map( text );
|
||||
}
|
||||
catch( Throwable t ) {
|
||||
catch (Throwable t) {
|
||||
sqlValueReferences = null;
|
||||
}
|
||||
|
||||
|
@ -261,9 +261,9 @@ public class OrderByFragmentParser extends GeneratedOrderByFragmentParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
protected AST postProcessSortSpecification(AST sortSpec) {
|
||||
protected AST postProcessSortSpecification(AST sortSpec) {
|
||||
assert SORT_SPEC == sortSpec.getType();
|
||||
SortSpecification sortSpecification = ( SortSpecification ) sortSpec;
|
||||
SortSpecification sortSpecification = (SortSpecification) sortSpec;
|
||||
AST sortKey = sortSpecification.getSortKey();
|
||||
if ( IDENT_LIST == sortKey.getFirstChild().getType() ) {
|
||||
AST identList = sortKey.getFirstChild();
|
||||
|
@ -299,11 +299,10 @@ public class OrderByFragmentParser extends GeneratedOrderByFragmentParser {
|
|||
if ( orderingSpecification != null ) {
|
||||
sortSpecification.addChild( orderingSpecification );
|
||||
}
|
||||
return ( SortSpecification ) sortSpecification;
|
||||
return (SortSpecification) sortSpecification;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// trace logging ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
private int traceDepth = 0;
|
||||
|
@ -314,8 +313,8 @@ public class OrderByFragmentParser extends GeneratedOrderByFragmentParser {
|
|||
if ( inputState.guessing > 0 ) {
|
||||
return;
|
||||
}
|
||||
String prefix = StringHelper.repeat( '-', (traceDepth++ * 2) ) + "-> ";
|
||||
LOG.trace(prefix + ruleName);
|
||||
String prefix = StringHelper.repeat( '-', ( traceDepth++ * 2 ) ) + "-> ";
|
||||
LOG.trace( prefix + ruleName );
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -323,8 +322,8 @@ public class OrderByFragmentParser extends GeneratedOrderByFragmentParser {
|
|||
if ( inputState.guessing > 0 ) {
|
||||
return;
|
||||
}
|
||||
String prefix = "<-" + StringHelper.repeat( '-', (--traceDepth * 2) ) + " ";
|
||||
LOG.trace(prefix + ruleName);
|
||||
String prefix = "<-" + StringHelper.repeat( '-', ( --traceDepth * 2 ) ) + " ";
|
||||
LOG.trace( prefix + ruleName );
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -51,22 +51,22 @@ public class OrderByFragmentRenderer extends GeneratedOrderByFragmentRenderer {
}

@Override
protected void out(AST ast) {
out( ( ( Node ) ast ).getRenderableText() );
protected void out(AST ast) {
out( ( (Node) ast ).getRenderableText() );
}


// handle trace logging ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

private int traceDepth = 0;
private int traceDepth = 0;

@Override
public void traceIn(String ruleName, AST tree) {
public void traceIn(String ruleName, AST tree) {
if ( inputState.guessing > 0 ) {
return;
}
String prefix = StringHelper.repeat( '-', (traceDepth++ * 2) ) + "-> ";
String traceText = ruleName + " (" + buildTraceNodeName(tree) + ")";
String prefix = StringHelper.repeat( '-', ( traceDepth++ * 2 ) ) + "-> ";
String traceText = ruleName + " (" + buildTraceNodeName( tree ) + ")";
LOG.trace( prefix + traceText );
}

@ -77,17 +77,20 @@ public class OrderByFragmentRenderer extends GeneratedOrderByFragmentRenderer {
}

@Override
public void traceOut(String ruleName, AST tree) {
public void traceOut(String ruleName, AST tree) {
if ( inputState.guessing > 0 ) {
return;
}
String prefix = "<-" + StringHelper.repeat( '-', (--traceDepth * 2) ) + " ";
String prefix = "<-" + StringHelper.repeat( '-', ( --traceDepth * 2 ) ) + " ";
LOG.trace( prefix + ruleName );
}

@Override
protected String renderOrderByElement(String expression, String collation, String order, String nulls) {
final NullPrecedence nullPrecedence = NullPrecedence.parse( nulls, sessionFactory.getSettings().getDefaultNullPrecedence() );
final NullPrecedence nullPrecedence = NullPrecedence.parse(
nulls,
sessionFactory.getSessionFactoryOptions().getDefaultNullPrecedence()
);
return sessionFactory.getDialect().renderOrderByElement( expression, collation, order, nullPrecedence );
}
}

@ -24,7 +24,6 @@
package org.hibernate.stat.internal;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;

@ -140,11 +139,11 @@ public class ConcurrentNaturalIdCacheStatisticsImpl extends CategorizedStatistic
}

@Override
@SuppressWarnings("unchecked")
public Map getEntries() {
final Map map = new HashMap();
final Iterator iter = this.region.toMap().entrySet().iterator();
while ( iter.hasNext() ) {
final Map.Entry me = (Map.Entry) iter.next();
for ( Object o : this.region.toMap().entrySet() ) {
final Map.Entry me = (Map.Entry) o;
map.put( ( (NaturalIdCacheKey) me.getKey() ).getNaturalIdValues(), me.getValue() );
}
return map;

@ -189,8 +188,12 @@ public class ConcurrentNaturalIdCacheStatisticsImpl extends CategorizedStatistic
this.readLock.lock();
try {
// Less chances for a context switch
for ( long old = this.executionMinTime.get(); time < old && !this.executionMinTime.compareAndSet( old, time ); old = this.executionMinTime.get() ) {;}
for ( long old = this.executionMaxTime.get(); time > old && !this.executionMaxTime.compareAndSet( old, time ); old = this.executionMaxTime.get() ) {;}
//noinspection StatementWithEmptyBody
for ( long old = this.executionMinTime.get(); time < old && !this.executionMinTime.compareAndSet( old, time ); old = this.executionMinTime.get() ) {
}
//noinspection StatementWithEmptyBody
for ( long old = this.executionMaxTime.get(); time > old && !this.executionMaxTime.compareAndSet( old, time ); old = this.executionMaxTime.get() ) {
}
this.executionCount.getAndIncrement();
this.totalExecutionTime.addAndGet( time );
}

@ -39,7 +39,7 @@ import org.hibernate.stat.QueryStatistics;
import org.hibernate.stat.SecondLevelCacheStatistics;
import org.hibernate.stat.spi.StatisticsImplementor;

import org.jboss.logging.Logger;
import static org.hibernate.internal.CoreLogging.messageLogger;

/**
* Implementation of {@link org.hibernate.stat.Statistics} based on the {@link java.util.concurrent} package.

@ -48,8 +48,7 @@ import org.jboss.logging.Logger;
*/
@SuppressWarnings({ "unchecked" })
public class ConcurrentStatisticsImpl implements StatisticsImplementor, Service {

private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, ConcurrentStatisticsImpl.class.getName());
private static final CoreMessageLogger LOG = messageLogger( ConcurrentStatisticsImpl.class );

private SessionFactoryImplementor sessionFactory;

@ -393,7 +392,8 @@ public class ConcurrentStatisticsImpl implements StatisticsImplementor, Service
@Override
public void naturalIdQueryExecuted(String regionName, long time) {
naturalIdQueryExecutionCount.getAndIncrement();
boolean isLongestQuery = false;
boolean isLongestQuery;
//noinspection StatementWithEmptyBody
for ( long old = naturalIdQueryExecutionMaxTime.get();
( isLongestQuery = time > old ) && ( !naturalIdQueryExecutionMaxTime.compareAndSet( old, time ) );
old = naturalIdQueryExecutionMaxTime.get() ) {

@ -411,7 +411,8 @@ public class ConcurrentStatisticsImpl implements StatisticsImplementor, Service
public void queryExecuted(String hql, int rows, long time) {
LOG.hql(hql, time, (long) rows );
queryExecutionCount.getAndIncrement();
boolean isLongestQuery = false;
boolean isLongestQuery;
//noinspection StatementWithEmptyBody
for ( long old = queryExecutionMaxTime.get();
( isLongestQuery = time > old ) && ( !queryExecutionMaxTime.compareAndSet( old, time ) );
old = queryExecutionMaxTime.get() ) {

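The empty-bodied loops in the statistics classes above are a lock-free "track the maximum" update over an AtomicLong: keep retrying compareAndSet until either the stored value is already at least as large as the new sample or the CAS succeeds. A minimal standalone sketch of the same idiom follows; the class and field names here are illustrative and not taken from the patched classes.

import java.util.concurrent.atomic.AtomicLong;

public class MaxTimeTracker {
	private final AtomicLong maxTime = new AtomicLong();

	public void record(long time) {
		// Retry until the stored maximum is already >= time or our compareAndSet wins.
		//noinspection StatementWithEmptyBody
		for ( long old = maxTime.get(); time > old && !maxTime.compareAndSet( old, time ); old = maxTime.get() ) {
		}
	}

	public long getMax() {
		return maxTime.get();
	}
}
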
@ -29,12 +29,13 @@ public class ImportSqlCommandExtractorInitiator implements StandardServiceInitia
return instantiateExplicitCommandExtractor( extractorClassName, classLoaderService );
}

private ImportSqlCommandExtractor instantiateExplicitCommandExtractor(String extractorClassName,
ClassLoaderService classLoaderService) {
private ImportSqlCommandExtractor instantiateExplicitCommandExtractor(
String extractorClassName,
ClassLoaderService classLoaderService) {
try {
return (ImportSqlCommandExtractor) classLoaderService.classForName( extractorClassName ).newInstance();
}
catch ( Exception e ) {
catch (Exception e) {
throw new HibernateException(
"Could not instantiate import sql command extractor [" + extractorClassName + "]", e
);

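For context, the initiator above instantiates by class name whatever ImportSqlCommandExtractor implementation the configuration points at. The following is only a sketch of what such a custom implementation could look like; it assumes the interface exposes a single String[] extractCommands(Reader) method (as the call site in SchemaExport.importScript() further below suggests), and the class name and import path used here are assumptions, not taken from this commit.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;

import org.hibernate.tool.hbm2ddl.ImportSqlCommandExtractor;

// Hypothetical extractor: every non-empty, non-comment line is treated as one SQL command.
public class LineByLineSqlCommandExtractor implements ImportSqlCommandExtractor {
	@Override
	public String[] extractCommands(Reader reader) {
		final List<String> commands = new ArrayList<String>();
		final BufferedReader buffered = new BufferedReader( reader );
		try {
			String line;
			while ( ( line = buffered.readLine() ) != null ) {
				final String trimmed = line.trim();
				if ( !trimmed.isEmpty() && !trimmed.startsWith( "--" ) ) {
					commands.add( trimmed );
				}
			}
		}
		catch (IOException e) {
			throw new RuntimeException( "Unable to read import script", e );
		}
		return commands.toArray( new String[ commands.size() ] );
	}
}
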
@ -73,7 +73,7 @@ import org.hibernate.tool.schema.spi.SchemaManagementTool;
* @author Steve Ebersole
*/
public class SchemaExport {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( SchemaExport.class );
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( SchemaExport.class );

private static final String DEFAULT_IMPORT_FILE = "/import.sql";

@ -236,6 +236,7 @@ public class SchemaExport {
*
* @param metadata The metadata object holding the mapping info to be exported
* @param connection The JDBC connection to use.
*
* @throws HibernateException Indicates problem preparing for schema export.
*/
public SchemaExport(MetadataImplementor metadata, Connection connection) throws HibernateException {

@ -295,6 +296,7 @@ public class SchemaExport {
* For generating a export script file, this is the file which will be written.
*
* @param filename The name of the file to which to write the export script.
*
* @return this
*/
public SchemaExport setOutputFile(String filename) {

@ -306,6 +308,7 @@ public class SchemaExport {
* Set the end of statement delimiter
*
* @param delimiter The delimiter
*
* @return this
*/
public SchemaExport setDelimiter(String delimiter) {

@ -317,6 +320,7 @@ public class SchemaExport {
* Should we format the sql strings?
*
* @param format Should we format SQL strings
*
* @return this
*/
public SchemaExport setFormat(boolean format) {

@ -328,6 +332,7 @@ public class SchemaExport {
* Set <i>import.sql</i> command extractor. By default {@link SingleLineSqlCommandExtractor} is used.
*
* @param importSqlCommandExtractor <i>import.sql</i> command extractor.
*
* @return this
*/
public SchemaExport setImportSqlCommandExtractor(ImportSqlCommandExtractor importSqlCommandExtractor) {

@ -339,6 +344,7 @@ public class SchemaExport {
* Should we stop once an error occurs?
*
* @param haltOnError True if export should stop after error.
*
* @return this
*/
public SchemaExport setHaltOnError(boolean haltOnError) {

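Since each setter above returns this, configuration can be chained. The sketch below is only an illustration built from members visible in this diff (the constructor taking MetadataImplementor and Connection, the fluent setters, and the four-flag execute used by the command-line entry point further down in this file's diff); the wrapper class name, file name, and flag values are made up.

import java.sql.Connection;

import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.tool.hbm2ddl.SchemaExport;

public class SchemaExportUsageSketch {
	// metadata and connection are assumed to be built elsewhere.
	public static void exportSchema(MetadataImplementor metadata, Connection connection) {
		SchemaExport schemaExport = new SchemaExport( metadata, connection )
				.setOutputFile( "target/schema.sql" )   // illustrative file name
				.setDelimiter( ";" )
				.setFormat( true )
				.setHaltOnError( false );
		// Four-flag form (script, export, drop, create), matching the command-line call below.
		schemaExport.execute( true, false, false, true );
	}
}
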
@ -399,7 +405,7 @@ public class SchemaExport {
}

public void execute(Target output, Type type) {
if ( (outputFile == null && output == Target.NONE) || type == SchemaExport.Type.NONE ) {
if ( ( outputFile == null && output == Target.NONE ) || type == SchemaExport.Type.NONE ) {
return;
}
exceptions.clear();

@ -407,14 +413,14 @@ public class SchemaExport {
LOG.runningHbm2ddlSchemaExport();

final List<NamedReader> importFileReaders = new ArrayList<NamedReader>();
for ( String currentFile : importFiles.split(",") ) {
for ( String currentFile : importFiles.split( "," ) ) {
try {
final String resourceName = currentFile.trim();
InputStream stream = ConfigHelper.getResourceAsStream( resourceName );
importFileReaders.add( new NamedReader( resourceName, stream ) );
}
catch ( HibernateException e ) {
LOG.debugf("Import file not found: %s", currentFile);
catch (HibernateException e) {
LOG.debugf( "Import file not found: %s", currentFile );
}
}

@ -437,7 +443,7 @@ public class SchemaExport {
}
if ( type.doCreate() ) {
perform( createSQL, exporters );
if ( ! importFileReaders.isEmpty() ) {
if ( !importFileReaders.isEmpty() ) {
for ( NamedReader namedReader : importFileReaders ) {
importScript( namedReader, exporters );
}

@ -466,14 +472,14 @@ public class SchemaExport {
catch (Exception ignore) {
}
}
LOG.schemaExportComplete();
LOG.schemaExportComplete();
}
}

private void perform(String[] sqlCommands, List<Exporter> exporters) {
for ( String sqlCommand : sqlCommands ) {
String formatted = formatter.format( sqlCommand );
if ( delimiter != null ) {
if ( delimiter != null ) {
formatted += delimiter;
}
sqlStatementLogger.logStatement( sqlCommand, formatter );

@ -496,11 +502,11 @@ public class SchemaExport {
private void importScript(NamedReader namedReader, List<Exporter> exporters) throws Exception {
BufferedReader reader = new BufferedReader( namedReader.getReader() );
String[] statements = importSqlCommandExtractor.extractCommands( reader );
if (statements != null) {
if ( statements != null ) {
for ( String statement : statements ) {
if ( statement != null ) {
String trimmedSql = statement.trim();
if ( trimmedSql.endsWith( ";" )) {
if ( trimmedSql.endsWith( ";" ) ) {
trimmedSql = trimmedSql.substring( 0, statement.length() - 1 );
}
if ( !StringHelper.isEmpty( trimmedSql ) ) {

@ -511,14 +517,16 @@ public class SchemaExport {
}
}
}
catch ( Exception e ) {
if (haltOnError) {
throw new ImportScriptException( "Error during statement execution (file: '"
+ namedReader.getName() + "'): " + trimmedSql, e );
catch (Exception e) {
if ( haltOnError ) {
throw new ImportScriptException(
"Error during statement execution (file: '"
+ namedReader.getName() + "'): " + trimmedSql, e
);
}
exceptions.add(e);
LOG.unsuccessful(trimmedSql);
LOG.error(e.getMessage());
exceptions.add( e );
LOG.unsuccessful( trimmedSql );
LOG.error( e.getMessage() );
}
}
}

@ -557,14 +565,19 @@ public class SchemaExport {
.setDelimiter( commandLineArgs.delimiter )
.setImportSqlCommandExtractor( serviceRegistry.getService( ImportSqlCommandExtractor.class ) )
.setFormat( commandLineArgs.format );
schemaExport.execute( commandLineArgs.script, commandLineArgs.export, commandLineArgs.drop, commandLineArgs.create );
schemaExport.execute(
commandLineArgs.script,
commandLineArgs.export,
commandLineArgs.drop,
commandLineArgs.create
);
}
finally {
StandardServiceRegistryBuilder.destroy( serviceRegistry );
}
}
catch ( Exception e ) {
LOG.unableToCreateSchema( e );
catch (Exception e) {
LOG.unableToCreateSchema( e );
e.printStackTrace();
}
}

@ -197,7 +197,7 @@ public class SchemaExportTask extends MatchingTask {
* Execute the task
*/
@Override
public void execute() throws BuildException {
public void execute() throws BuildException {
try {
buildSchemaExport().execute( !quiet, !text, drop, create );
}

@ -64,7 +64,7 @@ import org.hibernate.tool.schema.spi.SchemaMigrator;
* @author Steve Ebersole
*/
public class SchemaUpdate {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( SchemaUpdate.class );
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( SchemaUpdate.class );

private final MetadataImplementor metadata;
private final ServiceRegistry serviceRegistry;

@ -113,9 +113,9 @@ public class SchemaUpdate {
public void execute(boolean script, boolean doUpdate) {
execute( Target.interpret( script, doUpdate ) );
}


public void execute(Target target) {
LOG.runningHbm2ddlSchemaUpdate();
LOG.runningHbm2ddlSchemaUpdate();

exceptions.clear();

@ -203,8 +203,8 @@ public class SchemaUpdate {
StandardServiceRegistryBuilder.destroy( serviceRegistry );
}
}
catch ( Exception e ) {
LOG.unableToRunSchemaUpdate(e);
catch (Exception e) {
LOG.unableToRunSchemaUpdate( e );
e.printStackTrace();
}
}

@ -51,7 +51,7 @@ import org.apache.tools.ant.types.FileSet;
|
|||
|
||||
/**
|
||||
* An Ant task for <tt>SchemaUpdate</tt>.
|
||||
*
|
||||
* <p/>
|
||||
* <pre>
|
||||
* <taskdef name="schemaupdate"
|
||||
* classname="org.hibernate.tool.hbm2ddl.SchemaUpdateTask"
|
||||
|
@ -66,8 +66,8 @@ import org.apache.tools.ant.types.FileSet;
|
|||
* </schemaupdate>
|
||||
* </pre>
|
||||
*
|
||||
* @see SchemaUpdate
|
||||
* @author Rong C Ou, Gavin King
|
||||
* @see SchemaUpdate
|
||||
*/
|
||||
public class SchemaUpdateTask extends MatchingTask {
|
||||
private List<FileSet> fileSets = new LinkedList<FileSet>();
|
||||
|
@ -81,7 +81,7 @@ public class SchemaUpdateTask extends MatchingTask {
|
|||
|
||||
private String implicitNamingStrategy = null;
|
||||
private String physicalNamingStrategy = null;
|
||||
|
||||
|
||||
@SuppressWarnings("UnusedDeclaration")
|
||||
public void addFileset(FileSet fileSet) {
|
||||
fileSets.add( fileSet );
|
||||
|
@ -95,10 +95,10 @@ public class SchemaUpdateTask extends MatchingTask {
|
|||
@SuppressWarnings("UnusedDeclaration")
|
||||
public void setProperties(File propertiesFile) {
|
||||
if ( !propertiesFile.exists() ) {
|
||||
throw new BuildException("Properties file: " + propertiesFile + " does not exist.");
|
||||
throw new BuildException( "Properties file: " + propertiesFile + " does not exist." );
|
||||
}
|
||||
|
||||
log("Using properties file " + propertiesFile, Project.MSG_DEBUG);
|
||||
log( "Using properties file " + propertiesFile, Project.MSG_DEBUG );
|
||||
this.propertiesFile = propertiesFile;
|
||||
}
|
||||
|
||||
|
@ -113,14 +113,14 @@ public class SchemaUpdateTask extends MatchingTask {
|
|||
}
|
||||
|
||||
/**
|
||||
* Enable "text-only" mode. The schema will not be updated in the database.
|
||||
* Enable "text-only" mode. The schema will not be updated in the database.
|
||||
*
|
||||
* @param text true to enable text-only mode
|
||||
*/
|
||||
*/
|
||||
@SuppressWarnings("UnusedDeclaration")
|
||||
public void setText(boolean text) {
|
||||
this.text = text;
|
||||
}
|
||||
public void setText(boolean text) {
|
||||
this.text = text;
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable "quiet" mode. The schema will not be written to standard out.
|
||||
|
@ -181,9 +181,9 @@ public class SchemaUpdateTask extends MatchingTask {
|
|||
* Execute the task
|
||||
*/
|
||||
@Override
|
||||
public void execute() throws BuildException {
|
||||
log("Running Hibernate Core SchemaUpdate.");
|
||||
log("This is an Ant task supporting only mapping files, if you want to use annotations see http://tools.hibernate.org.");
|
||||
public void execute() throws BuildException {
|
||||
log( "Running Hibernate Core SchemaUpdate." );
|
||||
log( "This is an Ant task supporting only mapping files, if you want to use annotations see http://tools.hibernate.org." );
|
||||
|
||||
try {
|
||||
final StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder();
|
||||
|
@ -204,19 +204,19 @@ public class SchemaUpdateTask extends MatchingTask {
|
|||
su.execute( !quiet, !text );
|
||||
}
|
||||
catch (HibernateException e) {
|
||||
throw new BuildException("Schema text failed: " + e.getMessage(), e);
|
||||
throw new BuildException( "Schema text failed: " + e.getMessage(), e );
|
||||
}
|
||||
catch (FileNotFoundException e) {
|
||||
throw new BuildException("File not found: " + e.getMessage(), e);
|
||||
throw new BuildException( "File not found: " + e.getMessage(), e );
|
||||
}
|
||||
catch (IOException e) {
|
||||
throw new BuildException("IOException : " + e.getMessage(), e);
|
||||
throw new BuildException( "IOException : " + e.getMessage(), e );
|
||||
}
|
||||
catch (BuildException e) {
|
||||
throw e;
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new BuildException(e);
|
||||
throw new BuildException( e );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -238,7 +238,7 @@ public class SchemaUpdateTask extends MatchingTask {
|
|||
|
||||
private void configure(MetadataSources metadataSources) {
|
||||
for ( String filename : collectFiles() ) {
|
||||
if ( filename.endsWith(".jar") ) {
|
||||
if ( filename.endsWith( ".jar" ) ) {
|
||||
metadataSources.addJar( new File( filename ) );
|
||||
}
|
||||
else {
|
||||
|
@ -273,7 +273,10 @@ public class SchemaUpdateTask extends MatchingTask {
|
|||
);
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new BuildException( "Unable to instantiate specified ImplicitNamingStrategy [" + implicitNamingStrategy + "]", e );
|
||||
throw new BuildException(
|
||||
"Unable to instantiate specified ImplicitNamingStrategy [" + implicitNamingStrategy + "]",
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -284,7 +287,10 @@ public class SchemaUpdateTask extends MatchingTask {
|
|||
);
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new BuildException( "Unable to instantiate specified PhysicalNamingStrategy [" + physicalNamingStrategy + "]", e );
|
||||
throw new BuildException(
|
||||
"Unable to instantiate specified PhysicalNamingStrategy [" + physicalNamingStrategy + "]",
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -239,4 +239,4 @@ public class SchemaValidator {
|
|||
StandardServiceRegistryBuilder.destroy( serviceRegistry );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -130,7 +130,7 @@ public class SchemaValidatorTask extends MatchingTask {
|
|||
* Execute the task
|
||||
*/
|
||||
@Override
|
||||
public void execute() throws BuildException {
|
||||
public void execute() throws BuildException {
|
||||
try {
|
||||
final StandardServiceRegistryBuilder ssrBuilder = new StandardServiceRegistryBuilder();
|
||||
configure( ssrBuilder );
|
||||
|
|
|
@ -23,18 +23,18 @@
|
|||
*/
|
||||
package org.hibernate.tool.hbm2ddl;
|
||||
|
||||
import org.hibernate.internal.CoreMessageLogger;
|
||||
import org.hibernate.mapping.ForeignKey;
|
||||
import org.jboss.logging.Logger;
|
||||
|
||||
import java.sql.DatabaseMetaData;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
import org.hibernate.internal.CoreMessageLogger;
|
||||
import org.hibernate.mapping.ForeignKey;
|
||||
|
||||
import static org.hibernate.internal.CoreLogging.messageLogger;
|
||||
|
||||
/**
|
||||
* JDBC table metadata
|
||||
*
|
||||
|
@ -42,32 +42,31 @@ import java.util.Map;
|
|||
* @author Max Rydahl Andersen
|
||||
*/
|
||||
public class TableMetadata {
|
||||
|
||||
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, TableMetadata.class.getName());
|
||||
private static final CoreMessageLogger LOG = messageLogger( TableMetadata.class );
|
||||
|
||||
private final String catalog;
|
||||
private final String schema;
|
||||
private final String name;
|
||||
private final Map columns = new HashMap();
|
||||
private final Map foreignKeys = new HashMap();
|
||||
private final Map indexes = new HashMap();
|
||||
private final Map<String, ColumnMetadata> columns = new HashMap<String, ColumnMetadata>();
|
||||
private final Map<String,ForeignKeyMetadata> foreignKeys = new HashMap<String,ForeignKeyMetadata>();
|
||||
private final Map<String, IndexMetadata> indexes = new HashMap<String, IndexMetadata>();
|
||||
|
||||
TableMetadata(ResultSet rs, DatabaseMetaData meta, boolean extras) throws SQLException {
|
||||
catalog = rs.getString("TABLE_CAT");
|
||||
schema = rs.getString("TABLE_SCHEM");
|
||||
name = rs.getString("TABLE_NAME");
|
||||
initColumns(meta);
|
||||
if (extras) {
|
||||
initForeignKeys(meta);
|
||||
initIndexes(meta);
|
||||
catalog = rs.getString( "TABLE_CAT" );
|
||||
schema = rs.getString( "TABLE_SCHEM" );
|
||||
name = rs.getString( "TABLE_NAME" );
|
||||
initColumns( meta );
|
||||
if ( extras ) {
|
||||
initForeignKeys( meta );
|
||||
initIndexes( meta );
|
||||
}
|
||||
String cat = catalog==null ? "" : catalog + '.';
|
||||
String schem = schema==null ? "" : schema + '.';
|
||||
LOG.tableFound( cat + schem + name );
|
||||
LOG.columns( columns.keySet() );
|
||||
if (extras) {
|
||||
LOG.foreignKeys( foreignKeys.keySet() );
|
||||
LOG.indexes( indexes.keySet() );
|
||||
String cat = catalog == null ? "" : catalog + '.';
|
||||
String schem = schema == null ? "" : schema + '.';
|
||||
LOG.tableFound( cat + schem + name );
|
||||
LOG.columns( columns.keySet() );
|
||||
if ( extras ) {
|
||||
LOG.foreignKeys( foreignKeys.keySet() );
|
||||
LOG.indexes( indexes.keySet() );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -84,22 +83,20 @@ public class TableMetadata {
|
|||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
public String toString() {
|
||||
return "TableMetadata(" + name + ')';
|
||||
}
|
||||
|
||||
public ColumnMetadata getColumnMetadata(String columnName) {
|
||||
return (ColumnMetadata) columns.get( columnName.toLowerCase(Locale.ROOT) );
|
||||
return columns.get( columnName.toLowerCase( Locale.ROOT ) );
|
||||
}
|
||||
|
||||
public ForeignKeyMetadata getForeignKeyMetadata(String keyName) {
|
||||
return (ForeignKeyMetadata) foreignKeys.get( keyName.toLowerCase(Locale.ROOT) );
|
||||
return foreignKeys.get( keyName.toLowerCase( Locale.ROOT ) );
|
||||
}
|
||||
|
||||
public ForeignKeyMetadata getForeignKeyMetadata(ForeignKey fk) {
|
||||
Iterator it = foreignKeys.values().iterator();
|
||||
while ( it.hasNext() ) {
|
||||
ForeignKeyMetadata existingFk = ( ForeignKeyMetadata ) it.next();
|
||||
for ( ForeignKeyMetadata existingFk : foreignKeys.values() ) {
|
||||
if ( existingFk.matches( fk ) ) {
|
||||
return existingFk;
|
||||
}
|
||||
|
@ -108,51 +105,51 @@ public class TableMetadata {
|
|||
}
|
||||
|
||||
public IndexMetadata getIndexMetadata(String indexName) {
|
||||
return (IndexMetadata) indexes.get( indexName.toLowerCase(Locale.ROOT) );
|
||||
return indexes.get( indexName.toLowerCase( Locale.ROOT ) );
|
||||
}
|
||||
|
||||
private void addForeignKey(ResultSet rs) throws SQLException {
|
||||
String fk = rs.getString("FK_NAME");
|
||||
String fk = rs.getString( "FK_NAME" );
|
||||
|
||||
if (fk == null) {
|
||||
if ( fk == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
ForeignKeyMetadata info = getForeignKeyMetadata(fk);
|
||||
if (info == null) {
|
||||
info = new ForeignKeyMetadata(rs);
|
||||
foreignKeys.put( info.getName().toLowerCase(Locale.ROOT), info );
|
||||
ForeignKeyMetadata info = getForeignKeyMetadata( fk );
|
||||
if ( info == null ) {
|
||||
info = new ForeignKeyMetadata( rs );
|
||||
foreignKeys.put( info.getName().toLowerCase( Locale.ROOT ), info );
|
||||
}
|
||||
|
||||
info.addReference( rs );
|
||||
}
|
||||
|
||||
private void addIndex(ResultSet rs) throws SQLException {
|
||||
String index = rs.getString("INDEX_NAME");
|
||||
String index = rs.getString( "INDEX_NAME" );
|
||||
|
||||
if (index == null) {
|
||||
if ( index == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
IndexMetadata info = getIndexMetadata(index);
|
||||
if (info == null) {
|
||||
info = new IndexMetadata(rs);
|
||||
indexes.put( info.getName().toLowerCase(Locale.ROOT), info );
|
||||
IndexMetadata info = getIndexMetadata( index );
|
||||
if ( info == null ) {
|
||||
info = new IndexMetadata( rs );
|
||||
indexes.put( info.getName().toLowerCase( Locale.ROOT ), info );
|
||||
}
|
||||
|
||||
info.addColumn( getColumnMetadata( rs.getString("COLUMN_NAME") ) );
|
||||
info.addColumn( getColumnMetadata( rs.getString( "COLUMN_NAME" ) ) );
|
||||
}
|
||||
|
||||
public void addColumn(ResultSet rs) throws SQLException {
|
||||
String column = rs.getString("COLUMN_NAME");
|
||||
String column = rs.getString( "COLUMN_NAME" );
|
||||
|
||||
if (column==null) {
|
||||
if ( column == null ) {
|
||||
return;
|
||||
}
|
||||
|
||||
if ( getColumnMetadata(column) == null ) {
|
||||
ColumnMetadata info = new ColumnMetadata(rs);
|
||||
columns.put( info.getName().toLowerCase(Locale.ROOT), info );
|
||||
if ( getColumnMetadata( column ) == null ) {
|
||||
ColumnMetadata info = new ColumnMetadata( rs );
|
||||
columns.put( info.getName().toLowerCase( Locale.ROOT ), info );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -160,13 +157,13 @@ public class TableMetadata {
|
|||
ResultSet rs = null;
|
||||
|
||||
try {
|
||||
rs = meta.getImportedKeys(catalog, schema, name);
|
||||
rs = meta.getImportedKeys( catalog, schema, name );
|
||||
while ( rs.next() ) {
|
||||
addForeignKey(rs);
|
||||
addForeignKey( rs );
|
||||
}
|
||||
}
|
||||
finally {
|
||||
if (rs != null) {
|
||||
if ( rs != null ) {
|
||||
rs.close();
|
||||
}
|
||||
}
|
||||
|
@ -176,17 +173,17 @@ public class TableMetadata {
|
|||
ResultSet rs = null;
|
||||
|
||||
try {
|
||||
rs = meta.getIndexInfo(catalog, schema, name, false, true);
|
||||
rs = meta.getIndexInfo( catalog, schema, name, false, true );
|
||||
|
||||
while ( rs.next() ) {
|
||||
if ( rs.getShort("TYPE") == DatabaseMetaData.tableIndexStatistic ) {
|
||||
if ( rs.getShort( "TYPE" ) == DatabaseMetaData.tableIndexStatistic ) {
|
||||
continue;
|
||||
}
|
||||
addIndex(rs);
|
||||
addIndex( rs );
|
||||
}
|
||||
}
|
||||
finally {
|
||||
if (rs != null) {
|
||||
if ( rs != null ) {
|
||||
rs.close();
|
||||
}
|
||||
}
|
||||
|
@ -196,13 +193,13 @@ public class TableMetadata {
|
|||
ResultSet rs = null;
|
||||
|
||||
try {
|
||||
rs = meta.getColumns(catalog, schema, name, "%");
|
||||
rs = meta.getColumns( catalog, schema, name, "%" );
|
||||
while ( rs.next() ) {
|
||||
addColumn(rs);
|
||||
addColumn( rs );
|
||||
}
|
||||
}
|
||||
finally {
|
||||
if (rs != null) {
|
||||
finally {
|
||||
if ( rs != null ) {
|
||||
rs.close();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -86,7 +86,7 @@ public abstract class BasicInstrumentationTask extends Task implements Instrumen
|
|||
protected abstract Instrumenter buildInstrumenter(Logger logger, Instrumenter.Options options);
|
||||
|
||||
@Override
|
||||
public void execute() throws BuildException {
|
||||
public void execute() throws BuildException {
|
||||
try {
|
||||
buildInstrumenter( logger, this )
|
||||
.execute( collectSpecifiedFiles() );
|
||||
|
|
|
@ -65,7 +65,7 @@ import org.hibernate.tool.instrument.BasicInstrumentationTask;
|
|||
*/
|
||||
public class InstrumentTask extends BasicInstrumentationTask {
|
||||
@Override
|
||||
protected Instrumenter buildInstrumenter(Logger logger, Instrumenter.Options options) {
|
||||
protected Instrumenter buildInstrumenter(Logger logger, Instrumenter.Options options) {
|
||||
return new JavassistInstrumenter( logger, options );
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,7 +31,7 @@ import java.util.Set;
|
|||
|
||||
import org.hibernate.internal.CoreMessageLogger;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
import static org.hibernate.internal.CoreLogging.messageLogger;
|
||||
|
||||
/**
|
||||
* Distinctions the result tuples in the final result based on the defined
|
||||
|
@ -43,11 +43,9 @@ import org.jboss.logging.Logger;
|
|||
* @author Steve Ebersole
|
||||
*/
|
||||
public class DistinctResultTransformer extends BasicTransformerAdapter {
|
||||
|
||||
public static final DistinctResultTransformer INSTANCE = new DistinctResultTransformer();
|
||||
|
||||
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class,
|
||||
DistinctResultTransformer.class.getName());
|
||||
private static final CoreMessageLogger LOG = messageLogger( DistinctResultTransformer.class );
|
||||
|
||||
/**
|
||||
* Helper class to handle distincting
|
||||
|
@ -59,20 +57,14 @@ public class DistinctResultTransformer extends BasicTransformerAdapter {
|
|||
this.entity = entity;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
public boolean equals(Object other) {
|
||||
return Identity.class.isInstance( other )
|
||||
&& this.entity == ( ( Identity ) other ).entity;
|
||||
&& this.entity == ( (Identity) other ).entity;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
public int hashCode() {
|
||||
return System.identityHashCode( entity );
|
||||
}
|
||||
}
|
||||
|
@ -87,11 +79,10 @@ public class DistinctResultTransformer extends BasicTransformerAdapter {
|
|||
* Uniquely distinct each tuple row here.
|
||||
*/
|
||||
@Override
|
||||
public List transformList(List list) {
|
||||
List result = new ArrayList( list.size() );
|
||||
Set distinct = new HashSet();
|
||||
for ( int i = 0; i < list.size(); i++ ) {
|
||||
Object entity = list.get( i );
|
||||
public List transformList(List list) {
|
||||
List<Object> result = new ArrayList<Object>( list.size() );
|
||||
Set<Identity> distinct = new HashSet<Identity>();
|
||||
for ( Object entity : list ) {
|
||||
if ( distinct.add( new Identity( entity ) ) ) {
|
||||
result.add( entity );
|
||||
}
|
||||
|
|
|
@ -50,7 +50,7 @@ public final class RootEntityResultTransformer extends BasicTransformerAdapter i
|
|||
* Return just the root entity from the row tuple.
|
||||
*/
|
||||
@Override
|
||||
public Object transformTuple(Object[] tuple, String[] aliases) {
|
||||
public Object transformTuple(Object[] tuple, String[] aliases) {
|
||||
return tuple[ tuple.length-1 ];
|
||||
}
|
||||
|
||||
|
|
|
@ -25,11 +25,9 @@
|
|||
package org.hibernate.transform;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Tranforms each result row from a tuple into a {@link List}, such that what
|
||||
* you end up with is a {@link List} of {@link List Lists}.
|
||||
* Tranforms each result row from a tuple into a {@link java.util.List} whose elements are each tuple value
|
||||
*/
|
||||
public class ToListResultTransformer extends BasicTransformerAdapter {
|
||||
public static final ToListResultTransformer INSTANCE = new ToListResultTransformer();
|
||||
|
|
|
@ -25,7 +25,6 @@ package org.hibernate.tuple;
|
|||
|
||||
import java.lang.annotation.Annotation;
|
||||
|
||||
import org.hibernate.HibernateException;
|
||||
|
||||
/**
|
||||
* A {@link ValueGeneration} based on a custom Java generator annotation type.
|
||||
|
@ -44,8 +43,8 @@ public interface AnnotationValueGeneration<A extends Annotation> extends ValueGe
|
|||
* @param propertyType the type of the property annotated with the generator annotation. Implementations may use
|
||||
* the type to determine the right {@link ValueGenerator} to be applied.
|
||||
*
|
||||
* @throws HibernateException in case an error occurred during initialization, e.g. if an implementation can't
|
||||
* create a value for the given property type.
|
||||
* @throws org.hibernate.HibernateException in case an error occurred during initialization, e.g. if
|
||||
* an implementation can't create a value for the given property type.
|
||||
*/
|
||||
void initialize(A annotation, Class<?> propertyType);
|
||||
}
|
||||
|
|
|
@ -299,7 +299,7 @@ public class ElementWrapper implements Element, Serializable {
|
|||
}
|
||||
|
||||
public Node selectSingleNode(String xpath) {
|
||||
return element.selectSingleNode( xpath );
|
||||
return element.selectSingleNode( xpath );
|
||||
}
|
||||
|
||||
public String valueOf(String xpath) {
|
||||
|
|
|
@ -35,7 +35,6 @@ import org.hibernate.type.Type;
|
|||
* @author Steve Ebersole
|
||||
*/
|
||||
public class IdentifierProperty extends AbstractAttribute implements IdentifierAttribute {
|
||||
|
||||
private boolean virtual;
|
||||
private boolean embedded;
|
||||
private IdentifierValue unsavedValue;
|
||||
|
@ -82,11 +81,11 @@ public class IdentifierProperty extends AbstractAttribute implements IdentifierA
|
|||
* @param identifierGenerator The generator to use for id value generation.
|
||||
*/
|
||||
public IdentifierProperty(
|
||||
Type type,
|
||||
boolean embedded,
|
||||
Type type,
|
||||
boolean embedded,
|
||||
boolean hasIdentifierMapper,
|
||||
IdentifierValue unsavedValue,
|
||||
IdentifierGenerator identifierGenerator) {
|
||||
IdentifierGenerator identifierGenerator) {
|
||||
super( null, type );
|
||||
this.virtual = true;
|
||||
this.embedded = embedded;
|
||||
|
|
|
@ -27,6 +27,8 @@ package org.hibernate.tuple;
|
|||
* Defines the basic contract of a Property within the runtime metamodel.
|
||||
*
|
||||
* @author Steve Ebersole
|
||||
*
|
||||
* @deprecated Use the direct {@link Attribute} hierarchy
|
||||
*/
|
||||
@Deprecated
|
||||
public interface Property extends Attribute {
|
||||
|
|
|
@ -64,6 +64,7 @@ public final class PropertyFactory {
|
|||
*
|
||||
* @param mappedEntity The mapping definition of the entity.
|
||||
* @param generator The identifier value generator to use for this identifier.
|
||||
*
|
||||
* @return The appropriate IdentifierProperty definition.
|
||||
*/
|
||||
public static IdentifierProperty buildIdentifierAttribute(
|
||||
|
@ -72,23 +73,23 @@ public final class PropertyFactory {
|
|||
String mappedUnsavedValue = mappedEntity.getIdentifier().getNullValue();
|
||||
Type type = mappedEntity.getIdentifier().getType();
|
||||
Property property = mappedEntity.getIdentifierProperty();
|
||||
|
||||
|
||||
IdentifierValue unsavedValue = UnsavedValueFactory.getUnsavedIdentifierValue(
|
||||
mappedUnsavedValue,
|
||||
getGetter( property ),
|
||||
type,
|
||||
getConstructor(mappedEntity)
|
||||
);
|
||||
getConstructor( mappedEntity )
|
||||
);
|
||||
|
||||
if ( property == null ) {
|
||||
// this is a virtual id property...
|
||||
return new IdentifierProperty(
|
||||
type,
|
||||
type,
|
||||
mappedEntity.hasEmbeddedIdentifier(),
|
||||
mappedEntity.hasIdentifierMapper(),
|
||||
unsavedValue,
|
||||
generator
|
||||
);
|
||||
);
|
||||
}
|
||||
else {
|
||||
return new IdentifierProperty(
|
||||
|
@ -98,7 +99,7 @@ public final class PropertyFactory {
|
|||
mappedEntity.hasEmbeddedIdentifier(),
|
||||
unsavedValue,
|
||||
generator
|
||||
);
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -108,6 +109,7 @@ public final class PropertyFactory {
|
|||
*
|
||||
* @param property The version mapping Property.
|
||||
* @param lazyAvailable Is property lazy loading currently available.
|
||||
*
|
||||
* @return The appropriate VersionProperty definition.
|
||||
*/
|
||||
public static VersionProperty buildVersionProperty(
|
||||
|
@ -117,7 +119,7 @@ public final class PropertyFactory {
|
|||
Property property,
|
||||
boolean lazyAvailable) {
|
||||
String mappedUnsavedValue = ( (KeyValue) property.getValue() ).getNullValue();
|
||||
|
||||
|
||||
VersionValue unsavedValue = UnsavedValueFactory.getUnsavedVersionValue(
|
||||
mappedUnsavedValue,
|
||||
getGetter( property ),
|
||||
|
@ -131,8 +133,8 @@ public final class PropertyFactory {
|
|||
persister,
|
||||
sessionFactory,
|
||||
attributeNumber,
|
||||
property.getName(),
|
||||
property.getValue().getType(),
|
||||
property.getName(),
|
||||
property.getValue().getType(),
|
||||
new BaselineAttributeInformation.Builder()
|
||||
.setLazy( lazy )
|
||||
.setInsertable( property.isInsertable() )
|
||||
|
@ -143,8 +145,8 @@ public final class PropertyFactory {
|
|||
.setVersionable( property.isOptimisticLocked() )
|
||||
.setCascadeStyle( property.getCascadeStyle() )
|
||||
.createInformation(),
|
||||
unsavedValue
|
||||
);
|
||||
unsavedValue
|
||||
);
|
||||
}
|
||||
|
||||
public static enum NonIdentifierAttributeNature {
|
||||
|
@ -160,6 +162,7 @@ public final class PropertyFactory {
|
|||
*
|
||||
* @param property The mapped property.
|
||||
* @param lazyAvailable Is property lazy loading currently available.
|
||||
*
|
||||
* @return The appropriate NonIdentifierProperty definition.
|
||||
*/
|
||||
public static NonIdentifierAttribute buildEntityBasedAttribute(
|
||||
|
@ -174,13 +177,13 @@ public final class PropertyFactory {
|
|||
|
||||
// we need to dirty check collections, since they can cause an owner
|
||||
// version number increment
|
||||
|
||||
|
||||
// we need to dirty check many-to-ones with not-found="ignore" in order
|
||||
// to update the cache (not the database), since in this case a null
|
||||
// entity reference can lose information
|
||||
|
||||
boolean alwaysDirtyCheck = type.isAssociationType() &&
|
||||
( (AssociationType) type ).isAlwaysDirtyChecked();
|
||||
|
||||
boolean alwaysDirtyCheck = type.isAssociationType() &&
|
||||
( (AssociationType) type ).isAlwaysDirtyChecked();
|
||||
|
||||
switch ( nature ) {
|
||||
case BASIC: {
|
||||
|
@ -314,7 +317,7 @@ public final class PropertyFactory {
|
|||
try {
|
||||
return ReflectHelper.getDefaultConstructor( persistentClass.getMappedClass() );
|
||||
}
|
||||
catch( Throwable t ) {
|
||||
catch (Throwable t) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -59,7 +59,7 @@ import org.hibernate.type.CompositeType;
|
|||
import org.hibernate.type.EntityType;
|
||||
import org.hibernate.type.Type;
|
||||
|
||||
import org.jboss.logging.Logger;
|
||||
import static org.hibernate.internal.CoreLogging.messageLogger;
|
||||
|
||||
|
||||
/**
|
||||
|
@ -69,11 +69,7 @@ import org.jboss.logging.Logger;
|
|||
* @author Gavin King
|
||||
*/
|
||||
public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
||||
|
||||
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
|
||||
CoreMessageLogger.class,
|
||||
AbstractEntityTuplizer.class.getName()
|
||||
);
|
||||
private static final CoreMessageLogger LOG = messageLogger( AbstractEntityTuplizer.class );
|
||||
|
||||
//TODO: currently keeps Getters and Setters (instead of PropertyAccessors) because of the way getGetter() and getSetter() are implemented currently; yuck!
|
||||
|
||||
|
@ -99,6 +95,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
*
|
||||
* @param mappedProperty The property to be accessed via the built Getter.
|
||||
* @param mappedEntity The entity information regarding the mapped entity owning this property.
|
||||
*
|
||||
* @return An appropriate Getter instance.
|
||||
*/
|
||||
protected abstract Getter buildPropertyGetter(Property mappedProperty, PersistentClass mappedEntity);
|
||||
|
@ -108,6 +105,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
*
|
||||
* @param mappedProperty The property to be accessed via the built Setter.
|
||||
* @param mappedEntity The entity information regarding the mapped entity owning this property.
|
||||
*
|
||||
* @return An appropriate Setter instance.
|
||||
*/
|
||||
protected abstract Setter buildPropertySetter(Property mappedProperty, PersistentClass mappedEntity);
|
||||
|
@ -116,6 +114,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
* Build an appropriate Instantiator for the given mapped entity.
|
||||
*
|
||||
* @param mappingInfo The mapping information regarding the mapped entity.
|
||||
*
|
||||
* @return An appropriate Instantiator instance.
|
||||
*/
|
||||
protected abstract Instantiator buildInstantiator(PersistentClass mappingInfo);
|
||||
|
@ -126,6 +125,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
* @param mappingInfo The mapping information regarding the mapped entity.
|
||||
* @param idGetter The constructed Getter relating to the entity's id property.
|
||||
* @param idSetter The constructed Setter relating to the entity's id property.
|
||||
*
|
||||
* @return An appropriate ProxyFactory instance.
|
||||
*/
|
||||
protected abstract ProxyFactory buildProxyFactory(PersistentClass mappingInfo, Getter idGetter, Setter idSetter);
|
||||
|
@ -150,17 +150,17 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
|
||||
propertySpan = entityMetamodel.getPropertySpan();
|
||||
|
||||
getters = new Getter[propertySpan];
|
||||
getters = new Getter[propertySpan];
|
||||
setters = new Setter[propertySpan];
|
||||
|
||||
Iterator itr = mappingInfo.getPropertyClosureIterator();
|
||||
boolean foundCustomAccessor=false;
|
||||
int i=0;
|
||||
boolean foundCustomAccessor = false;
|
||||
int i = 0;
|
||||
while ( itr.hasNext() ) {
|
||||
//TODO: redesign how PropertyAccessors are acquired...
|
||||
Property property = (Property) itr.next();
|
||||
getters[i] = buildPropertyGetter(property, mappingInfo);
|
||||
setters[i] = buildPropertySetter(property, mappingInfo);
|
||||
getters[i] = buildPropertyGetter( property, mappingInfo );
|
||||
setters[i] = buildPropertySetter( property, mappingInfo );
|
||||
if ( !property.isBasicPropertyAccessor() ) {
|
||||
foundCustomAccessor = true;
|
||||
}
|
||||
|
@ -168,11 +168,11 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
}
|
||||
hasCustomAccessors = foundCustomAccessor;
|
||||
|
||||
instantiator = buildInstantiator( mappingInfo );
|
||||
instantiator = buildInstantiator( mappingInfo );
|
||||
|
||||
if ( entityMetamodel.isLazy() ) {
|
||||
proxyFactory = buildProxyFactory( mappingInfo, idGetter, idSetter );
|
||||
if (proxyFactory == null) {
|
||||
if ( proxyFactory == null ) {
|
||||
entityMetamodel.setLazy( false );
|
||||
}
|
||||
}
|
||||
|
@ -194,7 +194,8 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
}
|
||||
}
|
||||
|
||||
/** Retreives the defined entity-name for the tuplized entity.
|
||||
/**
|
||||
* Retreives the defined entity-name for the tuplized entity.
|
||||
*
|
||||
* @return The entity-name.
|
||||
*/
|
||||
|
@ -228,7 +229,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
}
|
||||
else {
|
||||
if ( idGetter == null ) {
|
||||
if (identifierMapperType==null) {
|
||||
if ( identifierMapperType == null ) {
|
||||
throw new HibernateException( "The class has no identifier property: " + getEntityName() );
|
||||
}
|
||||
else {
|
||||
|
@ -236,14 +237,14 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
}
|
||||
}
|
||||
else {
|
||||
id = idGetter.get( entity );
|
||||
}
|
||||
}
|
||||
id = idGetter.get( entity );
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
return (Serializable) id;
|
||||
}
|
||||
catch ( ClassCastException cce ) {
|
||||
catch (ClassCastException cce) {
|
||||
StringBuilder msg = new StringBuilder( "Identifier classes must be serializable. " );
|
||||
if ( id != null ) {
|
||||
msg.append( id.getClass().getName() ).append( " is not serializable. " );
|
||||
|
@ -280,6 +281,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
|
||||
private static interface MappedIdentifierValueMarshaller {
|
||||
public Object getIdentifier(Object entity, EntityMode entityMode, SessionImplementor session);
|
||||
|
||||
public void setIdentifier(Object entity, Serializable id, EntityMode entityMode, SessionImplementor session);
|
||||
}
|
||||
|
||||
|
@ -303,7 +305,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
// the sizes being off is a much bigger problem that should have been caught already...
|
||||
for ( int i = 0; i < virtualIdComponent.getSubtypes().length; i++ ) {
|
||||
if ( virtualIdComponent.getSubtypes()[i].isEntityType()
|
||||
&& ! mappedIdClassComponentType.getSubtypes()[i].isEntityType() ) {
|
||||
&& !mappedIdClassComponentType.getSubtypes()[i].isEntityType() ) {
|
||||
wereAllEquivalent = false;
|
||||
break;
|
||||
}
|
||||
|
@ -311,14 +313,19 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
|
||||
return wereAllEquivalent
|
||||
? new NormalMappedIdentifierValueMarshaller( virtualIdComponent, mappedIdClassComponentType )
|
||||
: new IncrediblySillyJpaMapsIdMappedIdentifierValueMarshaller( virtualIdComponent, mappedIdClassComponentType );
|
||||
: new IncrediblySillyJpaMapsIdMappedIdentifierValueMarshaller(
|
||||
virtualIdComponent,
|
||||
mappedIdClassComponentType
|
||||
);
|
||||
}
|
||||
|
||||
private static class NormalMappedIdentifierValueMarshaller implements MappedIdentifierValueMarshaller {
|
||||
private final ComponentType virtualIdComponent;
|
||||
private final ComponentType mappedIdentifierType;
|
||||
|
||||
private NormalMappedIdentifierValueMarshaller(ComponentType virtualIdComponent, ComponentType mappedIdentifierType) {
|
||||
private NormalMappedIdentifierValueMarshaller(
|
||||
ComponentType virtualIdComponent,
|
||||
ComponentType mappedIdentifierType) {
|
||||
this.virtualIdComponent = virtualIdComponent;
|
||||
this.mappedIdentifierType = mappedIdentifierType;
|
||||
}
|
||||
|
@ -341,11 +348,14 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
}
|
||||
}
|
||||
|
||||
private static class IncrediblySillyJpaMapsIdMappedIdentifierValueMarshaller implements MappedIdentifierValueMarshaller {
|
||||
private static class IncrediblySillyJpaMapsIdMappedIdentifierValueMarshaller
|
||||
implements MappedIdentifierValueMarshaller {
|
||||
private final ComponentType virtualIdComponent;
|
||||
private final ComponentType mappedIdentifierType;
|
||||
|
||||
private IncrediblySillyJpaMapsIdMappedIdentifierValueMarshaller(ComponentType virtualIdComponent, ComponentType mappedIdentifierType) {
|
||||
private IncrediblySillyJpaMapsIdMappedIdentifierValueMarshaller(
|
||||
ComponentType virtualIdComponent,
|
||||
ComponentType mappedIdentifierType) {
|
||||
this.virtualIdComponent = virtualIdComponent;
|
||||
this.mappedIdentifierType = mappedIdentifierType;
|
||||
}
|
||||
|
@ -359,12 +369,12 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
final Iterable<PersistEventListener> persistEventListeners = persistEventListeners( session );
|
||||
final PersistenceContext persistenceContext = session.getPersistenceContext();
|
||||
final int length = subTypes.length;
|
||||
for ( int i = 0 ; i < length; i++ ) {
|
||||
for ( int i = 0; i < length; i++ ) {
|
||||
if ( propertyValues[i] == null ) {
|
||||
throw new HibernateException( "No part of a composite identifier may be null" );
|
||||
}
|
||||
//JPA 2 @MapsId + @IdClass points to the pk of the entity
|
||||
if ( subTypes[i].isAssociationType() && ! copierSubTypes[i].isAssociationType() ) {
|
||||
if ( subTypes[i].isAssociationType() && !copierSubTypes[i].isAssociationType() ) {
|
||||
// we need a session to handle this use case
|
||||
if ( session == null ) {
|
||||
throw new AssertionError(
|
||||
|
@ -382,7 +392,11 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
}
|
||||
else {
|
||||
LOG.debug( "Performing implicit derived identity cascade" );
|
||||
final PersistEvent event = new PersistEvent( null, propertyValues[i], (EventSource) session );
|
||||
final PersistEvent event = new PersistEvent(
|
||||
null,
|
||||
propertyValues[i],
|
||||
(EventSource) session
|
||||
);
|
||||
for ( PersistEventListener listener : persistEventListeners ) {
|
||||
listener.onPersist( event );
|
||||
}
|
||||
|
@ -405,12 +419,12 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
@Override
|
||||
public void setIdentifier(Object entity, Serializable id, EntityMode entityMode, SessionImplementor session) {
|
||||
final Object[] extractedValues = mappedIdentifierType.getPropertyValues( id, entityMode );
|
||||
final Object[] injectionValues = new Object[ extractedValues.length ];
|
||||
final Object[] injectionValues = new Object[extractedValues.length];
|
||||
final PersistenceContext persistenceContext = session.getPersistenceContext();
|
||||
for ( int i = 0; i < virtualIdComponent.getSubtypes().length; i++ ) {
|
||||
final Type virtualPropertyType = virtualIdComponent.getSubtypes()[i];
|
||||
final Type idClassPropertyType = mappedIdentifierType.getSubtypes()[i];
|
||||
if ( virtualPropertyType.isEntityType() && ! idClassPropertyType.isEntityType() ) {
|
||||
if ( virtualPropertyType.isEntityType() && !idClassPropertyType.isEntityType() ) {
|
||||
if ( session == null ) {
|
||||
throw new AssertionError(
|
||||
"Deprecated version of getIdentifier (no session) was used but session was required"
|
||||
|
@ -459,6 +473,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
Serializable currentId,
|
||||
Object currentVersion,
|
||||
SessionImplementor session) {
|
||||
//noinspection StatementWithEmptyBody
|
||||
if ( entityMetamodel.getIdentifierProperty().getIdentifierGenerator() instanceof Assigned ) {
|
||||
}
|
||||
else {
|
||||
|
@ -471,8 +486,8 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
VersionProperty versionProperty = entityMetamodel.getVersionProperty();
|
||||
if ( entityMetamodel.isVersioned() ) {
|
||||
setPropertyValue(
|
||||
entity,
|
||||
entityMetamodel.getVersionPropertyIndex(),
|
||||
entity,
|
||||
entityMetamodel.getVersionPropertyIndex(),
|
||||
versionProperty.getUnsavedValue().getDefaultValue( currentVersion )
|
||||
);
|
||||
}
|
||||
|
@ -481,8 +496,10 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
|
||||
@Override
|
||||
public Object getVersion(Object entity) throws HibernateException {
|
||||
if ( !entityMetamodel.isVersioned() ) return null;
|
||||
return getters[ entityMetamodel.getVersionPropertyIndex() ].get( entity );
|
||||
if ( !entityMetamodel.isVersioned() ) {
|
||||
return null;
|
||||
}
|
||||
return getters[entityMetamodel.getVersionPropertyIndex()].get( entity );
|
||||
}
|
||||
|
||||
protected boolean shouldGetAllProperties(Object entity) {
|
||||
|
@ -526,18 +543,18 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
|
||||
@Override
|
||||
public Object getPropertyValue(Object entity, String propertyPath) throws HibernateException {
|
||||
int loc = propertyPath.indexOf('.');
|
||||
int loc = propertyPath.indexOf( '.' );
|
||||
String basePropertyName = loc > 0
|
||||
? propertyPath.substring( 0, loc )
|
||||
: propertyPath;
|
||||
//final int index = entityMetamodel.getPropertyIndexOrNull( basePropertyName );
|
||||
Integer index = entityMetamodel.getPropertyIndexOrNull( basePropertyName );
|
||||
if (index == null) {
|
||||
if ( index == null ) {
|
||||
propertyPath = PropertyPath.IDENTIFIER_MAPPER_PROPERTY + "." + propertyPath;
|
||||
loc = propertyPath.indexOf('.');
|
||||
loc = propertyPath.indexOf( '.' );
|
||||
basePropertyName = loc > 0
|
||||
? propertyPath.substring( 0, loc )
|
||||
: propertyPath;
|
||||
? propertyPath.substring( 0, loc )
|
||||
: propertyPath;
|
||||
}
|
||||
index = entityMetamodel.getPropertyIndexOrNull( basePropertyName );
|
||||
final Object baseValue = getPropertyValue( entity, index );
|
||||
|
@ -548,7 +565,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
return getComponentValue(
|
||||
(ComponentType) entityMetamodel.getPropertyTypes()[index],
|
||||
baseValue,
|
||||
propertyPath.substring(loc+1)
|
||||
propertyPath.substring( loc + 1 )
|
||||
);
|
||||
}
|
||||
else {
|
||||
|
@ -562,6 +579,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
* @param type The component property types.
|
||||
* @param component The component instance itself.
|
||||
* @param propertyPath The property path for the property to be extracted.
|
||||
*
|
||||
* @return The property value extracted.
|
||||
*/
|
||||
protected Object getComponentValue(ComponentType type, Object component, String propertyPath) {
|
||||
|
@ -578,7 +596,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
return getComponentValue(
|
||||
(ComponentType) type.getSubtypes()[index],
|
||||
baseValue,
|
||||
propertyPath.substring(loc+1)
|
||||
propertyPath.substring( loc + 1 )
|
||||
);
|
||||
}
|
||||
else {
|
||||
|
@ -589,7 +607,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
|
||||
private int findSubPropertyIndex(ComponentType type, String subPropertyName) {
|
||||
final String[] propertyNames = type.getPropertyNames();
|
||||
for ( int index = 0; index<propertyNames.length; index++ ) {
|
||||
for ( int index = 0; index < propertyNames.length; index++ ) {
|
||||
if ( subPropertyName.equals( propertyNames[index] ) ) {
|
||||
return index;
|
||||
}
|
||||
|
@ -615,7 +633,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
|
||||
@Override
|
||||
public void setPropertyValue(Object entity, String propertyName, Object value) throws HibernateException {
|
||||
setters[ entityMetamodel.getPropertyIndex( propertyName ) ].set( entity, value, getFactory() );
|
||||
setters[entityMetamodel.getPropertyIndex( propertyName )].set( entity, value, getFactory() );
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -640,7 +658,8 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void afterInitialize(Object entity, boolean lazyPropertiesAreUnfetched, SessionImplementor session) {}
|
||||
public void afterInitialize(Object entity, boolean lazyPropertiesAreUnfetched, SessionImplementor session) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasUninitializedLazyProperties(Object entity) {
|
||||
|
@ -650,7 +669,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
|
||||
@Override
|
||||
public final boolean isInstance(Object object) {
|
||||
return getInstantiator().isInstance( object );
|
||||
return getInstantiator().isInstance( object );
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -660,7 +679,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
|
||||
@Override
|
||||
public final Object createProxy(Serializable id, SessionImplementor session)
|
||||
throws HibernateException {
|
||||
throws HibernateException {
|
||||
return getProxyFactory().getProxy( id, session );
|
||||
}
|
||||
|
||||
|
@ -686,7 +705,7 @@ public abstract class AbstractEntityTuplizer implements EntityTuplizer {
|
|||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
public String toString() {
|
||||
return getClass().getName() + '(' + getEntityMetamodel().getName() + ')';
|
||||
}
|
||||
|
||||
|
|
|
@@ -52,7 +52,7 @@ public class DynamicMapEntityTuplizer extends AbstractEntityTuplizer {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( DynamicMapEntityTuplizer.class );

DynamicMapEntityTuplizer(EntityMetamodel entityMetamodel, PersistentClass mappedEntity) {
super(entityMetamodel, mappedEntity);
super( entityMetamodel, mappedEntity );
}

@Override

@@ -62,7 +62,7 @@ public class DynamicMapEntityTuplizer extends AbstractEntityTuplizer {
private PropertyAccessor buildPropertyAccessor(Property mappedProperty) {
if ( mappedProperty.isBackRef() ) {
return mappedProperty.getPropertyAccessor(null);
return mappedProperty.getPropertyAccessor( null );
}
else {
return PropertyAccessorFactory.getDynamicMapPropertyAccessor();

@@ -70,22 +70,22 @@ public class DynamicMapEntityTuplizer extends AbstractEntityTuplizer {
}

@Override
protected Getter buildPropertyGetter(Property mappedProperty, PersistentClass mappedEntity) {
return buildPropertyAccessor(mappedProperty).getGetter( null, mappedProperty.getName() );
protected Getter buildPropertyGetter(Property mappedProperty, PersistentClass mappedEntity) {
return buildPropertyAccessor( mappedProperty ).getGetter( null, mappedProperty.getName() );
}

@Override
protected Setter buildPropertySetter(Property mappedProperty, PersistentClass mappedEntity) {
return buildPropertyAccessor(mappedProperty).getSetter( null, mappedProperty.getName() );
protected Setter buildPropertySetter(Property mappedProperty, PersistentClass mappedEntity) {
return buildPropertyAccessor( mappedProperty ).getSetter( null, mappedProperty.getName() );
}

@Override
protected Instantiator buildInstantiator(PersistentClass mappingInfo) {
return new DynamicMapInstantiator( mappingInfo );
protected Instantiator buildInstantiator(PersistentClass mappingInfo) {
return new DynamicMapInstantiator( mappingInfo );
}

@Override
protected ProxyFactory buildProxyFactory(PersistentClass mappingInfo, Getter idGetter, Setter idSetter) {
protected ProxyFactory buildProxyFactory(PersistentClass mappingInfo, Getter idGetter, Setter idSetter) {

ProxyFactory pf = new MapProxyFactory();
try {

@@ -99,7 +99,7 @@ public class DynamicMapEntityTuplizer extends AbstractEntityTuplizer {
null
);
}
catch ( HibernateException he ) {
catch (HibernateException he) {
LOG.unableToCreateProxyFactory( getEntityName(), he );
pf = null;
}

@@ -123,16 +123,16 @@ public class DynamicMapEntityTuplizer extends AbstractEntityTuplizer {
@Override
public EntityNameResolver[] getEntityNameResolvers() {
return new EntityNameResolver[] { BasicEntityNameResolver.INSTANCE };
return new EntityNameResolver[] {BasicEntityNameResolver.INSTANCE};
}

@Override
public String determineConcreteSubclassEntityName(Object entityInstance, SessionFactoryImplementor factory) {
return extractEmbeddedEntityName( ( Map ) entityInstance );
return extractEmbeddedEntityName( (Map) entityInstance );
}

public static String extractEmbeddedEntityName(Map entity) {
return ( String ) entity.get( DynamicMapInstantiator.KEY );
return (String) entity.get( DynamicMapInstantiator.KEY );
}

public static class BasicEntityNameResolver implements EntityNameResolver {

@@ -140,10 +140,10 @@ public class DynamicMapEntityTuplizer extends AbstractEntityTuplizer {
@Override
public String resolveEntityName(Object entity) {
if ( ! Map.class.isInstance( entity ) ) {
if ( !Map.class.isInstance( entity ) ) {
return null;
}
final String entityName = extractEmbeddedEntityName( ( Map ) entity );
final String entityName = extractEmbeddedEntityName( (Map) entity );
if ( entityName == null ) {
throw new HibernateException( "Could not determine type of dynamic map entity" );
}

@@ -151,12 +151,12 @@ public class DynamicMapEntityTuplizer extends AbstractEntityTuplizer {
}

@Override
public boolean equals(Object obj) {
public boolean equals(Object obj) {
return getClass().equals( obj.getClass() );
}

@Override
public int hashCode() {
public int hashCode() {
return getClass().hashCode();
}
}
@@ -25,7 +25,6 @@ package org.hibernate.tuple.entity;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;

@@ -345,13 +344,13 @@ public class EntityMetamodel implements Serializable {
if ( persistentClass.isAbstract() == null ) {
// legacy behavior (with no abstract attribute specified)
isAbstract = persistentClass.hasPojoRepresentation() &&
ReflectHelper.isAbstractClass( persistentClass.getMappedClass() );
ReflectHelper.isAbstractClass( persistentClass.getMappedClass() );
}
else {
isAbstract = persistentClass.isAbstract().booleanValue();
if ( !isAbstract && persistentClass.hasPojoRepresentation() &&
ReflectHelper.isAbstractClass( persistentClass.getMappedClass() ) ) {
LOG.entityMappedAsNonAbstract(name);
ReflectHelper.isAbstractClass( persistentClass.getMappedClass() ) ) {
LOG.entityMappedAsNonAbstract(name);
}
}
selectBeforeUpdate = persistentClass.hasSelectBeforeUpdate();

@@ -1033,7 +1032,7 @@ public class EntityMetamodel implements Serializable {
}

@Override
public String toString() {
public String toString() {
return "EntityMetamodel(" + name + ':' + ArrayHelper.toString(properties) + ')';
}
@@ -67,7 +67,7 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
private final Class mappedClass;
private final Class proxyInterface;
private final boolean lifecycleImplementor;
private final Set lazyPropertyNames = new HashSet();
private final Set<String> lazyPropertyNames = new HashSet<String>();
private final ReflectionOptimizer optimizer;
private final boolean isInstrumented;

@@ -100,7 +100,12 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
}
else {
// todo : YUCK!!!
optimizer = Environment.getBytecodeProvider().getReflectionOptimizer( mappedClass, getterNames, setterNames, propTypes );
optimizer = Environment.getBytecodeProvider().getReflectionOptimizer(
mappedClass,
getterNames,
setterNames,
propTypes
);
// optimizer = getFactory().getSettings().getBytecodeProvider().getReflectionOptimizer(
// mappedClass, getterNames, setterNames, propTypes
// );

@@ -108,9 +113,9 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
}

@Override
protected ProxyFactory buildProxyFactory(PersistentClass persistentClass, Getter idGetter, Setter idSetter) {
protected ProxyFactory buildProxyFactory(PersistentClass persistentClass, Getter idGetter, Setter idSetter) {
// determine the id getter and setter methods from the proxy interface (if any)
// determine all interfaces needed by the resulting proxy
// determine all interfaces needed by the resulting proxy

/*
* We need to preserve the order of the interfaces they were put into the set, since javassist will choose the

@@ -123,7 +128,7 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
Class mappedClass = persistentClass.getMappedClass();
Class proxyInterface = persistentClass.getProxyInterface();

if ( proxyInterface!=null && !mappedClass.equals( proxyInterface ) ) {
if ( proxyInterface != null && !mappedClass.equals( proxyInterface ) ) {
if ( !proxyInterface.isInterface() ) {
throw new MappingException(
"proxy must be either an interface, or the class itself: " + getEntityName()

@@ -136,12 +141,12 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
proxyInterfaces.add( mappedClass );
}

Iterator subclasses = persistentClass.getSubclassIterator();
Iterator<Subclass> subclasses = persistentClass.getSubclassIterator();
while ( subclasses.hasNext() ) {
final Subclass subclass = ( Subclass ) subclasses.next();
final Subclass subclass = subclasses.next();
final Class subclassProxy = subclass.getProxyInterface();
final Class subclassClass = subclass.getMappedClass();
if ( subclassProxy!=null && !subclassClass.equals( subclassProxy ) ) {
if ( subclassProxy != null && !subclassClass.equals( subclassProxy ) ) {
if ( !subclassProxy.isInterface() ) {
throw new MappingException(
"proxy must be either an interface, or the class itself: " + subclass.getEntityName()

@@ -157,25 +162,25 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
Class clazz = persistentClass.getMappedClass();
while ( properties.hasNext() ) {
Property property = (Property) properties.next();
Method method = property.getGetter(clazz).getMethod();
Method method = property.getGetter( clazz ).getMethod();
if ( method != null && Modifier.isFinal( method.getModifiers() ) ) {
LOG.gettersOfLazyClassesCannotBeFinal(persistentClass.getEntityName(), property.getName());
LOG.gettersOfLazyClassesCannotBeFinal( persistentClass.getEntityName(), property.getName() );
}
method = property.getSetter(clazz).getMethod();
if ( method != null && Modifier.isFinal( method.getModifiers() ) ) {
LOG.settersOfLazyClassesCannotBeFinal(persistentClass.getEntityName(), property.getName());
method = property.getSetter( clazz ).getMethod();
if ( method != null && Modifier.isFinal( method.getModifiers() ) ) {
LOG.settersOfLazyClassesCannotBeFinal( persistentClass.getEntityName(), property.getName() );
}
}

Method idGetterMethod = idGetter==null ? null : idGetter.getMethod();
Method idSetterMethod = idSetter==null ? null : idSetter.getMethod();
Method idGetterMethod = idGetter == null ? null : idGetter.getMethod();
Method idSetterMethod = idSetter == null ? null : idSetter.getMethod();

Method proxyGetIdentifierMethod = idGetterMethod==null || proxyInterface==null ?
Method proxyGetIdentifierMethod = idGetterMethod == null || proxyInterface == null ?
null :
ReflectHelper.getMethod(proxyInterface, idGetterMethod);
Method proxySetIdentifierMethod = idSetterMethod==null || proxyInterface==null ?
ReflectHelper.getMethod( proxyInterface, idGetterMethod );
Method proxySetIdentifierMethod = idSetterMethod == null || proxyInterface == null ?
null :
ReflectHelper.getMethod(proxyInterface, idSetterMethod);
ReflectHelper.getMethod( proxyInterface, idSetterMethod );

ProxyFactory pf = buildProxyFactoryInternal( persistentClass, idGetter, idSetter );
try {

@@ -186,25 +191,28 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
proxyGetIdentifierMethod,
proxySetIdentifierMethod,
persistentClass.hasEmbeddedIdentifier() ?
(CompositeType) persistentClass.getIdentifier().getType() :
null
(CompositeType) persistentClass.getIdentifier().getType() :
null
);
}
catch ( HibernateException he ) {
LOG.unableToCreateProxyFactory(getEntityName(), he);
catch (HibernateException he) {
LOG.unableToCreateProxyFactory( getEntityName(), he );
pf = null;
}
return pf;
}

protected ProxyFactory buildProxyFactoryInternal(PersistentClass persistentClass, Getter idGetter, Setter idSetter) {
protected ProxyFactory buildProxyFactoryInternal(
PersistentClass persistentClass,
Getter idGetter,
Setter idSetter) {
// TODO : YUCK!!! fix after HHH-1907 is complete
return Environment.getBytecodeProvider().getProxyFactoryFactory().buildProxyFactory();
// return getFactory().getSettings().getBytecodeProvider().getProxyFactoryFactory().buildProxyFactory();
}

@Override
protected Instantiator buildInstantiator(PersistentClass persistentClass) {
protected Instantiator buildInstantiator(PersistentClass persistentClass) {
if ( optimizer == null ) {
return new PojoInstantiator( persistentClass, null );
}

@@ -214,7 +222,7 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
}

@Override
public void setPropertyValues(Object entity, Object[] values) throws HibernateException {
public void setPropertyValues(Object entity, Object[] values) throws HibernateException {
if ( !getEntityMetamodel().hasLazyProperties() && optimizer != null && optimizer.getAccessOptimizer() != null ) {
setPropertyValuesWithOptimizer( entity, values );
}

@@ -224,7 +232,7 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
}

@Override
public Object[] getPropertyValues(Object entity) throws HibernateException {
public Object[] getPropertyValues(Object entity) throws HibernateException {
if ( shouldGetAllProperties( entity ) && optimizer != null && optimizer.getAccessOptimizer() != null ) {
return getPropertyValuesWithOptimizer( entity );
}

@@ -234,7 +242,8 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
}

@Override
public Object[] getPropertyValuesToInsert(Object entity, Map mergeMap, SessionImplementor session) throws HibernateException {
public Object[] getPropertyValuesToInsert(Object entity, Map mergeMap, SessionImplementor session)
throws HibernateException {
if ( shouldGetAllProperties( entity ) && optimizer != null && optimizer.getAccessOptimizer() != null ) {
return getPropertyValuesWithOptimizer( entity );
}

@@ -262,17 +271,17 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
}

@Override
public boolean isLifecycleImplementor() {
public boolean isLifecycleImplementor() {
return lifecycleImplementor;
}

@Override
protected Getter buildPropertyGetter(Property mappedProperty, PersistentClass mappedEntity) {
protected Getter buildPropertyGetter(Property mappedProperty, PersistentClass mappedEntity) {
return mappedProperty.getGetter( mappedEntity.getMappedClass() );
}

@Override
protected Setter buildPropertySetter(Property mappedProperty, PersistentClass mappedEntity) {
protected Setter buildPropertySetter(Property mappedProperty, PersistentClass mappedEntity) {
return mappedProperty.getSetter( mappedEntity.getMappedClass() );
}

@@ -281,26 +290,26 @@ public class PojoEntityTuplizer extends AbstractEntityTuplizer {
return proxyInterface;
}

//TODO: need to make the majority of this functionality into a top-level support class for custom impl support
//TODO: need to make the majority of this functionality into a top-level support class for custom impl support

@Override
public void afterInitialize(Object entity, boolean lazyPropertiesAreUnfetched, SessionImplementor session) {
public void afterInitialize(Object entity, boolean lazyPropertiesAreUnfetched, SessionImplementor session) {
if ( isInstrumented() ) {
Set lazyProps = lazyPropertiesAreUnfetched && getEntityMetamodel().hasLazyProperties() ?
Set<String> lazyProps = lazyPropertiesAreUnfetched && getEntityMetamodel().hasLazyProperties() ?
lazyPropertyNames : null;
//TODO: if we support multiple fetch groups, we would need
// to clone the set of lazy properties!
FieldInterceptionHelper.injectFieldInterceptor( entity, getEntityName(), lazyProps, session );

//also clear the fields that are marked as dirty in the dirtyness tracker
if(entity instanceof org.hibernate.engine.spi.SelfDirtinessTracker) {
((org.hibernate.engine.spi.SelfDirtinessTracker) entity).$$_hibernate_clearDirtyAttributes();
}
//also clear the fields that are marked as dirty in the dirtyness tracker
if ( entity instanceof org.hibernate.engine.spi.SelfDirtinessTracker ) {
( (org.hibernate.engine.spi.SelfDirtinessTracker) entity ).$$_hibernate_clearDirtyAttributes();
}
}
}

@Override
public boolean hasUninitializedLazyProperties(Object entity) {
public boolean hasUninitializedLazyProperties(Object entity) {
if ( getEntityMetamodel().hasLazyProperties() ) {
FieldInterceptor callback = FieldInterceptionHelper.extractFieldInterceptor( entity );
return callback != null && !callback.isInitialized();
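Several of the PojoEntityTuplizer hunks above only re-wrap declarations and calls that run past the line-length limit, putting one parameter or argument per line with the closing parenthesis on its own line. A hypothetical example of the same re-wrapping (the class and method names below are invented, not taken from the commit):

// Illustrative only: wrapping a long signature and a long call, one element per line.
public class WrappingExample {

	protected Object buildSomething(
			String entityName,
			String propertyName,
			Object value) {
		return describe(
				entityName,
				propertyName,
				value
		);
	}

	private Object describe(String entityName, String propertyName, Object value) {
		return entityName + "." + propertyName + "=" + value;
	}
}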
@@ -36,8 +36,7 @@ import org.hibernate.type.Type;
* @author Steve Ebersole
*/
public class VersionProperty extends AbstractNonIdentifierAttribute {

private final VersionValue unsavedValue;
private final VersionValue unsavedValue;

/**
* Constructs VersionProperty instances.

@@ -59,12 +58,13 @@ public class VersionProperty extends AbstractNonIdentifierAttribute {
int attributeNumber,
String attributeName,
Type attributeType,
BaselineAttributeInformation attributeInformation, VersionValue unsavedValue) {
BaselineAttributeInformation attributeInformation,
VersionValue unsavedValue) {
super( source, sessionFactory, attributeNumber, attributeName, attributeType, attributeInformation );
this.unsavedValue = unsavedValue;
}

public VersionValue getUnsavedValue() {
return unsavedValue;
}
public VersionValue getUnsavedValue() {
return unsavedValue;
}
}
@@ -39,7 +39,6 @@ import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaTypeDescriptor;
@@ -26,16 +26,12 @@ package org.hibernate.type;
import java.io.Serializable;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.hibernate.HibernateException;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.usertype.CompositeUserType;
import org.hibernate.usertype.UserType;
import org.jboss.logging.Logger;

import java.io.Serializable;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
* A registry of {@link BasicType} instances

@@ -43,10 +39,10 @@ import java.util.concurrent.ConcurrentHashMap;
* @author Steve Ebersole
*/
public class BasicTypeRegistry implements Serializable {
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( BasicTypeRegistry.class );
private static final CoreMessageLogger LOG = CoreLogging.messageLogger( BasicTypeRegistry.class );

// TODO : analyze these sizing params; unfortunately this seems to be the only way to give a "concurrencyLevel"
private Map<String,BasicType> registry = new ConcurrentHashMap<String, BasicType>( 100, .75f, 1 );
private Map<String, BasicType> registry = new ConcurrentHashMap<String, BasicType>( 100, .75f, 1 );
private boolean locked;

public BasicTypeRegistry() {

@@ -124,7 +120,7 @@ public class BasicTypeRegistry implements Serializable {
*
* @param registeredTypes The type map to copy over
*/
@SuppressWarnings({ "UnusedDeclaration" })
@SuppressWarnings({"UnusedDeclaration"})
private BasicTypeRegistry(Map<String, BasicType> registeredTypes) {
registry.putAll( registeredTypes );
locked = true;

@@ -145,13 +141,13 @@ public class BasicTypeRegistry implements Serializable {
for ( String key : type.getRegistrationKeys() ) {
// be safe...
if (key == null) {
if ( key == null ) {
continue;
}
LOG.debugf("Adding type registration %s -> %s", key, type);
LOG.debugf( "Adding type registration %s -> %s", key, type );
final Type old = registry.put( key, type );
if (old != null && old != type) {
LOG.typeRegistrationOverridesPrevious(key, old);
if ( old != null && old != type ) {
LOG.typeRegistrationOverridesPrevious( key, old );
}
}
}
@@ -24,13 +24,12 @@
package org.hibernate.type;

import java.math.BigDecimal;
import java.sql.Types;

import org.hibernate.type.descriptor.java.BigDecimalTypeDescriptor;
import org.hibernate.type.descriptor.sql.NumericTypeDescriptor;

/**
* A type that maps between a {@link Types#NUMERIC NUMERIC} and {@link BigDecimal}.
* A type that maps between a {@link java.sql.Types#NUMERIC NUMERIC} and {@link BigDecimal}.
*
* @author Gavin King
* @author Steve Ebersole
@@ -24,14 +24,13 @@
package org.hibernate.type;

import java.math.BigInteger;
import java.sql.Types;

import org.hibernate.dialect.Dialect;
import org.hibernate.type.descriptor.java.BigIntegerTypeDescriptor;
import org.hibernate.type.descriptor.sql.BigIntTypeDescriptor;

/**
* A type that maps between a {@link Types#NUMERIC NUMERIC} and {@link BigInteger}.
* A type that maps between a {@link java.sql.Types#NUMERIC NUMERIC} and {@link BigInteger}.
*
* @author Gavin King
* @author Steve Ebersole
@@ -37,13 +37,13 @@ import org.hibernate.FetchMode;
import org.hibernate.HibernateException;
import org.hibernate.MappingException;
import org.hibernate.PropertyNotFoundException;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.spi.CascadeStyle;
import org.hibernate.engine.spi.Mapping;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.tuple.StandardProperty;
import org.hibernate.tuple.component.ComponentMetamodel;
import org.hibernate.tuple.component.ComponentTuplizer;

@@ -76,11 +76,11 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
// for now, just "re-flatten" the metamodel since this is temporary stuff anyway (HHH-1907)
this.isKey = metamodel.isKey();
this.propertySpan = metamodel.getPropertySpan();
this.propertyNames = new String[ propertySpan ];
this.propertyTypes = new Type[ propertySpan ];
this.propertyNullability = new boolean[ propertySpan ];
this.cascade = new CascadeStyle[ propertySpan ];
this.joinedFetch = new FetchMode[ propertySpan ];
this.propertyNames = new String[propertySpan];
this.propertyTypes = new Type[propertySpan];
this.propertyNullability = new boolean[propertySpan];
this.cascade = new CascadeStyle[propertySpan];
this.joinedFetch = new FetchMode[propertySpan];

for ( int i = 0; i < propertySpan; i++ ) {
StandardProperty prop = metamodel.getProperty( i );

@@ -89,7 +89,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
this.propertyNullability[i] = prop.isNullable();
this.cascade[i] = prop.getCascadeStyle();
this.joinedFetch[i] = prop.getFetchMode();
if (!prop.isNullable()) {
if ( !prop.isNullable() ) {
hasNotNullProperty = true;
}
}

@@ -109,6 +109,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
public ComponentTuplizer getComponentTuplizer() {
return componentTuplizer;
}

@Override
public int getColumnSpan(Mapping mapping) throws MappingException {
int span = 0;

@@ -117,6 +118,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
}
return span;
}

@Override
public int[] sqlTypes(Mapping mapping) throws MappingException {
//Not called at runtime so doesn't matter if its slow :)

@@ -134,7 +136,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
@Override
public Size[] dictatedSizes(Mapping mapping) throws MappingException {
//Not called at runtime so doesn't matter if its slow :)
final Size[] sizes = new Size[ getColumnSpan( mapping ) ];
final Size[] sizes = new Size[getColumnSpan( mapping )];
int soFar = 0;
for ( Type propertyType : propertyTypes ) {
final Size[] propertySizes = propertyType.dictatedSizes( mapping );

@@ -147,7 +149,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
@Override
public Size[] defaultSizes(Mapping mapping) throws MappingException {
//Not called at runtime so doesn't matter if its slow :)
final Size[] sizes = new Size[ getColumnSpan( mapping ) ];
final Size[] sizes = new Size[getColumnSpan( mapping )];
int soFar = 0;
for ( Type propertyType : propertyTypes ) {
final Size[] propertySizes = propertyType.defaultSizes( mapping );

@@ -159,7 +161,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
@Override
public final boolean isComponentType() {
public final boolean isComponentType() {
return true;
}

@@ -168,7 +170,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
}

@Override
public boolean isSame(Object x, Object y) throws HibernateException {
public boolean isSame(Object x, Object y) throws HibernateException {
if ( x == y ) {
return true;
}

@@ -202,7 +204,8 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
}

@Override
public boolean isEqual(final Object x, final Object y, final SessionFactoryImplementor factory) throws HibernateException {
public boolean isEqual(final Object x, final Object y, final SessionFactoryImplementor factory)
throws HibernateException {
if ( x == y ) {
return true;
}

@@ -277,7 +280,8 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
return false;
}

public boolean isDirty(final Object x, final Object y, final boolean[] checkable, final SessionImplementor session) throws HibernateException {
public boolean isDirty(final Object x, final Object y, final boolean[] checkable, final SessionImplementor session)
throws HibernateException {
if ( x == y ) {
return false;
}

@@ -289,7 +293,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
int len = propertyTypes[i].getColumnSpan( session.getFactory() );
if ( len <= 1 ) {
final boolean dirty = ( len == 0 || checkable[loc] ) &&
propertyTypes[i].isDirty( getPropertyValue( x, i ), getPropertyValue( y, i ), session );
propertyTypes[i].isDirty( getPropertyValue( x, i ), getPropertyValue( y, i ), session );
if ( dirty ) {
return true;
}

@@ -297,7 +301,12 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
else {
boolean[] subcheckable = new boolean[len];
System.arraycopy( checkable, loc, subcheckable, 0, len );
final boolean dirty = propertyTypes[i].isDirty( getPropertyValue( x, i ), getPropertyValue( y, i ), subcheckable, session );
final boolean dirty = propertyTypes[i].isDirty(
getPropertyValue( x, i ),
getPropertyValue( y, i ),
subcheckable,
session
);
if ( dirty ) {
return true;
}

@@ -308,14 +317,18 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
}

@Override
public boolean isModified(final Object old, final Object current, final boolean[] checkable, final SessionImplementor session) throws HibernateException {
public boolean isModified(
final Object old,
final Object current,
final boolean[] checkable,
final SessionImplementor session) throws HibernateException {
if ( current == null ) {
return old != null;
}
if ( old == null ) {
return true;
}
Object[] oldValues = ( Object[] ) old;
Object[] oldValues = (Object[]) old;
int loc = 0;
for ( int i = 0; i < propertySpan; i++ ) {
int len = propertyTypes[i].getColumnSpan( session.getFactory() );

@@ -329,11 +342,13 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
return false;

}

@Override
public Object nullSafeGet(ResultSet rs, String[] names, SessionImplementor session, Object owner)
throws HibernateException, SQLException {
return resolve( hydrate( rs, names, session, owner ), session, owner );
}

@Override
public void nullSafeSet(PreparedStatement st, Object value, int begin, SessionImplementor session)
throws HibernateException, SQLException {

@@ -345,6 +360,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
begin += propertyTypes[i].getColumnSpan( session.getFactory() );
}
}

@Override
public void nullSafeSet(
PreparedStatement st,

@@ -359,6 +375,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
int loc = 0;
for ( int i = 0; i < propertySpan; i++ ) {
int len = propertyTypes[i].getColumnSpan( session.getFactory() );
//noinspection StatementWithEmptyBody
if ( len == 0 ) {
//noop
}

@@ -386,17 +403,20 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
return getPropertyValues( value, entityMode );
}
}

@Override
public Object nullSafeGet(ResultSet rs, String name, SessionImplementor session, Object owner)
throws HibernateException, SQLException {

return nullSafeGet( rs, new String[] {name}, session, owner );
}

@Override
public Object getPropertyValue(Object component, int i, SessionImplementor session)
throws HibernateException {
return getPropertyValue( component, i );
}

public Object getPropertyValue(Object component, int i, EntityMode entityMode)
throws HibernateException {
return getPropertyValue( component, i );

@@ -409,8 +429,9 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
// Object[] (ex: QueryKey hash codes for cached queries).
// It's easiest to just check for the condition here prior to
// trying reflection.
return (( Object[] ) component)[i];
} else {
return ( (Object[]) component )[i];
}
else {
return componentTuplizer.getPropertyValue( component, i );
}
}

@@ -420,6 +441,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
throws HibernateException {
return getPropertyValues( component, entityMode );
}

@Override
public Object[] getPropertyValues(Object component, EntityMode entityMode)
throws HibernateException {

@@ -428,24 +450,29 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
// Object[] (ex: QueryKey hash codes for cached queries).
// It's easiest to just check for the condition here prior to
// trying reflection.
return ( Object[] ) component;
} else {
return (Object[]) component;
}
else {
return componentTuplizer.getPropertyValues( component );
}
}

@Override
public void setPropertyValues(Object component, Object[] values, EntityMode entityMode)
throws HibernateException {
componentTuplizer.setPropertyValues( component, values );
}

@Override
public Type[] getSubtypes() {
return propertyTypes;
}

@Override
public String getName() {
return "component" + ArrayHelper.toString( propertyNames );
}

@Override
public String toLoggableString(Object value, SessionFactoryImplementor factory)
throws HibernateException {

@@ -456,17 +483,19 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
if ( entityMode == null ) {
throw new ClassCastException( value.getClass().getName() );
}
Map<String,String> result = new HashMap<String, String>();
Map<String, String> result = new HashMap<String, String>();
Object[] values = getPropertyValues( value, entityMode );
for ( int i = 0; i < propertyTypes.length; i++ ) {
result.put( propertyNames[i], propertyTypes[i].toLoggableString( values[i], factory ) );
}
return StringHelper.unqualify( getName() ) + result.toString();
}

@Override
public String[] getPropertyNames() {
return propertyNames;
}

@Override
public Object deepCopy(Object component, SessionFactoryImplementor factory)
throws HibernateException {

@@ -490,6 +519,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
return result;
}

@Override
public Object replace(
Object original,

@@ -522,7 +552,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
}

@Override
public Object replace(
public Object replace(
Object original,
Object target,
SessionImplementor session,

@@ -576,17 +606,19 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
return result;
}

@Override
public CascadeStyle getCascadeStyle(int i) {
return cascade[i];
}

@Override
public boolean isMutable() {
return true;
}

@Override
public Serializable disassemble(Object value, SessionImplementor session, Object owner)
public Serializable disassemble(Object value, SessionImplementor session, Object owner)
throws HibernateException {

if ( value == null ) {

@@ -602,30 +634,31 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
}

@Override
public Object assemble(Serializable object, SessionImplementor session, Object owner)
public Object assemble(Serializable object, SessionImplementor session, Object owner)
throws HibernateException {

if ( object == null ) {
return null;
}
else {
Object[] values = ( Object[] ) object;
Object[] values = (Object[]) object;
Object[] assembled = new Object[values.length];
for ( int i = 0; i < propertyTypes.length; i++ ) {
assembled[i] = propertyTypes[i].assemble( ( Serializable ) values[i], session, owner );
assembled[i] = propertyTypes[i].assemble( (Serializable) values[i], session, owner );
}
Object result = instantiate( owner, session );
setPropertyValues( result, assembled, entityMode );
return result;
}
}

@Override
public FetchMode getFetchMode(int i) {
return joinedFetch[i];
}

@Override
public Object hydrate(
public Object hydrate(
final ResultSet rs,
final String[] names,
final SessionImplementor session,

@@ -655,12 +688,12 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
}

@Override
public Object resolve(Object value, SessionImplementor session, Object owner)
public Object resolve(Object value, SessionImplementor session, Object owner)
throws HibernateException {

if ( value != null ) {
Object result = instantiate( owner, session );
Object[] values = ( Object[] ) value;
Object[] values = (Object[]) value;
Object[] resolvedValues = new Object[values.length]; //only really need new array during semiresolve!
for ( int i = 0; i < values.length; i++ ) {
resolvedValues[i] = propertyTypes[i].resolve( values[i], session, owner );

@@ -674,32 +707,36 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
}

@Override
public Object semiResolve(Object value, SessionImplementor session, Object owner)
public Object semiResolve(Object value, SessionImplementor session, Object owner)
throws HibernateException {
//note that this implementation is kinda broken
//for components with many-to-one associations
return resolve( value, session, owner );
}

@Override
public boolean[] getPropertyNullability() {
return propertyNullability;
}

@Override
public boolean isXMLElement() {
public boolean isXMLElement() {
return true;
}

@Override
public Object fromXMLNode(Node xml, Mapping factory) throws HibernateException {
return xml;
}

@Override
public void setToXMLNode(Node node, Object value, SessionFactoryImplementor factory) throws HibernateException {
replaceNode( node, ( Element ) value );
replaceNode( node, (Element) value );
}

@Override
public boolean[] toColumnNullness(Object value, Mapping mapping) {
boolean[] result = new boolean[ getColumnSpan( mapping ) ];
boolean[] result = new boolean[getColumnSpan( mapping )];
if ( value == null ) {
return result;
}

@@ -712,6 +749,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
}
return result;
}

@Override
public boolean isEmbedded() {
return false;

@@ -741,10 +779,10 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
private boolean determineIfProcedureParamExtractionCanBePerformed() {
for ( Type propertyType : propertyTypes ) {
if ( ! ProcedureParameterExtractionAware.class.isInstance( propertyType ) ) {
if ( !ProcedureParameterExtractionAware.class.isInstance( propertyType ) ) {
return false;
}
if ( ! ( (ProcedureParameterExtractionAware) propertyType ).canDoExtraction() ) {
if ( !( (ProcedureParameterExtractionAware) propertyType ).canDoExtraction() ) {
return false;
}
}

@@ -773,7 +811,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
currentIndex += propertyType.getColumnSpan( session.getFactory() );
}

if ( ! notNull ) {
if ( !notNull ) {
values = null;
}

@@ -781,7 +819,8 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
}

@Override
public Object extract(CallableStatement statement, String[] paramNames, SessionImplementor session) throws SQLException {
public Object extract(CallableStatement statement, String[] paramNames, SessionImplementor session)
throws SQLException {
// for this form to work all sub-property spans must be one (1)...

Object[] values = new Object[propertySpan];

@@ -791,7 +830,7 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
for ( String paramName : paramNames ) {
// we know this cast is safe from canDoExtraction
final ProcedureParameterExtractionAware propertyType = (ProcedureParameterExtractionAware) propertyTypes[indx];
final Object value = propertyType.extract( statement, new String[] { paramName }, session );
final Object value = propertyType.extract( statement, new String[] {paramName}, session );
if ( value == null ) {
if ( isKey ) {
return null; //different nullability rules for pk/fk

@@ -803,13 +842,13 @@ public class ComponentType extends AbstractType implements CompositeType, Proced
values[indx] = value;
}

if ( ! notNull ) {
if ( !notNull ) {
values = null;
}

return resolve( values, session, null );
}

public boolean hasNotNullProperty() {
return hasNotNullProperty;
}
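Two further conventions run through the ComponentType hunks above: the closing brace, `else`, and `catch` are placed on separate lines instead of `} else {`, and casts are written `(Type) value` with no spaces inside the cast parentheses. A hypothetical illustration only (this class is not part of the commit):

// Illustrative only: 'else'/'catch' on their own line after the closing brace,
// and casts written as (Type) value rather than ( Type ) value.
public class BraceAndCastExample {

	public Object[] asArray(Object component) {
		if ( component instanceof Object[] ) {
			return (Object[]) component;
		}
		else {
			return new Object[] {component};
		}
	}

	public Integer parseOrNull(String text) {
		try {
			return Integer.valueOf( text.trim() );
		}
		catch (NumberFormatException nfe) {
			return null;
		}
	}
}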
@@ -46,12 +46,12 @@ public class EmbeddedComponentType extends ComponentType {
}

public Object instantiate(Object parent, SessionImplementor session) throws HibernateException {
final boolean useParent = parent!=null &&
//TODO: Yuck! This is not quite good enough, it's a quick
//hack around the problem of having a to-one association
//that refers to an embedded component:
super.getReturnedClass().isInstance(parent);
final boolean useParent = parent != null &&
//TODO: Yuck! This is not quite good enough, it's a quick
//hack around the problem of having a to-one association
//that refers to an embedded component:
super.getReturnedClass().isInstance( parent );

return useParent ? parent : super.instantiate(parent, session);
return useParent ? parent : super.instantiate( parent, session );
}
}
@ -66,6 +66,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
|
||||
/**
|
||||
* Cached because of performance
|
||||
*
|
||||
* @see #getIdentifierType(SessionImplementor)
|
||||
* @see #getIdentifierType(Mapping)
|
||||
*/
|
||||
|
@ -73,6 +74,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
|
||||
/**
|
||||
* Cached because of performance
|
||||
*
|
||||
* @see #getAssociatedEntityPersister
|
||||
*/
|
||||
private transient volatile EntityPersister associatedEntityPersister;
|
||||
|
@ -117,7 +119,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
* @param unwrapProxy Is unwrapping of proxies allowed for this association; unwrapping
|
||||
* says to return the "implementation target" of lazy prooxies; typically only possible
|
||||
* with lazy="no-proxy".
|
||||
*
|
||||
*
|
||||
* @deprecated Use {@link #EntityType(org.hibernate.type.TypeFactory.TypeScope, String, boolean, String, boolean, boolean)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
|
@ -245,6 +247,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
* The name of the associated entity.
|
||||
*
|
||||
* @param factory The session factory, for resolution.
|
||||
*
|
||||
* @return The associated entity name.
|
||||
*/
|
||||
@Override
|
||||
|
@ -256,12 +259,14 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
* Retrieves the {@link Joinable} defining the associated entity.
|
||||
*
|
||||
* @param factory The session factory.
|
||||
*
|
||||
* @return The associated joinable
|
||||
*
|
||||
* @throws MappingException Generally indicates an invalid entity name.
|
||||
*/
|
||||
@Override
|
||||
public Joinable getAssociatedJoinable(SessionFactoryImplementor factory) throws MappingException {
|
||||
return ( Joinable ) getAssociatedEntityPersister( factory );
|
||||
return (Joinable) getAssociatedEntityPersister( factory );
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -282,16 +287,16 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
return returnedClass;
|
||||
}
|
||||
|
||||
private Class determineAssociatedEntityClass() {
|
||||
final String entityName = getAssociatedEntityName();
|
||||
try {
|
||||
return ReflectHelper.classForName(entityName);
|
||||
}
|
||||
catch ( ClassNotFoundException cnfe ) {
|
||||
return this.scope.resolveFactory().getEntityPersister(entityName).
|
||||
getEntityTuplizer().getMappedClass();
|
||||
}
|
||||
}
|
||||
private Class determineAssociatedEntityClass() {
|
||||
final String entityName = getAssociatedEntityName();
|
||||
try {
|
||||
return ReflectHelper.classForName( entityName );
|
||||
}
|
||||
catch (ClassNotFoundException cnfe) {
|
||||
return this.scope.resolveFactory().getEntityPersister( entityName ).
|
||||
getEntityTuplizer().getMappedClass();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object nullSafeGet(ResultSet rs, String name, SessionImplementor session, Object owner)
|
||||
|
@ -305,15 +310,15 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
String[] names,
|
||||
SessionImplementor session,
|
||||
Object owner) throws HibernateException, SQLException {
|
||||
return resolve( hydrate(rs, names, session, owner), session, owner );
|
||||
return resolve( hydrate( rs, names, session, owner ), session, owner );
|
||||
}
|
||||
|
||||
/**
|
||||
* Two entities are considered the same when their instances are the same.
|
||||
*
|
||||
*
|
||||
* @param x One entity instance
|
||||
* @param y Another entity instance
|
||||
*
|
||||
* @return True if x == y; false otherwise.
|
||||
*/
|
||||
@Override
|
||||
|
@ -341,7 +346,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
if ( original == null ) {
|
||||
return null;
|
||||
}
|
||||
Object cached = copyCache.get(original);
|
||||
Object cached = copyCache.get( original );
|
||||
if ( cached != null ) {
|
||||
return cached;
|
||||
}
|
||||
|
@ -349,7 +354,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
if ( original == target ) {
|
||||
return target;
|
||||
}
|
||||
if ( session.getContextEntityIdentifier( original ) == null &&
|
||||
if ( session.getContextEntityIdentifier( original ) == null &&
|
||||
ForeignKeys.isTransient( associatedEntityName, original, Boolean.FALSE, session ) ) {
|
||||
final Object copy = session.getEntityPersister( associatedEntityName, original )
|
||||
.instantiate( null, session );
|
||||
|
@ -359,10 +364,13 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
else {
|
||||
Object id = getIdentifier( original, session );
|
||||
if ( id == null ) {
|
||||
throw new AssertionFailure("non-transient entity has a null id: " + original.getClass().getName());
|
||||
throw new AssertionFailure(
|
||||
"non-transient entity has a null id: " + original.getClass()
|
||||
.getName()
|
||||
);
|
||||
}
|
||||
id = getIdentifierOrUniqueKeyType( session.getFactory() )
|
||||
.replace(id, null, session, owner, copyCache);
|
||||
.replace( id, null, session, owner, copyCache );
|
||||
return resolve( id, session, owner );
|
||||
}
|
||||
}
|
||||
|
@ -376,7 +384,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
}
|
||||
|
||||
final Serializable id;
|
||||
if (x instanceof HibernateProxy) {
|
||||
if ( x instanceof HibernateProxy ) {
|
||||
id = ( (HibernateProxy) x ).getHibernateLazyInitializer().getIdentifier();
|
||||
}
|
||||
else {
|
||||
|
@ -400,12 +408,12 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
|
||||
EntityPersister persister = getAssociatedEntityPersister( factory );
|
||||
if ( !persister.canExtractIdOutOfEntity() ) {
|
||||
return super.isEqual(x, y );
|
||||
return super.isEqual( x, y );
|
||||
}
|
||||
|
||||
final Class mappedClass = persister.getMappedClass();
|
||||
Serializable xid;
|
||||
if (x instanceof HibernateProxy) {
|
||||
if ( x instanceof HibernateProxy ) {
|
||||
xid = ( (HibernateProxy) x ).getHibernateLazyInitializer()
|
||||
.getIdentifier();
|
||||
}
|
||||
|
@ -420,7 +428,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
}
|
||||
|
||||
Serializable yid;
|
||||
if (y instanceof HibernateProxy) {
|
||||
if ( y instanceof HibernateProxy ) {
|
||||
yid = ( (HibernateProxy) y ).getHibernateLazyInitializer()
|
||||
.getIdentifier();
|
||||
}
|
||||
|
@ -435,7 +443,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
}
|
||||
|
||||
return persister.getIdentifierType()
|
||||
.isEqual(xid, yid, factory);
|
||||
.isEqual( xid, yid, factory );
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -451,7 +459,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
@Override
|
||||
public Object fromXMLNode(Node xml, Mapping factory) throws HibernateException {
|
||||
if ( !isEmbeddedInXML ) {
|
||||
return getIdentifierType(factory).fromXMLNode(xml, factory);
|
||||
return getIdentifierType( factory ).fromXMLNode( xml, factory );
|
||||
}
|
||||
else {
|
||||
return xml;
|
||||
|
@ -461,11 +469,11 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
@Override
|
||||
public void setToXMLNode(Node node, Object value, SessionFactoryImplementor factory) throws HibernateException {
|
||||
if ( !isEmbeddedInXML ) {
|
||||
getIdentifierType(factory).setToXMLNode(node, value, factory);
|
||||
getIdentifierType( factory ).setToXMLNode( node, value, factory );
|
||||
}
|
||||
else {
|
||||
Element elt = (Element) value;
|
||||
replaceNode( node, new ElementWrapper(elt) );
|
||||
replaceNode( node, new ElementWrapper( elt ) );
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -505,7 +513,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
return loadByUniqueKey( getAssociatedEntityName(), uniqueKeyPropertyName, value, session );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
|
@ -529,12 +537,16 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
}
|
||||
|
||||
protected final Object getIdentifier(Object value, SessionImplementor session) throws HibernateException {
|
||||
if ( isNotEmbedded(session) ) {
|
||||
if ( isNotEmbedded( session ) ) {
|
||||
return value;
|
||||
}
|
||||
|
||||
if ( isReferenceToPrimaryKey() || uniqueKeyPropertyName == null ) {
|
||||
return ForeignKeys.getEntityIdentifierIfNotUnsaved( getAssociatedEntityName(), value, session ); //tolerates nulls
|
||||
return ForeignKeys.getEntityIdentifierIfNotUnsaved(
|
||||
getAssociatedEntityName(),
|
||||
value,
|
||||
session
|
||||
); //tolerates nulls
|
||||
}
|
||||
else if ( value == null ) {
|
||||
return null;
|
||||
|
@ -547,7 +559,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
// an entity type, in which case we need to resolve its identitifier
|
||||
Type type = entityPersister.getPropertyType( uniqueKeyPropertyName );
|
||||
if ( type.isEntityType() ) {
|
||||
propertyValue = ( ( EntityType ) type ).getIdentifier( propertyValue, session );
|
||||
propertyValue = ( (EntityType) type ).getIdentifier( propertyValue, session );
|
||||
}
|
||||
|
||||
return propertyValue;
|
||||
|
@ -569,7 +581,9 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
*
|
||||
* @param value The instance to be logged.
|
||||
* @param factory The session factory.
|
||||
*
|
||||
* @return The loggable string.
|
||||
*
|
||||
* @throws HibernateException Generally some form of resolution problem.
|
||||
*/
|
||||
@Override
|
||||
|
@ -577,7 +591,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
if ( value == null ) {
|
||||
return "null";
|
||||
}
|
||||
|
||||
|
||||
EntityPersister persister = getAssociatedEntityPersister( factory );
|
||||
StringBuilder result = new StringBuilder().append( associatedEntityName );
|
||||
|
||||
|
@ -588,19 +602,20 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
if ( isEmbeddedInXML ) {
|
||||
throw new ClassCastException( value.getClass().getName() );
|
||||
}
|
||||
id = ( Serializable ) value;
|
||||
} else if ( value instanceof HibernateProxy ) {
|
||||
HibernateProxy proxy = ( HibernateProxy ) value;
|
||||
id = (Serializable) value;
|
||||
}
|
||||
else if ( value instanceof HibernateProxy ) {
|
||||
HibernateProxy proxy = (HibernateProxy) value;
|
||||
id = proxy.getHibernateLazyInitializer().getIdentifier();
|
||||
}
|
||||
else {
|
||||
id = persister.getIdentifier( value );
|
||||
}
|
||||
|
||||
|
||||
result.append( '#' )
|
||||
.append( persister.getIdentifierType().toLoggableString( id, factory ) );
|
||||
.append( persister.getIdentifierType().toLoggableString( id, factory ) );
|
||||
}
|
||||
|
||||
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
|
@ -624,6 +639,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
* Convenience method to locate the identifier type of the associated entity.
|
||||
*
|
||||
* @param factory The mappings...
|
||||
*
|
||||
* @return The identifier type
|
||||
*/
|
||||
Type getIdentifierType(final Mapping factory) {
|
||||
|
@ -644,6 +660,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
* Convenience method to locate the identifier type of the associated entity.
|
||||
*
|
||||
* @param session The originating session
|
||||
*
|
||||
* @return The identifier type
|
||||
*/
|
||||
Type getIdentifierType(final SessionImplementor session) {
|
||||
|
@ -663,18 +680,20 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
* the property-ref).
|
||||
*
|
||||
* @param factory The mappings...
|
||||
*
|
||||
* @return The appropriate type.
|
||||
*
|
||||
* @throws MappingException Generally, if unable to resolve the associated entity name
|
||||
* or unique key property name.
|
||||
*/
|
||||
public final Type getIdentifierOrUniqueKeyType(Mapping factory) throws MappingException {
|
||||
if ( isReferenceToPrimaryKey() || uniqueKeyPropertyName == null ) {
|
||||
return getIdentifierType(factory);
|
||||
return getIdentifierType( factory );
|
||||
}
|
||||
else {
|
||||
Type type = factory.getReferencedPropertyType( getAssociatedEntityName(), uniqueKeyPropertyName );
|
||||
if ( type.isEntityType() ) {
|
||||
type = ( ( EntityType ) type).getIdentifierOrUniqueKeyType( factory );
|
||||
type = ( (EntityType) type ).getIdentifierOrUniqueKeyType( factory );
|
||||
}
|
||||
return type;
|
||||
}
|
||||
|
@ -685,11 +704,13 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
* refers
|
||||
*
|
||||
* @param factory The mappings...
|
||||
*
|
||||
* @return The appropriate property name.
|
||||
*
|
||||
* @throws MappingException Generally, if unable to resolve the associated entity name
|
||||
*/
|
||||
public final String getIdentifierOrUniqueKeyPropertyName(Mapping factory)
|
||||
throws MappingException {
|
||||
throws MappingException {
|
||||
if ( isReferenceToPrimaryKey() || uniqueKeyPropertyName == null ) {
|
||||
return factory.getIdentifierPropertyName( getAssociatedEntityName() );
|
||||
}
|
||||
|
@ -697,7 +718,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
return uniqueKeyPropertyName;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
protected abstract boolean isNullable();
|
||||
|
||||
/**
|
||||
|
@ -705,7 +726,9 @@ public abstract class EntityType extends AbstractType implements AssociationType
|
|||
*
|
||||
* @param id The entity id to resolve
|
||||
* @param session The orginating session.
|
||||
*
|
||||
* @return The resolved identifier (i.e., loaded entity).
|
||||
*
|
||||
* @throws org.hibernate.HibernateException Indicates problems performing the load.
|
||||
*/
|
||||
protected final Object resolveIdentifier(Serializable id, SessionImplementor session) throws HibernateException {
|
||||
|
@@ -721,7 +744,7 @@ public abstract class EntityType extends AbstractType implements AssociationType
		);

		if ( proxyOrEntity instanceof HibernateProxy ) {
			( ( HibernateProxy ) proxyOrEntity ).getHibernateLazyInitializer()
			( (HibernateProxy) proxyOrEntity ).getHibernateLazyInitializer()
					.setUnwrap( isProxyUnwrapEnabled );
		}
@@ -739,23 +762,25 @@ public abstract class EntityType extends AbstractType implements AssociationType
	 * @param uniqueKeyPropertyName The name of the property defining the unique key.
	 * @param key The unique key property value.
	 * @param session The originating session.
	 *
	 * @return The loaded entity
	 *
	 * @throws HibernateException generally indicates problems performing the load.
	 */
	public Object loadByUniqueKey(
			String entityName,
			String uniqueKeyPropertyName,
			Object key,
			String entityName,
			String uniqueKeyPropertyName,
			Object key,
			SessionImplementor session) throws HibernateException {
		final SessionFactoryImplementor factory = session.getFactory();
		UniqueKeyLoadable persister = ( UniqueKeyLoadable ) factory.getEntityPersister( entityName );
		UniqueKeyLoadable persister = (UniqueKeyLoadable) factory.getEntityPersister( entityName );

		//TODO: implement caching?! proxies?!

		EntityUniqueKey euk = new EntityUniqueKey(
				entityName,
				uniqueKeyPropertyName,
				key,
				entityName,
				uniqueKeyPropertyName,
				key,
				getIdentifierOrUniqueKeyType( factory ),
				persister.getEntityMode(),
				session.getFactory()
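
The EntityUniqueKey built above appears to serve as a composite lookup key: entity name, unique-key property name and key value bundled into one object the session can hash and compare. A minimal stand-in sketch of that idea in plain Java; UniqueKey here is a hypothetical class for illustration, not Hibernate's EntityUniqueKey.

    import java.io.Serializable;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.Objects;

    public class UniqueKeyDemo {
        // Composite key: equality and hashing over all three components, so it can
        // be used directly as a map key for "entity loaded by unique key" lookups.
        static final class UniqueKey implements Serializable {
            final String entityName;
            final String propertyName;
            final Object value;

            UniqueKey(String entityName, String propertyName, Object value) {
                this.entityName = entityName;
                this.propertyName = propertyName;
                this.value = value;
            }

            @Override
            public boolean equals(Object o) {
                if ( !(o instanceof UniqueKey) ) {
                    return false;
                }
                UniqueKey other = (UniqueKey) o;
                return entityName.equals( other.entityName )
                        && propertyName.equals( other.propertyName )
                        && Objects.equals( value, other.value );
            }

            @Override
            public int hashCode() {
                return Objects.hash( entityName, propertyName, value );
            }
        }

        public static void main(String[] args) {
            Map<UniqueKey, Object> loadedByUniqueKey = new HashMap<>();
            loadedByUniqueKey.put( new UniqueKey( "User", "email", "jane@example.org" ), "user#42" );
            // an equal key built later finds the same entry
            System.out.println( loadedByUniqueKey.get( new UniqueKey( "User", "email", "jane@example.org" ) ) );
        }
    }
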
@@ -37,6 +37,7 @@ import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.usertype.DynamicParameterizedType;
@@ -69,7 +70,7 @@ import org.jboss.logging.Logger;
 */
@SuppressWarnings("unchecked")
public class EnumType implements EnhancedUserType, DynamicParameterizedType,LoggableUserType, Serializable {
	private static final Logger LOG = Logger.getLogger( EnumType.class.getName() );
	private static final Logger LOG = CoreLogging.logger( EnumType.class );

	public static final String ENUM = "enumClass";
	public static final String NAMED = "useNamed";
@@ -458,9 +459,9 @@ public class EnumType implements EnhancedUserType, DynamicParameterizedType,Logg

	private Enum fromName(String name) {
		try {
			if(name == null) {
				return null;
			}
			if (name == null) {
				return null;
			}
			return Enum.valueOf( enumClass, name.trim() );
		}
		catch ( IllegalArgumentException iae ) {
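
For context, a minimal sketch of the same null-safe name lookup outside Hibernate. The enum class, the helper name and the behaviour inside the catch block are illustrative assumptions, since the diff cuts off inside that catch.

    public class EnumParseDemo {
        enum Status { ACTIVE, INACTIVE }

        // Null-safe lookup in the spirit of EnumType#fromName: trim the raw value and
        // let Enum.valueOf reject unknown names with IllegalArgumentException.
        static <E extends Enum<E>> E fromName(Class<E> enumClass, String name) {
            if ( name == null ) {
                return null;
            }
            try {
                return Enum.valueOf( enumClass, name.trim() );
            }
            catch (IllegalArgumentException iae) {
                // the real catch body is not shown in the hunk; here we simply re-throw with context
                throw new IllegalArgumentException(
                        "Unknown name [" + name + "] for enum " + enumClass.getName(), iae );
            }
        }

        public static void main(String[] args) {
            System.out.println( fromName( Status.class, " ACTIVE " ) );   // ACTIVE
            System.out.println( fromName( Status.class, null ) );         // null
        }
    }
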
@@ -56,14 +56,16 @@ public class SerializableToBlobType<T extends Serializable> extends AbstractSing
		ParameterType reader = (ParameterType) parameters.get( PARAMETER_TYPE );
		if ( reader != null ) {
			setJavaTypeDescriptor( new SerializableTypeDescriptor<T>( reader.getReturnedClass() ) );
		} else {
		}
		else {
			String className = parameters.getProperty( CLASS_NAME );
			if ( className == null ) {
				throw new MappingException( "No class name defined for type: " + SerializableToBlobType.class.getName() );
			}
			try {
				setJavaTypeDescriptor( new SerializableTypeDescriptor<T>( ReflectHelper.classForName( className ) ) );
			} catch ( ClassNotFoundException e ) {
			}
			catch ( ClassNotFoundException e ) {
				throw new MappingException( "Unable to load class from " + CLASS_NAME + " parameter", e );
			}
		}
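
The else branch above resolves the target class from a configuration parameter and wraps ClassNotFoundException in a mapping error. A rough equivalent using only the JDK follows; the parameter name and the exception type are placeholders, not Hibernate's.

    import java.util.Properties;

    public class ClassNameParameterDemo {
        // Read a class name from type parameters and fail descriptively if it cannot be loaded.
        static Class<?> resolveConfiguredClass(Properties parameters, String parameterName) {
            String className = parameters.getProperty( parameterName );
            if ( className == null ) {
                throw new IllegalArgumentException( "No class name defined under parameter: " + parameterName );
            }
            try {
                return Class.forName( className );
            }
            catch (ClassNotFoundException e) {
                throw new IllegalArgumentException( "Unable to load class from " + parameterName + " parameter", e );
            }
        }

        public static void main(String[] args) {
            Properties parameters = new Properties();
            parameters.setProperty( "classToSerialize", "java.time.LocalDate" );
            System.out.println( resolveConfiguredClass( parameters, "classToSerialize" ) );
        }
    }
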
@@ -60,8 +60,8 @@ public class SetType extends CollectionType {

	public Object instantiate(int anticipatedSize) {
		return anticipatedSize <= 0
		? new HashSet()
		: new HashSet( anticipatedSize + (int)( anticipatedSize * .75f ), .75f );
				? new HashSet()
				: new HashSet( anticipatedSize + (int)( anticipatedSize * .75f ), .75f );
	}

}
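
The constructor arguments in instantiate() are chosen so that anticipatedSize elements stay under the 0.75 load factor, avoiding a rehash while the set is being filled. A small, self-contained illustration of the same sizing arithmetic (the helper and class names are invented for the example):

    import java.util.HashSet;
    import java.util.Set;

    public class SetSizingDemo {
        // Initial capacity = anticipatedSize + 75% headroom, with the matching 0.75 load factor,
        // so adding 'anticipatedSize' elements should not trigger an intermediate rehash.
        static <E> Set<E> presizedSet(int anticipatedSize) {
            return anticipatedSize <= 0
                    ? new HashSet<E>()
                    : new HashSet<E>( anticipatedSize + (int) ( anticipatedSize * .75f ), .75f );
        }

        public static void main(String[] args) {
            Set<String> names = presizedSet( 100 );
            for ( int i = 0; i < 100; i++ ) {
                names.add( "name-" + i );
            }
            System.out.println( names.size() ); // 100
        }
    }
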
@@ -38,22 +38,21 @@ import org.hibernate.usertype.CompositeUserType;
import org.hibernate.usertype.ParameterizedType;
import org.hibernate.usertype.UserType;

import org.jboss.logging.Logger;
import static org.hibernate.internal.CoreLogging.messageLogger;

/**
 * Used internally to build instances of {@link Type}, specifically it builds instances of
 *
 *
 * <p/>
 * <p/>
 * Used internally to obtain instances of <tt>Type</tt>. Applications should use static methods
 * and constants on <tt>org.hibernate.Hibernate</tt>.
 *
 * @author Gavin King
 * @author Steve Ebersole
 */
@SuppressWarnings({ "unchecked" })
@SuppressWarnings({"unchecked"})
public final class TypeFactory implements Serializable {

	private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, TypeFactory.class.getName());
	private static final CoreMessageLogger LOG = messageLogger( TypeFactory.class );

	private final TypeScopeImpl typeScope = new TypeScopeImpl();
@@ -135,10 +134,10 @@ public final class TypeFactory implements Serializable {
				( (ParameterizedType) type ).setParameterValues( EMPTY_PROPERTIES );
			}
			else {
				( (ParameterizedType) type ).setParameterValues(parameters);
				( (ParameterizedType) type ).setParameterValues( parameters );
			}
		}
		else if ( parameters!=null && !parameters.isEmpty() ) {
		else if ( parameters != null && !parameters.isEmpty() ) {
			throw new MappingException( "type is not parameterized: " + type.getClass().getName() );
		}
	}
@@ -151,14 +150,17 @@ public final class TypeFactory implements Serializable {
	 * @deprecated Only for temporary use by {@link org.hibernate.Hibernate}
	 */
	@Deprecated
	@SuppressWarnings({ "JavaDoc" })
	public static CompositeCustomType customComponent(Class<CompositeUserType> typeClass, Properties parameters, TypeScope scope) {
	@SuppressWarnings({"JavaDoc"})
	public static CompositeCustomType customComponent(
			Class<CompositeUserType> typeClass,
			Properties parameters,
			TypeScope scope) {
		try {
			CompositeUserType userType = typeClass.newInstance();
			injectParameters( userType, parameters );
			return new CompositeCustomType( userType );
		}
		catch ( Exception e ) {
		catch (Exception e) {
			throw new MappingException( "Unable to instantiate custom type: " + typeClass.getName(), e );
		}
	}
@@ -179,7 +181,7 @@ public final class TypeFactory implements Serializable {
		try {
			typeClass = ReflectHelper.classForName( typeName );
		}
		catch ( ClassNotFoundException cnfe ) {
		catch (ClassNotFoundException cnfe) {
			throw new MappingException( "user collection type class not found: " + typeName, cnfe );
		}
		CustomCollectionType result = new CustomCollectionType( typeScope, typeClass, role, propertyRef, embedded );
@@ -198,7 +200,7 @@ public final class TypeFactory implements Serializable {
		try {
			typeClass = ReflectHelper.classForName( typeName );
		}
		catch ( ClassNotFoundException cnfe ) {
		catch (ClassNotFoundException cnfe) {
			throw new MappingException( "user collection type class not found: " + typeName, cnfe );
		}
		CustomCollectionType result = new CustomCollectionType( typeScope, typeClass, role, propertyRef );
@@ -216,13 +218,13 @@ public final class TypeFactory implements Serializable {
	 * @deprecated Only for temporary use by {@link org.hibernate.Hibernate}
	 */
	@Deprecated
	public static CustomType custom(Class<UserType> typeClass, Properties parameters, TypeScope scope) {
	public static CustomType custom(Class<UserType> typeClass, Properties parameters, TypeScope scope) {
		try {
			UserType userType = typeClass.newInstance();
			injectParameters( userType, parameters );
			return new CustomType( userType );
		}
		catch ( Exception e ) {
		catch (Exception e) {
			throw new MappingException( "Unable to instantiate custom type: " + typeClass.getName(), e );
		}
	}
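
custom() and customComponent() follow the same reflective pattern: instantiate the user-supplied type class, push any configuration parameters into it, then wrap it in an adapter. A simplified sketch of that pattern under stated assumptions: Configurable is a made-up stand-in for Hibernate's ParameterizedType, the wrapper step is omitted, and the modern getDeclaredConstructor() call replaces the deprecated newInstance() seen in the diff.

    import java.util.Properties;

    public class CustomTypeDemo {
        // Hypothetical interface: only types implementing it receive the parameters.
        interface Configurable {
            void setParameterValues(Properties parameters);
        }

        static <T> T instantiateCustomType(Class<T> typeClass, Properties parameters) {
            try {
                T instance = typeClass.getDeclaredConstructor().newInstance();
                if ( instance instanceof Configurable && parameters != null && !parameters.isEmpty() ) {
                    ( (Configurable) instance ).setParameterValues( parameters );
                }
                return instance;
            }
            catch (Exception e) {
                throw new IllegalStateException( "Unable to instantiate custom type: " + typeClass.getName(), e );
            }
        }

        public static void main(String[] args) {
            StringBuilder sb = instantiateCustomType( StringBuilder.class, new Properties() );
            System.out.println( sb.append( "instantiated" ) );
        }
    }
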
@@ -242,40 +244,6 @@ public final class TypeFactory implements Serializable {

	// one-to-one type builders ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

	/**
	 * @deprecated Use {@link #oneToOne(String, ForeignKeyDirection, String, boolean, boolean, String, String, boolean)} instead.
	 * See Jira issue: <a href="https://hibernate.onjira.com/browse/HHH-7771">HHH-7771</a>
	 */
	@Deprecated
	public EntityType oneToOne(
			String persistentClass,
			ForeignKeyDirection foreignKeyType,
			String uniqueKeyPropertyName,
			boolean lazy,
			boolean unwrapProxy,
			boolean isEmbeddedInXML,
			String entityName,
			String propertyName) {
		return oneToOne( persistentClass, foreignKeyType, uniqueKeyPropertyName == null, uniqueKeyPropertyName, lazy, unwrapProxy, entityName,
				propertyName );
	}

	/**
	 * @deprecated Use {@link #oneToOne(String, ForeignKeyDirection, String, boolean, boolean, String, String, boolean)} instead.
	 */
	@Deprecated
	public EntityType oneToOne(
			String persistentClass,
			ForeignKeyDirection foreignKeyType,
			String uniqueKeyPropertyName,
			boolean lazy,
			boolean unwrapProxy,
			String entityName,
			String propertyName) {
		return oneToOne( persistentClass, foreignKeyType, uniqueKeyPropertyName == null, uniqueKeyPropertyName, lazy, unwrapProxy, entityName,
				propertyName );
	}

	public EntityType oneToOne(
			String persistentClass,
			ForeignKeyDirection foreignKeyType,
@@ -285,24 +253,10 @@ public final class TypeFactory implements Serializable {
			boolean unwrapProxy,
			String entityName,
			String propertyName) {
		return new OneToOneType( typeScope, persistentClass, foreignKeyType, referenceToPrimaryKey,
				uniqueKeyPropertyName, lazy, unwrapProxy, entityName, propertyName );
	}

	/**
	 * @deprecated Use {@link #specialOneToOne(String, ForeignKeyDirection, String, boolean, boolean, String, String, boolean)} instead.
	 */
	@Deprecated
	public EntityType specialOneToOne(
			String persistentClass,
			ForeignKeyDirection foreignKeyType,
			String uniqueKeyPropertyName,
			boolean lazy,
			boolean unwrapProxy,
			String entityName,
			String propertyName) {
		return specialOneToOne( persistentClass, foreignKeyType, uniqueKeyPropertyName == null, uniqueKeyPropertyName, lazy, unwrapProxy,
				entityName, propertyName );
		return new OneToOneType(
				typeScope, persistentClass, foreignKeyType, referenceToPrimaryKey,
				uniqueKeyPropertyName, lazy, unwrapProxy, entityName, propertyName
		);
	}

	public EntityType specialOneToOne(
@@ -314,8 +268,10 @@ public final class TypeFactory implements Serializable {
			boolean unwrapProxy,
			String entityName,
			String propertyName) {
		return new SpecialOneToOneType( typeScope, persistentClass, foreignKeyType, referenceToPrimaryKey,
				uniqueKeyPropertyName, lazy, unwrapProxy, entityName, propertyName );
		return new SpecialOneToOneType(
				typeScope, persistentClass, foreignKeyType, referenceToPrimaryKey,
				uniqueKeyPropertyName, lazy, unwrapProxy, entityName, propertyName
		);
	}
@@ -342,8 +298,15 @@ public final class TypeFactory implements Serializable {
			boolean isEmbeddedInXML,
			boolean ignoreNotFound,
			boolean isLogicalOneToOne) {
		return manyToOne( persistentClass, uniqueKeyPropertyName == null, uniqueKeyPropertyName, lazy, unwrapProxy, ignoreNotFound,
				isLogicalOneToOne );
		return manyToOne(
				persistentClass,
				uniqueKeyPropertyName == null,
				uniqueKeyPropertyName,
				lazy,
				unwrapProxy,
				ignoreNotFound,
				isLogicalOneToOne
		);
	}

	/**
@@ -357,8 +320,15 @@ public final class TypeFactory implements Serializable {
			boolean unwrapProxy,
			boolean ignoreNotFound,
			boolean isLogicalOneToOne) {
		return manyToOne( persistentClass, uniqueKeyPropertyName == null, uniqueKeyPropertyName, lazy, unwrapProxy, ignoreNotFound,
				isLogicalOneToOne );
		return manyToOne(
				persistentClass,
				uniqueKeyPropertyName == null,
				uniqueKeyPropertyName,
				lazy,
				unwrapProxy,
				ignoreNotFound,
				isLogicalOneToOne
		);
	}

	public EntityType manyToOne(
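
The deprecated oneToOne/manyToOne overloads above all reduce to the same shape: the old signature forwards to the canonical one and derives the extra referenceToPrimaryKey flag from uniqueKeyPropertyName == null. A toy illustration of that delegation pattern; the method names and return type are invented for the example and are not part of TypeFactory.

    public class OverloadDelegationDemo {
        // Old signature: keeps compiling for callers, but simply forwards,
        // computing the new boolean argument from what it already has.
        @Deprecated
        static String describeAssociation(String entityName, String uniqueKeyPropertyName) {
            return describeAssociation( entityName, uniqueKeyPropertyName == null, uniqueKeyPropertyName );
        }

        // Canonical signature with the explicit flag.
        static String describeAssociation(String entityName, boolean referenceToPrimaryKey, String uniqueKeyPropertyName) {
            return referenceToPrimaryKey
                    ? entityName + " -> primary key"
                    : entityName + " -> " + uniqueKeyPropertyName;
        }

        public static void main(String[] args) {
            System.out.println( describeAssociation( "Order", null ) );          // Order -> primary key
            System.out.println( describeAssociation( "Order", "orderNumber" ) ); // Order -> orderNumber
        }
    }
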
@@ -27,7 +27,6 @@ import java.io.Serializable;
import java.util.Properties;

import org.hibernate.MappingException;
import org.hibernate.classic.Lifecycle;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.usertype.CompositeUserType;
@@ -105,7 +104,7 @@ public class TypeResolver implements Serializable {
	 * look for 'typeName' as a class name and<ol>
	 * <li>if it names a {@link Type} implementor, return an instance</li>
	 * <li>if it names a {@link CompositeUserType} or a {@link UserType}, return an instance of class wrapped into the appropriate {@link Type} adapter</li>
	 * <li>if it implements {@link Lifecycle}, return the corresponding entity type</li>
	 * <li>if it implements {@link org.hibernate.classic.Lifecycle}, return the corresponding entity type</li>
	 * <li>if it implements {@link Serializable}, return the corresponding serializable type</li>
	 * </ol>
	 * </li>

@@ -29,16 +29,17 @@ import java.util.HashMap;
import java.util.Map;

import org.hibernate.HibernateException;
import org.hibernate.internal.CoreLogging;
import org.hibernate.internal.CoreMessageLogger;

import static org.hibernate.internal.CoreLogging.messageLogger;

/**
 * (Badly named) helper for dealing with standard JDBC types as defined by {@link java.sql.Types}
 *
 * @author Steve Ebersole
 */
public final class JdbcTypeNameMapper {
	private static final CoreMessageLogger LOG = CoreLogging.messageLogger( JdbcTypeNameMapper.class );
	private static final CoreMessageLogger LOG = messageLogger( JdbcTypeNameMapper.class );

	private static Map<Integer,String> JDBC_TYPE_MAP = buildJdbcTypeMap();
@@ -52,7 +53,7 @@ public final class JdbcTypeNameMapper {
			try {
				final int code = field.getInt( null );
				String old = map.put( code, field.getName() );
				if ( old != null ) {
				if ( old != null ) {
					LOG.JavaSqlTypesMappedSameCodeMultipleTimes( code, old, field.getName() );
				}
			}
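
The loop partially shown above belongs to buildJdbcTypeMap(): it walks the constants of java.sql.Types reflectively and records a code-to-name mapping, warning when two constants share a code. A free-standing version of the same idea, logging duplicates to stderr instead of the Hibernate logger:

    import java.lang.reflect.Field;
    import java.sql.Types;
    import java.util.HashMap;
    import java.util.Map;

    public class JdbcTypeNameDemo {
        // Every public field of java.sql.Types is a public static final int type code.
        static Map<Integer, String> buildJdbcTypeMap() {
            Map<Integer, String> map = new HashMap<>();
            for ( Field field : Types.class.getFields() ) {
                try {
                    final int code = field.getInt( null );
                    String old = map.put( code, field.getName() );
                    if ( old != null ) {
                        System.err.println( "JDBC code " + code + " mapped by both " + old + " and " + field.getName() );
                    }
                }
                catch (IllegalAccessException e) {
                    throw new IllegalStateException( "Unable to access JDBC type constant", e );
                }
            }
            return map;
        }

        public static void main(String[] args) {
            System.out.println( buildJdbcTypeMap().get( Types.VARCHAR ) ); // VARCHAR
        }
    }
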
@@ -34,8 +34,6 @@ import org.hibernate.type.descriptor.ValueBinder;
import org.hibernate.type.descriptor.ValueExtractor;
import org.hibernate.type.descriptor.WrapperOptions;
import org.hibernate.type.descriptor.java.JavaTypeDescriptor;
import org.hibernate.type.descriptor.sql.BasicBinder;
import org.hibernate.type.descriptor.sql.BasicExtractor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;

import org.jboss.logging.Logger;

@@ -55,7 +55,7 @@ public class SerializableTypeDescriptor<T extends Serializable> extends Abstract
	}

	@Override
	@SuppressWarnings({ "unchecked" })
	@SuppressWarnings({ "unchecked" })
	public S deepCopyNotNull(S value) {
		return (S) SerializationHelper.clone( value );
	}

@@ -40,10 +40,10 @@ import org.jboss.logging.Logger;
 * @author Steve Ebersole
 */
public abstract class BasicBinder<J> implements ValueBinder<J> {
	private static final Logger log = CoreLogging.logger( BasicBinder.class );
	private static final Logger log = CoreLogging.logger( BasicBinder.class );

	private static final String BIND_MSG_TEMPLATE = "binding parameter [%s] as [%s] - [%s]";
	private static final String NULL_BIND_MSG_TEMPLATE = "binding parameter [%s] as [%s] - [null]";
	private static final String BIND_MSG_TEMPLATE = "binding parameter [%s] as [%s] - [%s]";
	private static final String NULL_BIND_MSG_TEMPLATE = "binding parameter [%s] as [%s] - [null]";

	private final JavaTypeDescriptor<J> javaDescriptor;
	private final SqlTypeDescriptor sqlDescriptor;
@@ -63,22 +63,22 @@ public abstract class BasicBinder<J> implements ValueBinder<J> {

	@Override
	public final void bind(PreparedStatement st, J value, int index, WrapperOptions options) throws SQLException {
		final boolean traceEnabled = log.isTraceEnabled();
		if ( value == null ) {
			if ( traceEnabled ) {
				log.trace(
		final boolean traceEnabled = log.isTraceEnabled();
		if ( value == null ) {
			if ( traceEnabled ) {
				log.trace(
						String.format(
								NULL_BIND_MSG_TEMPLATE,
								index,
								JdbcTypeNameMapper.getTypeName( getSqlDescriptor().getSqlType() )
						)
				);
			}
			st.setNull( index, sqlDescriptor.getSqlType() );
		}
		else {
			if ( traceEnabled ) {
				log.trace(
			}
			st.setNull( index, sqlDescriptor.getSqlType() );
		}
		else {
			if ( traceEnabled ) {
				log.trace(
						String.format(
								BIND_MSG_TEMPLATE,
								index,
@@ -86,9 +86,9 @@ public abstract class BasicBinder<J> implements ValueBinder<J> {
								getJavaDescriptor().extractLoggableRepresentation( value )
						)
				);
			}
			doBind( st, value, index, options );
		}
	}
			}
			doBind( st, value, index, options );
		}
	}

	/**
@@ -101,5 +101,6 @@ public abstract class BasicBinder<J> implements ValueBinder<J> {
	 *
	 * @throws SQLException Indicates a problem binding to the prepared statement.
	 */
	protected abstract void doBind(PreparedStatement st, J value, int index, WrapperOptions options) throws SQLException;
	protected abstract void doBind(PreparedStatement st, J value, int index, WrapperOptions options)
			throws SQLException;
}
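
bind() splits on null: null values go through setNull with the JDBC type code, everything else is delegated to doBind, with optional trace logging on both paths. A stripped-down sketch of the same branch structure against plain JDBC; the bindString helper and the H2 in-memory URL in main are assumptions made only so the snippet can run on its own.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import java.sql.Types;

    public class NullSafeBindDemo {
        // Null-safe binding: setNull with the type code for null, typed setter otherwise.
        static void bindString(PreparedStatement st, String value, int index) throws SQLException {
            if ( value == null ) {
                st.setNull( index, Types.VARCHAR );
            }
            else {
                st.setString( index, value );
            }
        }

        public static void main(String[] args) throws SQLException {
            // Assumes an H2 driver on the classpath purely for the demo.
            try ( Connection connection = DriverManager.getConnection( "jdbc:h2:mem:demo" ) ) {
                connection.createStatement().execute( "create table person (name varchar(50))" );
                try ( PreparedStatement st = connection.prepareStatement( "insert into person (name) values (?)" ) ) {
                    bindString( st, null, 1 );
                    st.executeUpdate();
                }
            }
        }
    }
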
@@ -57,9 +57,9 @@ public abstract class ClobTypeDescriptor implements SqlTypeDescriptor {
	public <X> ValueExtractor<X> getExtractor(final JavaTypeDescriptor<X> javaTypeDescriptor) {
		return new BasicExtractor<X>( javaTypeDescriptor, this ) {
			@Override
			protected X doExtract(ResultSet rs, String name, WrapperOptions options) throws SQLException {
				return javaTypeDescriptor.wrap( rs.getClob( name ), options );
			}
			protected X doExtract(ResultSet rs, String name, WrapperOptions options) throws SQLException {
				return javaTypeDescriptor.wrap( rs.getClob( name ), options );
			}

			@Override
			protected X doExtract(CallableStatement statement, int index, WrapperOptions options)
@@ -78,7 +78,7 @@ public abstract class ClobTypeDescriptor implements SqlTypeDescriptor {
	protected abstract <X> BasicBinder<X> getClobBinder(JavaTypeDescriptor<X> javaTypeDescriptor);

	@Override
	public <X> ValueBinder<X> getBinder(JavaTypeDescriptor<X> javaTypeDescriptor) {
	public <X> ValueBinder<X> getBinder(JavaTypeDescriptor<X> javaTypeDescriptor) {
		return getClobBinder( javaTypeDescriptor );
	}
@@ -88,7 +88,8 @@ public abstract class ClobTypeDescriptor implements SqlTypeDescriptor {
	public <X> BasicBinder<X> getClobBinder(final JavaTypeDescriptor<X> javaTypeDescriptor) {
		return new BasicBinder<X>( javaTypeDescriptor, this ) {
			@Override
			protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options) throws SQLException {
			protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
					throws SQLException {
				if ( options.useStreamForLobBinding() ) {
					STREAM_BINDING.getClobBinder( javaTypeDescriptor ).doBind( st, value, index, options );
				}
@@ -120,7 +121,11 @@ public abstract class ClobTypeDescriptor implements SqlTypeDescriptor {
			@Override
			protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
					throws SQLException {
				final CharacterStream characterStream = javaTypeDescriptor.unwrap( value, CharacterStream.class, options );
				final CharacterStream characterStream = javaTypeDescriptor.unwrap(
						value,
						CharacterStream.class,
						options
				);
				st.setCharacterStream( index, characterStream.asReader(), characterStream.getLength() );
			}
		};
@@ -134,7 +139,11 @@ public abstract class ClobTypeDescriptor implements SqlTypeDescriptor {
			@Override
			protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
					throws SQLException {
				final CharacterStream characterStream = javaTypeDescriptor.unwrap( value, CharacterStream.class, options );
				final CharacterStream characterStream = javaTypeDescriptor.unwrap(
						value,
						CharacterStream.class,
						options
				);
				st.setCharacterStream( index, characterStream.asReader(), characterStream.getLength() );
			}
		};
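
Both stream-binding variants unwrap the value to a CharacterStream and hand a Reader plus a known length to setCharacterStream. The same JDBC call with a plain StringReader and no Hibernate wrapper is sketched below, again assuming an H2 in-memory database only to make the snippet runnable.

    import java.io.StringReader;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;

    public class ClobStreamBindDemo {
        // Stream-based CLOB binding: a Reader over the value plus its length in characters.
        static void bindClob(PreparedStatement st, int index, String value) throws SQLException {
            st.setCharacterStream( index, new StringReader( value ), (long) value.length() );
        }

        public static void main(String[] args) throws SQLException {
            // Assumes an H2 driver on the classpath purely for the demo.
            try ( Connection connection = DriverManager.getConnection( "jdbc:h2:mem:demo" ) ) {
                connection.createStatement().execute( "create table note (body clob)" );
                try ( PreparedStatement st = connection.prepareStatement( "insert into note (body) values (?)" ) ) {
                    bindClob( st, 1, "a fairly long text value" );
                    st.executeUpdate();
                }
            }
        }
    }
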
Some files were not shown because too many files have changed in this diff.