HHH-2826 : (HQL) <component> is [not] null
git-svn-id: https://svn.jboss.org/repos/hibernate/core/branches/Branch_3_2@14097 1b8cb986-b30d-0410-93ca-fae66ebed9b2
commit 45e29dd47c (parent 0fe95705fe)
@@ -1005,72 +1005,122 @@ public class StatefulPersistenceContext implements PersistenceContext {
|
|||
}
|
||||
|
||||
/**
|
||||
* Search the persistence context for an owner for the child object,
|
||||
* given a collection role. If <tt>mergeMap</tt> is non-null, also
|
||||
* check the detached graph being merged for a parent.
|
||||
* Search <tt>this</tt> persistence context for an associated entity instance which is considered the "owner" of
|
||||
* the given <tt>childEntity</tt>, and return that owner's id value. This is performed in the scenario of a
|
||||
* uni-directional, non-inverse one-to-many collection (which means that the collection elements do not maintain
|
||||
* a direct reference to the owner).
|
||||
* <p/>
|
||||
* As such, the processing here is basically to loop over every entity currently associated with this persistence
|
||||
* context and for those of the correct entity (sub) type to extract its collection role property value and see
|
||||
* if the child is contained within that collection. If so, we have found the owner; if not, we go on.
|
||||
* <p/>
|
||||
* Also need to account for <tt>mergeMap</tt> which acts as a local copy cache managed for the duration of a merge
|
||||
* operation. It represents a map of the detached entity instances pointing to the corresponding managed instance.
|
||||
*
|
||||
* @param entityName The entity name for the entity type which would own the child
|
||||
* @param propertyName The name of the property on the owning entity type which would name this child association.
|
||||
* @param childEntity The child entity instance for which to locate the owner instance id.
|
||||
* @param mergeMap A map of non-persistent instances from an on-going merge operation (possibly null).
|
||||
*
|
||||
* @return The id of the entityName instance which is said to own the child; null if an appropriate owner is not
|
||||
* located.
|
||||
*/
|
||||
public Serializable getOwnerId(String entity, String property, Object childEntity, Map mergeMap) {
|
||||
|
||||
EntityPersister persister = session.getFactory()
|
||||
.getEntityPersister(entity);
|
||||
final CollectionPersister collectionPersister = session.getFactory()
|
||||
.getCollectionPersister(entity + '.' + property);
|
||||
|
||||
public Serializable getOwnerId(String entityName, String propertyName, Object childEntity, Map mergeMap) {
|
||||
final String collectionRole = entityName + '.' + propertyName;
|
||||
final EntityPersister persister = session.getFactory().getEntityPersister( entityName );
|
||||
final CollectionPersister collectionPersister = session.getFactory().getCollectionPersister( collectionRole );
|
||||
|
||||
// iterate all the entities currently associated with the persistence context.
|
||||
Iterator entities = entityEntries.entrySet().iterator();
|
||||
while ( entities.hasNext() ) {
|
||||
Map.Entry me = (Map.Entry) entities.next();
|
||||
EntityEntry ee = (EntityEntry) me.getValue();
|
||||
if ( persister.isSubclassEntityName( ee.getEntityName() ) ) {
|
||||
Object instance = me.getKey();
|
||||
final Map.Entry me = ( Map.Entry ) entities.next();
|
||||
final EntityEntry entityEntry = ( EntityEntry ) me.getValue();
|
||||
// does this entity entry pertain to the entity persister in which we are interested (owner)?
|
||||
if ( persister.isSubclassEntityName( entityEntry.getEntityName() ) ) {
|
||||
final Object entityEntryInstance = me.getKey();
|
||||
|
||||
//check if the managed object is the parent
|
||||
boolean found = isFoundInParent(
|
||||
property,
|
||||
childEntity,
|
||||
persister,
|
||||
boolean found = isFoundInParent(
|
||||
propertyName,
|
||||
childEntity,
|
||||
persister,
|
||||
collectionPersister,
|
||||
instance
|
||||
);
|
||||
entityEntryInstance
|
||||
);
|
||||
|
||||
if (!found && mergeMap!=null) {
|
||||
if ( !found && mergeMap != null ) {
|
||||
//check if the detached object being merged is the parent
|
||||
Object unmergedInstance = mergeMap.get(instance);
|
||||
Object unmergedChild = mergeMap.get(childEntity);
|
||||
if ( unmergedInstance!=null && unmergedChild!=null ) {
|
||||
found = isFoundInParent(
|
||||
property,
|
||||
unmergedChild,
|
||||
persister,
|
||||
Object unmergedInstance = mergeMap.get( entityEntryInstance );
|
||||
Object unmergedChild = mergeMap.get( childEntity );
|
||||
if ( unmergedInstance != null && unmergedChild != null ) {
|
||||
found = isFoundInParent(
|
||||
propertyName,
|
||||
unmergedChild,
|
||||
persister,
|
||||
collectionPersister,
|
||||
unmergedInstance
|
||||
);
|
||||
unmergedInstance
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if ( found ) {
|
||||
return ee.getId();
|
||||
return entityEntry.getId();
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
// if we get here, it is possible that we have a proxy 'in the way' of the merge map resolution...
|
||||
// NOTE: decided to put this here rather than in the above loop as I was nervous about the performance
|
||||
// of the loop-in-loop especially considering this is far more likely the 'edge case'
|
||||
if ( mergeMap != null ) {
|
||||
Iterator mergeMapItr = mergeMap.entrySet().iterator();
|
||||
while ( mergeMapItr.hasNext() ) {
|
||||
final Map.Entry mergeMapEntry = ( Map.Entry ) mergeMapItr.next();
|
||||
if ( mergeMapEntry.getKey() instanceof HibernateProxy ) {
|
||||
final HibernateProxy proxy = ( HibernateProxy ) mergeMapEntry.getKey();
|
||||
if ( persister.isSubclassEntityName( proxy.getHibernateLazyInitializer().getEntityName() ) ) {
|
||||
boolean found = isFoundInParent(
|
||||
propertyName,
|
||||
childEntity,
|
||||
persister,
|
||||
collectionPersister,
|
||||
mergeMap.get( proxy )
|
||||
);
|
||||
if ( !found ) {
|
||||
found = isFoundInParent(
|
||||
propertyName,
|
||||
mergeMap.get( childEntity ),
|
||||
persister,
|
||||
collectionPersister,
|
||||
mergeMap.get( proxy )
|
||||
);
|
||||
}
|
||||
if ( found ) {
|
||||
return proxy.getHibernateLazyInitializer().getIdentifier();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private boolean isFoundInParent(
|
||||
String property,
|
||||
Object childEntity,
|
||||
EntityPersister persister,
|
||||
String property,
|
||||
Object childEntity,
|
||||
EntityPersister persister,
|
||||
CollectionPersister collectionPersister,
|
||||
Object potentialParent
|
||||
) {
|
||||
Object collection = persister.getPropertyValue(
|
||||
potentialParent,
|
||||
property,
|
||||
session.getEntityMode()
|
||||
);
|
||||
return collection!=null && Hibernate.isInitialized(collection) &&
|
||||
collectionPersister.getCollectionType()
|
||||
.contains(collection, childEntity, session);
|
||||
Object potentialParent) {
|
||||
Object collection = persister.getPropertyValue(
|
||||
potentialParent,
|
||||
property,
|
||||
session.getEntityMode()
|
||||
);
|
||||
return collection != null
|
||||
&& Hibernate.isInitialized( collection )
|
||||
&& collectionPersister.getCollectionType().contains( collection, childEntity, session );
|
||||
}
|
||||
|
||||
/**
|
||||
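For orientation, here is a sketch of the scenario the reworked getOwnerId() serves (the open Session and variable names are assumed; Parent/Child are the classes used by the backref test further down). The child class carries no reference back to its owner, so the owner can only be discovered by scanning the collections held in the persistence context:

// Illustrative sketch only, not part of this commit's diff.
Parent p = new Parent( "Steve" );
Child c = new Child( "Joe" );                 // Child has no property pointing back at Parent
p.getChildren().add( c );
session.persist( p );

// During flush, the synthetic back-ref property on Child is resolved through the persistence context
// (entity and property names abbreviated here):
Serializable ownerId = ( ( SessionImplementor ) session )
        .getPersistenceContext()
        .getOwnerId( "Parent", "children", c, null );   // mergeMap is null outside of a merge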
@@ -1,13 +1,18 @@
|
|||
// $Id$
|
||||
package org.hibernate.hql.ast;
|
||||
|
||||
import java.lang.reflect.Constructor;
|
||||
|
||||
import antlr.ASTFactory;
|
||||
import antlr.Token;
|
||||
import antlr.collections.AST;
|
||||
|
||||
import org.hibernate.hql.antlr.HqlSqlTokenTypes;
|
||||
import org.hibernate.hql.ast.tree.AggregateNode;
|
||||
import org.hibernate.hql.ast.tree.BetweenOperatorNode;
|
||||
import org.hibernate.hql.ast.tree.BinaryArithmeticOperatorNode;
|
||||
import org.hibernate.hql.ast.tree.BinaryLogicOperatorNode;
|
||||
import org.hibernate.hql.ast.tree.BooleanLiteralNode;
|
||||
import org.hibernate.hql.ast.tree.Case2Node;
|
||||
import org.hibernate.hql.ast.tree.CaseNode;
|
||||
import org.hibernate.hql.ast.tree.CollectionFunction;
|
||||
|
@@ -19,10 +24,14 @@ import org.hibernate.hql.ast.tree.FromClause;
|
|||
import org.hibernate.hql.ast.tree.FromElement;
|
||||
import org.hibernate.hql.ast.tree.IdentNode;
|
||||
import org.hibernate.hql.ast.tree.ImpliedFromElement;
|
||||
import org.hibernate.hql.ast.tree.InLogicOperatorNode;
|
||||
import org.hibernate.hql.ast.tree.IndexNode;
|
||||
import org.hibernate.hql.ast.tree.InitializeableNode;
|
||||
import org.hibernate.hql.ast.tree.InsertStatement;
|
||||
import org.hibernate.hql.ast.tree.IntoClause;
|
||||
import org.hibernate.hql.ast.tree.IsNotNullLogicOperatorNode;
|
||||
import org.hibernate.hql.ast.tree.IsNullLogicOperatorNode;
|
||||
import org.hibernate.hql.ast.tree.JavaConstantNode;
|
||||
import org.hibernate.hql.ast.tree.LiteralNode;
|
||||
import org.hibernate.hql.ast.tree.MethodNode;
|
||||
import org.hibernate.hql.ast.tree.OrderByClause;
|
||||
|
@@ -30,18 +39,12 @@ import org.hibernate.hql.ast.tree.ParameterNode;
|
|||
import org.hibernate.hql.ast.tree.QueryNode;
|
||||
import org.hibernate.hql.ast.tree.SelectClause;
|
||||
import org.hibernate.hql.ast.tree.SelectExpressionImpl;
|
||||
import org.hibernate.hql.ast.tree.SessionFactoryAwareNode;
|
||||
import org.hibernate.hql.ast.tree.SqlFragment;
|
||||
import org.hibernate.hql.ast.tree.SqlNode;
|
||||
import org.hibernate.hql.ast.tree.UnaryArithmeticNode;
|
||||
import org.hibernate.hql.ast.tree.UpdateStatement;
|
||||
import org.hibernate.hql.ast.tree.BetweenOperatorNode;
|
||||
import org.hibernate.hql.ast.tree.UnaryLogicOperatorNode;
|
||||
import org.hibernate.hql.ast.tree.InLogicOperatorNode;
|
||||
import org.hibernate.hql.ast.tree.JavaConstantNode;
|
||||
import org.hibernate.hql.ast.tree.SessionFactoryAwareNode;
|
||||
import org.hibernate.hql.ast.tree.BooleanLiteralNode;
|
||||
|
||||
import java.lang.reflect.Constructor;
|
||||
import org.hibernate.hql.ast.tree.UpdateStatement;
|
||||
|
||||
/**
|
||||
* Custom AST factory for the intermediate tree that causes ANTLR to create specialized
|
||||
|
@@ -157,7 +160,9 @@ public class SqlASTFactory extends ASTFactory implements HqlSqlTokenTypes {
|
|||
case NOT_BETWEEN:
|
||||
return BetweenOperatorNode.class;
|
||||
case IS_NULL:
|
||||
return IsNullLogicOperatorNode.class;
|
||||
case IS_NOT_NULL:
|
||||
return IsNotNullLogicOperatorNode.class;
|
||||
case EXISTS:
|
||||
return UnaryLogicOperatorNode.class;
|
||||
default:
|
||||
|
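As a reminder of the hook shown in the previous hunk, SqlASTFactory maps HQL token types onto concrete node classes; the abridged sketch below shows that dispatch for the two new cases (the fallback branch is an assumption and is not part of this hunk):

// Abridged sketch of SqlASTFactory.getASTNodeType(int); only the cases relevant to HHH-2826 are shown.
public Class getASTNodeType(int tokenType) {
    switch ( tokenType ) {
        case IS_NULL:
            return IsNullLogicOperatorNode.class;
        case IS_NOT_NULL:
            return IsNotNullLogicOperatorNode.class;
        default:
            return SqlNode.class;   // assumed fallback; the real method handles many more token types
    }
}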
@@ -0,0 +1,133 @@
/*
 * Copyright (c) 2007, Red Hat Middleware, LLC. All rights reserved.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
 * Lesser General Public License, v. 2.1. This program is distributed in the
 * hope that it will be useful, but WITHOUT A WARRANTY; without even the implied
 * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details. You should have received a
 * copy of the GNU Lesser General Public License, v.2.1 along with this
 * distribution; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * Red Hat Author(s): Steve Ebersole
 */
package org.hibernate.hql.ast.tree;

import antlr.collections.AST;

import org.hibernate.type.Type;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.hql.antlr.HqlSqlTokenTypes;
import org.hibernate.util.StringHelper;
import org.hibernate.HibernateException;

/**
 * AbstractNullnessCheckNode implementation
 *
 * @author Steve Ebersole
 */
public abstract class AbstractNullnessCheckNode extends UnaryLogicOperatorNode {
    public void initialize() {
        // TODO : this really needs to be delayed until after we definitively know the operand node type;
        // where this is currently a problem is parameters for which we cannot unequivocally
        // resolve an expected type
        Type operandType = extractDataType( getOperand() );
        if ( operandType == null ) {
            return;
        }
        SessionFactoryImplementor sessionFactory = getSessionFactoryHelper().getFactory();
        int operandColumnSpan = operandType.getColumnSpan( sessionFactory );
        if ( operandColumnSpan > 1 ) {
            mutateRowValueConstructorSyntax( operandColumnSpan );
        }
    }

    protected abstract int getExpansionConnectorType();
    protected abstract String getExpansionConnectorText();

    private void mutateRowValueConstructorSyntax(int operandColumnSpan) {
        final int comparisonType = getType();
        final String comparisonText = getText();

        final int expansionConnectorType = getExpansionConnectorType();
        final String expansionConnectorText = getExpansionConnectorText();

        setType( expansionConnectorType );
        setText( expansionConnectorText );

        String[] mutationTexts = extractMutationTexts( getOperand(), operandColumnSpan );

        AST container = this;
        for ( int i = operandColumnSpan - 1; i > 0; i-- ) {
            if ( i == 1 ) {
                AST op1 = getASTFactory().create( comparisonType, comparisonText );
                AST operand1 = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, mutationTexts[0] );
                op1.setFirstChild( operand1 );
                container.setFirstChild( op1 );
                AST op2 = getASTFactory().create( comparisonType, comparisonText );
                AST operand2 = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, mutationTexts[1] );
                op2.setFirstChild( operand2 );
                op1.setNextSibling( op2 );
            }
            else {
                AST op = getASTFactory().create( comparisonType, comparisonText );
                AST operand = getASTFactory().create( HqlSqlTokenTypes.SQL_TOKEN, mutationTexts[i] );
                op.setFirstChild( operand );
                AST newContainer = getASTFactory().create( expansionConnectorType, expansionConnectorText );
                container.setFirstChild( newContainer );
                newContainer.setNextSibling( op );
                container = newContainer;
            }
        }
    }

    protected Type extractDataType(Node operand) {
        Type type = null;
        if ( operand instanceof SqlNode ) {
            type = ( ( SqlNode ) operand ).getDataType();
        }
        if ( type == null && operand instanceof ExpectedTypeAwareNode ) {
            type = ( ( ExpectedTypeAwareNode ) operand ).getExpectedType();
        }
        return type;
    }

    private static String[] extractMutationTexts(Node operand, int count) {
        if ( operand instanceof ParameterNode ) {
            String[] rtn = new String[count];
            for ( int i = 0; i < count; i++ ) {
                rtn[i] = "?";
            }
            return rtn;
        }
        else if ( operand.getType() == HqlSqlTokenTypes.VECTOR_EXPR ) {
            String[] rtn = new String[ operand.getNumberOfChildren() ];
            int x = 0;
            AST node = operand.getFirstChild();
            while ( node != null ) {
                rtn[ x++ ] = node.getText();
                node = node.getNextSibling();
            }
            return rtn;
        }
        else if ( operand instanceof SqlNode ) {
            String nodeText = operand.getText();
            if ( nodeText.startsWith( "(" ) ) {
                nodeText = nodeText.substring( 1 );
            }
            if ( nodeText.endsWith( ")" ) ) {
                nodeText = nodeText.substring( 0, nodeText.length() - 1 );
            }
            String[] splits = StringHelper.split( ", ", nodeText );
            if ( count != splits.length ) {
                throw new HibernateException( "SqlNode's text did not reference expected number of columns" );
            }
            return splits;
        }
        else {
            throw new HibernateException( "don't know how to extract row value elements from node : " + operand );
        }
    }
}
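To illustrate what the node mutation above buys (an example only; the column names are assumed): for a component such as Name spanning three columns, a single nullness check against the component is expanded into one check per column, joined by the connector supplied by the concrete subclass (AND for 'is null', OR for 'is not null'):

-- HQL:           from Human h where h.name is null
-- SQL (roughly): ... where h.name_first is null and h.name_initial is null and h.name_last is null

-- HQL:           from Human h where h.name is not null
-- SQL (roughly): ... where h.name_first is not null or h.name_initial is not null or h.name_last is not null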
@@ -0,0 +1,33 @@
/*
 * Copyright (c) 2007, Red Hat Middleware, LLC. All rights reserved.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
 * Lesser General Public License, v. 2.1. This program is distributed in the
 * hope that it will be useful, but WITHOUT A WARRANTY; without even the implied
 * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details. You should have received a
 * copy of the GNU Lesser General Public License, v.2.1 along with this
 * distribution; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * Red Hat Author(s): Steve Ebersole
 */
package org.hibernate.hql.ast.tree;

import org.hibernate.hql.antlr.HqlSqlTokenTypes;

/**
 * IsNotNullLogicOperatorNode implementation
 *
 * @author Steve Ebersole
 */
public class IsNotNullLogicOperatorNode extends AbstractNullnessCheckNode {
    protected int getExpansionConnectorType() {
        return HqlSqlTokenTypes.OR;
    }

    protected String getExpansionConnectorText() {
        return "OR";
    }
}
@@ -0,0 +1,33 @@
/*
 * Copyright (c) 2007, Red Hat Middleware, LLC. All rights reserved.
 *
 * This copyrighted material is made available to anyone wishing to use, modify,
 * copy, or redistribute it subject to the terms and conditions of the GNU
 * Lesser General Public License, v. 2.1. This program is distributed in the
 * hope that it will be useful, but WITHOUT A WARRANTY; without even the implied
 * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details. You should have received a
 * copy of the GNU Lesser General Public License, v.2.1 along with this
 * distribution; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * Red Hat Author(s): Steve Ebersole
 */
package org.hibernate.hql.ast.tree;

import org.hibernate.hql.antlr.HqlSqlTokenTypes;

/**
 * Represents a 'is null' check.
 *
 * @author Steve Ebersole
 */
public class IsNullLogicOperatorNode extends AbstractNullnessCheckNode {
    protected int getExpansionConnectorType() {
        return HqlSqlTokenTypes.AND;
    }

    protected String getExpansionConnectorText() {
        return "AND";
    }
}
@@ -4,9 +4,11 @@ import org.hibernate.type.Type;
import org.hibernate.Hibernate;

/**
 * @author <a href="mailto:steve@hibernate.org">Steve Ebersole </a>
 * Represents a unary operator node.
 *
 * @author Steve Ebersole
 */
public class UnaryLogicOperatorNode extends SqlNode implements UnaryOperatorNode {
public class UnaryLogicOperatorNode extends HqlSqlWalkerNode implements UnaryOperatorNode {
    public Node getOperand() {
        return ( Node ) getFirstChild();
    }
@@ -1,67 +1,112 @@
|
|||
//$Id$
|
||||
/*
|
||||
* Copyright (c) 2007, Red Hat Middleware, LLC. All rights reserved.
|
||||
*
|
||||
* This copyrighted material is made available to anyone wishing to use, modify,
|
||||
* copy, or redistribute it subject to the terms and conditions of the GNU
|
||||
* Lesser General Public License, v. 2.1. This program is distributed in the
|
||||
* hope that it will be useful, but WITHOUT A WARRANTY; without even the implied
|
||||
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Lesser General Public License for more details. You should have received a
|
||||
* copy of the GNU Lesser General Public License, v.2.1 along with this
|
||||
* distribution; if not, write to the Free Software Foundation, Inc.,
|
||||
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
*
|
||||
* Red Hat Author(s): Gavin King, Steve Ebersole
|
||||
*/
|
||||
package org.hibernate.property;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.Map;
|
||||
import java.io.Serializable;
|
||||
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.engine.SessionImplementor;
|
||||
import org.hibernate.engine.SessionFactoryImplementor;
|
||||
import org.hibernate.engine.SessionImplementor;
|
||||
|
||||
/**
|
||||
* Represents a "back-reference" to the id of a collection owner.
|
||||
* Represents a "back-reference" to the id of a collection owner. A "back-reference" is pertinent in mapping scenarios
|
||||
* where we have a uni-directional one-to-many association in which only the many side is mapped. In this case it is
|
||||
* the collection itself which is responsible for the FK value.
|
||||
* <p/>
|
||||
* In this scenario, the one side has no inherent knowledge of its "owner". So we introduce a synthetic property into
|
||||
* the one side to represent the association; a so-called back-reference.
|
||||
*
|
||||
* @author Gavin King
|
||||
* @author Steve Ebersole
|
||||
*/
|
||||
public class BackrefPropertyAccessor implements PropertyAccessor {
|
||||
|
||||
private final String propertyName;
|
||||
private final String entityName;
|
||||
|
||||
// cache these since they are stateless
|
||||
private final BackrefSetter setter; // this one could even be static...
|
||||
private final BackrefGetter getter;
|
||||
|
||||
/**
|
||||
* A placeholder for a property value, indicating that
|
||||
* we don't know the value of the back reference
|
||||
*/
|
||||
public static final Serializable UNKNOWN = new Serializable() {
|
||||
public String toString() { return "<unknown>"; }
|
||||
public String toString() {
|
||||
return "<unknown>";
|
||||
}
|
||||
|
||||
public Object readResolve() {
|
||||
return UNKNOWN;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Constructs a new instance of BackrefPropertyAccessor.
|
||||
*
|
||||
* @param collectionRole The collection role which this back ref references.
|
||||
* @param entityName The owner's entity name.
|
||||
*/
|
||||
public BackrefPropertyAccessor(String collectionRole, String entityName) {
|
||||
this.propertyName = collectionRole.substring( entityName.length() + 1 );
|
||||
this.entityName = entityName;
|
||||
|
||||
this.setter = new BackrefSetter();
|
||||
this.getter = new BackrefGetter();
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public Setter getSetter(Class theClass, String propertyName) {
|
||||
return new BackrefSetter();
|
||||
return setter;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public Getter getGetter(Class theClass, String propertyName) {
|
||||
return new BackrefGetter();
|
||||
return getter;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The Setter implementation for id backrefs.
|
||||
* Internal implementation of a property setter specific to these back-ref properties.
|
||||
*/
|
||||
public static final class BackrefSetter implements Setter {
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public Method getMethod() {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public String getMethodName() {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public void set(Object target, Object value, SessionFactoryImplementor factory) {
|
||||
// this page intentionally left blank :)
|
||||
}
|
||||
|
@@ -70,33 +115,46 @@ public class BackrefPropertyAccessor implements PropertyAccessor {
|
|||
|
||||
|
||||
/**
|
||||
* The Getter implementation for id backrefs.
|
||||
* Internal implementation of a property getter specific to these back-ref properties.
|
||||
*/
|
||||
public class BackrefGetter implements Getter {
|
||||
|
||||
public Object getForInsert(Object target, Map mergeMap, SessionImplementor session)
|
||||
throws HibernateException {
|
||||
if (session==null) {
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public Object getForInsert(Object target, Map mergeMap, SessionImplementor session) {
|
||||
if ( session == null ) {
|
||||
return UNKNOWN;
|
||||
}
|
||||
else {
|
||||
return session.getPersistenceContext()
|
||||
.getOwnerId( entityName, propertyName, target, mergeMap );
|
||||
return session.getPersistenceContext().getOwnerId( entityName, propertyName, target, mergeMap );
|
||||
}
|
||||
}
|
||||
|
||||
public Object get(Object target) {
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public Object get(Object target) {
|
||||
return UNKNOWN;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public Method getMethod() {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public String getMethodName() {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
public Class getReturnType() {
|
||||
return Object.class;
|
||||
}
|
||||
|
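A usage sketch for the accessor reworked above (variable names are assumed; the behavior noted in the comments follows directly from the methods shown in this diff):

// Illustrative only: how the now-cached getter/setter behave.
PropertyAccessor accessor = new BackrefPropertyAccessor( "Parent.children", "Parent" );
Getter getter = accessor.getGetter( null, "childrenBackref" );   // class/name arguments are ignored
Setter setter = accessor.getSetter( null, "childrenBackref" );

setter.set( child, value, sessionFactory );                       // deliberately a no-op
Object id = getter.getForInsert( child, null, session );          // delegates to PersistenceContext.getOwnerId(...)
Object placeholder = getter.get( child );                         // always BackrefPropertyAccessor.UNKNOWN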
@@ -101,6 +101,41 @@ public class ASTParserLoadingTest extends FunctionalTestCase {
        return new FunctionalTestClassTestSuite( ASTParserLoadingTest.class );
    }

    public void testComponentNullnessChecks() {
        Session s = openSession();
        s.beginTransaction();
        Human h = new Human();
        h.setName( new Name( "Johnny", 'B', "Goode" ) );
        s.save( h );
        h = new Human();
        h.setName( new Name( "Steve", null, "Ebersole" ) );
        s.save( h );
        h = new Human();
        h.setName( new Name( "Bono", null, null ) );
        s.save( h );
        h = new Human();
        h.setName( new Name( null, null, null ) );
        s.save( h );
        s.getTransaction().commit();
        s.close();

        s = openSession();
        s.beginTransaction();
        List results = s.createQuery( "from Human where name is null" ).list();
        assertEquals( 1, results.size() );
        results = s.createQuery( "from Human where name is not null" ).list();
        assertEquals( 3, results.size() );
        s.createQuery( "from Human where ? is null" ).setParameter( 0, null ).list();
        s.getTransaction().commit();
        s.close();

        s = openSession();
        s.beginTransaction();
        s.createQuery( "delete Human" ).executeUpdate();
        s.getTransaction().commit();
        s.close();
    }

    public void testInvalidCollectionDereferencesFail() {
        Session s = openSession();
        s.beginTransaction();
@@ -18,7 +18,7 @@ import org.hibernate.junit.functional.FunctionalTestClassTestSuite;
|
|||
* @author Gavin King
|
||||
*/
|
||||
public class MutableNaturalIdTest extends FunctionalTestCase {
|
||||
|
||||
|
||||
public MutableNaturalIdTest(String str) {
|
||||
super(str);
|
||||
}
|
||||
|
@@ -75,78 +75,78 @@ public class MutableNaturalIdTest extends FunctionalTestCase {
|
|||
s.getTransaction().commit();
|
||||
s.close();
|
||||
}
|
||||
|
||||
|
||||
public void testNonexistentNaturalIdCache() {
|
||||
getSessions().getStatistics().clear();
|
||||
|
||||
Session s = openSession();
|
||||
Transaction t = s.beginTransaction();
|
||||
|
||||
|
||||
Object nullUser = s.createCriteria(User.class)
|
||||
.add( Restrictions.naturalId()
|
||||
.set("name", "gavin")
|
||||
.set("org", "hb")
|
||||
.set("org", "hb")
|
||||
)
|
||||
.setCacheable(true)
|
||||
.uniqueResult();
|
||||
|
||||
|
||||
assertNull(nullUser);
|
||||
|
||||
|
||||
t.commit();
|
||||
s.close();
|
||||
|
||||
|
||||
assertEquals( getSessions().getStatistics().getQueryExecutionCount(), 1 );
|
||||
assertEquals( getSessions().getStatistics().getQueryCacheHitCount(), 0 );
|
||||
assertEquals( getSessions().getStatistics().getQueryCachePutCount(), 0 );
|
||||
|
||||
|
||||
s = openSession();
|
||||
t = s.beginTransaction();
|
||||
|
||||
|
||||
User u = new User("gavin", "hb", "secret");
|
||||
s.persist(u);
|
||||
|
||||
|
||||
t.commit();
|
||||
s.close();
|
||||
|
||||
|
||||
getSessions().getStatistics().clear();
|
||||
|
||||
s = openSession();
|
||||
t = s.beginTransaction();
|
||||
|
||||
|
||||
u = (User) s.createCriteria(User.class)
|
||||
.add( Restrictions.naturalId()
|
||||
.set("name", "gavin")
|
||||
.set("org", "hb")
|
||||
.set("org", "hb")
|
||||
)
|
||||
.setCacheable(true)
|
||||
.uniqueResult();
|
||||
|
||||
|
||||
assertNotNull(u);
|
||||
|
||||
|
||||
t.commit();
|
||||
s.close();
|
||||
|
||||
assertEquals( getSessions().getStatistics().getQueryExecutionCount(), 1 );
|
||||
assertEquals( getSessions().getStatistics().getQueryCacheHitCount(), 0 );
|
||||
assertEquals( getSessions().getStatistics().getQueryCachePutCount(), 1 );
|
||||
|
||||
|
||||
getSessions().getStatistics().clear();
|
||||
|
||||
s = openSession();
|
||||
t = s.beginTransaction();
|
||||
|
||||
|
||||
u = (User) s.createCriteria(User.class)
|
||||
.add( Restrictions.naturalId()
|
||||
.set("name", "gavin")
|
||||
.set("org", "hb")
|
||||
.set("org", "hb")
|
||||
).setCacheable(true)
|
||||
.uniqueResult();
|
||||
|
||||
|
||||
s.delete(u);
|
||||
|
||||
|
||||
t.commit();
|
||||
s.close();
|
||||
|
||||
|
||||
assertEquals( getSessions().getStatistics().getQueryExecutionCount(), 0 );
|
||||
assertEquals( getSessions().getStatistics().getQueryCacheHitCount(), 1 );
|
||||
|
||||
|
@@ -154,94 +154,86 @@ public class MutableNaturalIdTest extends FunctionalTestCase {
|
|||
|
||||
s = openSession();
|
||||
t = s.beginTransaction();
|
||||
|
||||
|
||||
nullUser = s.createCriteria(User.class)
|
||||
.add( Restrictions.naturalId()
|
||||
.set("name", "gavin")
|
||||
.set("org", "hb")
|
||||
.set("org", "hb")
|
||||
)
|
||||
.setCacheable(true)
|
||||
.uniqueResult();
|
||||
|
||||
|
||||
assertNull(nullUser);
|
||||
|
||||
|
||||
t.commit();
|
||||
s.close();
|
||||
|
||||
|
||||
assertEquals( getSessions().getStatistics().getQueryExecutionCount(), 1 );
|
||||
assertEquals( getSessions().getStatistics().getQueryCacheHitCount(), 0 );
|
||||
assertEquals( getSessions().getStatistics().getQueryCachePutCount(), 0 );
|
||||
|
||||
|
||||
}
|
||||
|
||||
public void testNaturalIdCache() {
|
||||
Session s = openSession();
|
||||
Transaction t = s.beginTransaction();
|
||||
|
||||
User u = new User("gavin", "hb", "secret");
|
||||
s.persist(u);
|
||||
|
||||
User u = new User( "gavin", "hb", "secret" );
|
||||
s.persist( u );
|
||||
t.commit();
|
||||
s.close();
|
||||
|
||||
|
||||
getSessions().getStatistics().clear();
|
||||
|
||||
s = openSession();
|
||||
t = s.beginTransaction();
|
||||
|
||||
u = (User) s.createCriteria(User.class)
|
||||
.add( Restrictions.naturalId()
|
||||
.set("name", "gavin")
|
||||
.set("org", "hb")
|
||||
)
|
||||
.setCacheable(true)
|
||||
.uniqueResult();
|
||||
|
||||
assertNotNull(u);
|
||||
|
||||
u = ( User ) s.createCriteria( User.class )
|
||||
.add( Restrictions.naturalId()
|
||||
.set( "name", "gavin" )
|
||||
.set( "org", "hb" )
|
||||
)
|
||||
.setCacheable( true )
|
||||
.uniqueResult();
|
||||
assertNotNull( u );
|
||||
t.commit();
|
||||
s.close();
|
||||
|
||||
assertEquals( getSessions().getStatistics().getQueryExecutionCount(), 1 );
|
||||
assertEquals( getSessions().getStatistics().getQueryCacheHitCount(), 0 );
|
||||
assertEquals( getSessions().getStatistics().getQueryCachePutCount(), 1 );
|
||||
|
||||
|
||||
s = openSession();
|
||||
t = s.beginTransaction();
|
||||
|
||||
User v = new User("xam", "hb", "foobar");
|
||||
s.persist(v);
|
||||
|
||||
t.commit();
|
||||
s.close();
|
||||
|
||||
|
||||
getSessions().getStatistics().clear();
|
||||
|
||||
s = openSession();
|
||||
t = s.beginTransaction();
|
||||
|
||||
u = (User) s.createCriteria( User.class)
|
||||
.add( Restrictions.naturalId()
|
||||
.set("name", "gavin")
|
||||
.set("org", "hb")
|
||||
).setCacheable(true)
|
||||
.uniqueResult();
|
||||
|
||||
u = ( User ) s.createCriteria( User.class )
|
||||
.add( Restrictions.naturalId()
|
||||
.set("name", "gavin")
|
||||
.set("org", "hb")
|
||||
)
|
||||
.setCacheable( true )
|
||||
.uniqueResult();
|
||||
assertNotNull(u);
|
||||
assertEquals( getSessions().getStatistics().getQueryExecutionCount(), 1 );
|
||||
assertEquals( getSessions().getStatistics().getQueryCacheHitCount(), 0 );
|
||||
|
||||
u = (User) s.createCriteria( User.class)
|
||||
.add( Restrictions.naturalId()
|
||||
.set("name", "gavin")
|
||||
.set("org", "hb")
|
||||
).setCacheable(true)
|
||||
.uniqueResult();
|
||||
|
||||
u = ( User ) s.createCriteria( User.class )
|
||||
.add( Restrictions.naturalId()
|
||||
.set("name", "gavin")
|
||||
.set("org", "hb")
|
||||
)
|
||||
.setCacheable( true )
|
||||
.uniqueResult();
|
||||
assertNotNull(u);
|
||||
assertEquals( getSessions().getStatistics().getQueryExecutionCount(), 1 );
|
||||
assertEquals( getSessions().getStatistics().getQueryCacheHitCount(), 1 );
|
||||
|
||||
|
||||
t.commit();
|
||||
s.close();
|
||||
|
||||
|
@@ -1,27 +1,27 @@
<?xml version="1.0"?>
<!DOCTYPE hibernate-mapping PUBLIC
    "-//Hibernate/Hibernate Mapping DTD 3.0//EN"
    "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">

<!--
    This mapping illustrates use of <natural-id mutable="true"/>
-->

<hibernate-mapping
    package="org.hibernate.test.naturalid"
    default-access="field">

    <class name="org.hibernate.test.naturalid.mutable.User" table="SystemUserInfo">
        <id name="id">
            <generator class="increment"/>
        </id>
        <natural-id>
        <natural-id mutable="true">
            <property name="name"/>
            <property name="org"/>
        </natural-id>
        <property name="password"/>
    </class>

</hibernate-mapping>
@@ -3,6 +3,7 @@ package org.hibernate.test.unidir;
|
|||
|
||||
import junit.framework.Test;
|
||||
|
||||
import org.hibernate.Hibernate;
|
||||
import org.hibernate.Session;
|
||||
import org.hibernate.Transaction;
|
||||
import org.hibernate.junit.functional.FunctionalTestCase;
|
@@ -84,5 +85,37 @@ public class BackrefTest extends FunctionalTestCase {
        t.commit();
        s.close();
    }

    public void testBackRefToProxiedEntityOnMerge() {
        Session s = openSession();
        s.beginTransaction();
        Parent me = new Parent( "Steve" );
        me.getChildren().add( new Child( "Joe" ) );
        s.persist( me );
        s.getTransaction().commit();
        s.close();

        // while detached, add new elements
        me.getChildren().add( new Child( "Cece" ) );
        me.getChildren().add( new Child( "Austin" ) );

        s = openSession();
        s.beginTransaction();
        // load 'me' to associate it with the new session as a proxy (this may have occurred as 'prior work'
        // to the reattachment below)...
        Object meProxy = s.load( Parent.class, me.getName() );
        assertFalse( Hibernate.isInitialized( meProxy ) );
        // now, do the reattachment...
        s.merge( me );
        s.getTransaction().commit();
        s.close();

        s = openSession();
        s.beginTransaction();
        s.createQuery( "delete from Child" ).executeUpdate();
        s.createQuery( "delete from Parent" ).executeUpdate();
        s.getTransaction().commit();
        s.close();
    }
}
@@ -8,19 +8,31 @@ package org.hibernate.test.unidir;
public class Child {
    private String name;
    private int age;
    Child() {}
    public Child(String name) {
        this.name = name;

    Child() {
    }

    public Child(String name) {
        this( name, 0 );
    }

    public Child(String name, int age) {
        this.name = name;
        this.age = age;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }