diff --git a/hibernate-core/src/main/java/org/hibernate/engine/spi/ActionQueue.java b/hibernate-core/src/main/java/org/hibernate/engine/spi/ActionQueue.java index d41518b709..3d00b68c95 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/spi/ActionQueue.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/spi/ActionQueue.java @@ -11,13 +11,13 @@ import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.ArrayList; -import java.util.HashMap; +import java.util.BitSet; import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.Iterator; import java.util.LinkedHashMap; -import java.util.LinkedList; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Queue; import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; @@ -45,7 +45,7 @@ import org.hibernate.cache.CacheException; import org.hibernate.engine.internal.NonNullableTransientDependencies; import org.hibernate.internal.CoreLogging; import org.hibernate.internal.CoreMessageLogger; -import org.hibernate.metadata.ClassMetadata; +import org.hibernate.persister.collection.CollectionPersister; import org.hibernate.proxy.HibernateProxy; import org.hibernate.proxy.LazyInitializer; import org.hibernate.type.CollectionType; @@ -128,7 +128,7 @@ public class ActionQueue { ExecutableList init(ActionQueue instance) { if ( instance.isOrderInsertsEnabled() ) { return instance.insertions = new ExecutableList( - new InsertActionSorter() + InsertActionSorter.INSTANCE ); } else { @@ -1022,11 +1022,13 @@ public class ActionQueue { * directionality of foreign-keys. So even though we will be changing the ordering here, we need to make absolutely * certain that we do not circumvent this FK ordering to the extent of causing constraint violations. *

- * Sorts the insert actions using more hashes. + * The algorithm first discovers the transitive incoming dependencies for every insert action + * and groups all inserts by entity name. + * Finally, it schedules these groups one by one, as soon as all of a group's dependencies have been fulfilled. *

- * NOTE: this class is not thread-safe. - * - * @author Jay Erb + * The implementation will only produce an optimal insert order for the insert groups that can be perfectly scheduled serially. + * Scheduling serially means, that there is an order which doesn't violate the FK constraint dependencies. + * The inserts of insert groups which can't be scheduled, are going to be inserted in the original order. */ private static class InsertActionSorter implements ExecutableList.Sorter { /** @@ -1034,106 +1036,140 @@ public class ActionQueue { */ public static final InsertActionSorter INSTANCE = new InsertActionSorter(); - private static class BatchIdentifier { + private static class InsertInfo { + private final AbstractEntityInsertAction insertAction; + // Inserts in this set must be executed before this insert + private Set transitiveIncomingDependencies; + // Child dependencies of i.e. one-to-many or inverse one-to-one + // It's necessary to have this for unidirectional associations, to propagate incoming dependencies + private Set outgoingDependencies; + // The current index of the insert info within an insert schedule + private int index; - private final String entityName; - private final String rootEntityName; - - private Set parentEntityNames = new HashSet<>( ); - - private Set childEntityNames = new HashSet<>( ); - - private BatchIdentifier parent; - - BatchIdentifier(String entityName, String rootEntityName) { - this.entityName = entityName; - this.rootEntityName = rootEntityName; + public InsertInfo(AbstractEntityInsertAction insertAction, int index) { + this.insertAction = insertAction; + this.index = index; } - public BatchIdentifier getParent() { - return parent; + public void buildDirectDependencies(IdentityHashMap insertInfosByEntity) { + final Object[] propertyValues = insertAction.getState(); + final Type[] propertyTypes = insertAction.getPersister().getPropertyTypes(); + for (int i = 0, propertyTypesLength = propertyTypes.length; i < propertyTypesLength; i++) { + addDirectDependency(propertyTypes[i], propertyValues[i], insertInfosByEntity); + } } - public void setParent(BatchIdentifier parent) { - this.parent = parent; + public void propagateChildDependencies() { + if ( outgoingDependencies != null ) { + for (InsertInfo childDependency : outgoingDependencies) { + if (childDependency.transitiveIncomingDependencies == null) { + childDependency.transitiveIncomingDependencies = new HashSet<>(); + } + childDependency.transitiveIncomingDependencies.add(this); + } + } + } + + public void buildTransitiveDependencies(Set visited) { + if (transitiveIncomingDependencies != null) { + visited.addAll(transitiveIncomingDependencies); + for (InsertInfo insertInfo : transitiveIncomingDependencies.toArray(new InsertInfo[0])) { + insertInfo.addTransitiveDependencies(this, visited); + } + visited.clear(); + } + } + + public void addTransitiveDependencies(InsertInfo origin, Set visited) { + if (transitiveIncomingDependencies != null) { + for (InsertInfo insertInfo : transitiveIncomingDependencies) { + if (visited.add(insertInfo)) { + origin.transitiveIncomingDependencies.add(insertInfo); + insertInfo.addTransitiveDependencies(origin, visited); + } + } + } + } + + private void addDirectDependency(Type type, Object value, IdentityHashMap insertInfosByEntity) { + if ( type.isEntityType() && value != null ) { + final EntityType entityType = (EntityType) type; + final InsertInfo insertInfo = insertInfosByEntity.get(value); + if (insertInfo != null) { + if (entityType.isOneToOne() && 
OneToOneType.class.cast(entityType).getForeignKeyDirection() == ForeignKeyDirection.TO_PARENT) { + if (!entityType.isReferenceToPrimaryKey()) { + if (outgoingDependencies == null) { + outgoingDependencies = new HashSet<>(); + } + outgoingDependencies.add(insertInfo); + } + } + else { + if (transitiveIncomingDependencies == null) { + transitiveIncomingDependencies = new HashSet<>(); + } + transitiveIncomingDependencies.add(insertInfo); + } + } + } + else if ( type.isCollectionType() && value != null ) { + CollectionType collectionType = (CollectionType) type; + final CollectionPersister collectionPersister = insertAction.getSession().getFactory().getMetamodel().collectionPersister(collectionType.getRole()); + // We only care about mappedBy one-to-many associations, because for these, the elements depend on the collection owner + if ( collectionPersister.isOneToMany() && collectionPersister.getElementType().isEntityType() ) { + final Iterator elementsIterator = collectionType.getElementsIterator(value, insertAction.getSession()); + while ( elementsIterator.hasNext() ) { + final Object element = elementsIterator.next(); + final InsertInfo insertInfo = insertInfosByEntity.get(element); + if (insertInfo != null) { + if (outgoingDependencies == null) { + outgoingDependencies = new HashSet<>(); + } + outgoingDependencies.add(insertInfo); + } + } + } + } + else if ( type.isComponentType() && value != null ) { + // Support recursive checks of composite type properties for associations and collections. + CompositeType compositeType = (CompositeType) type; + final SharedSessionContractImplementor session = insertAction.getSession(); + Object[] componentValues = compositeType.getPropertyValues( value, session ); + for ( int j = 0; j < componentValues.length; ++j ) { + Type componentValueType = compositeType.getSubtypes()[j]; + Object componentValue = componentValues[j]; + addDirectDependency( componentValueType, componentValue, insertInfosByEntity); + } + } } @Override public boolean equals(Object o) { - if ( this == o ) { + if (this == o) { return true; } - if ( !( o instanceof BatchIdentifier ) ) { + if (o == null || getClass() != o.getClass()) { return false; } - BatchIdentifier that = (BatchIdentifier) o; - return Objects.equals( entityName, that.entityName ); + + InsertInfo that = (InsertInfo) o; + + return insertAction.equals(that.insertAction); } @Override public int hashCode() { - return Objects.hash( entityName ); + return insertAction.hashCode(); } - String getEntityName() { - return entityName; - } - - String getRootEntityName() { - return rootEntityName; - } - - Set getParentEntityNames() { - return parentEntityNames; - } - - Set getChildEntityNames() { - return childEntityNames; - } - - boolean hasAnyParentEntityNames(BatchIdentifier batchIdentifier) { - return parentEntityNames.contains( batchIdentifier.getEntityName() ) || - parentEntityNames.contains( batchIdentifier.getRootEntityName() ); - } - - boolean hasAnyChildEntityNames(BatchIdentifier batchIdentifier) { - return childEntityNames.contains( batchIdentifier.getEntityName() ); - } - - /** - * Check if this {@link BatchIdentifier} has a parent or grand parent - * matching the given {@link BatchIdentifier} reference. 
- * - * @param batchIdentifier {@link BatchIdentifier} reference - * - * @return This {@link BatchIdentifier} has a parent matching the given {@link BatchIdentifier} reference - */ - boolean hasParent(BatchIdentifier batchIdentifier) { - return ( - parent == batchIdentifier - || ( parentEntityNames.contains( batchIdentifier.getEntityName() ) ) - || parent != null && parent.hasParent( batchIdentifier, new ArrayList<>() ) - ); - } - - private boolean hasParent(BatchIdentifier batchIdentifier, List stack) { - if ( !stack.contains( this ) && parent != null ) { - stack.add( this ); - return parent.hasParent( batchIdentifier, stack ); - } - return ( - parent == batchIdentifier - || parentEntityNames.contains( batchIdentifier.getEntityName() ) - ); + @Override + public String toString() { + return "InsertInfo{" + + "insertAction=" + insertAction + + '}'; } } - // the mapping of entity names to their latest batch numbers. - private List latestBatches; - - // the map of batch numbers to EntityInsertAction lists - private Map> actionBatches; - public InsertActionSorter() { } @@ -1141,181 +1177,144 @@ public class ActionQueue { * Sort the insert actions. */ public void sort(List insertions) { - // optimize the hash size to eliminate a rehash. - this.latestBatches = new ArrayList<>( ); - this.actionBatches = new HashMap<>(); + final int insertInfoCount = insertions.size(); + // Build up dependency metadata for insert actions + final InsertInfo[] insertInfos = new InsertInfo[insertInfoCount]; + // A map of all insert infos keyed by the entity instance + // This is needed to discover insert infos for direct dependencies + final IdentityHashMap insertInfosByEntity = new IdentityHashMap<>( insertInfos.length ); + // Construct insert infos and build a map for that, keyed by entity instance + for (int i = 0; i < insertInfoCount; i++) { + final AbstractEntityInsertAction insertAction = insertions.get(i); + final InsertInfo insertInfo = new InsertInfo(insertAction, i); + insertInfosByEntity.put(insertAction.getInstance(), insertInfo); + insertInfos[i] = insertInfo; + } + // First we must discover the direct dependencies + for (int i = 0; i < insertInfoCount; i++) { + insertInfos[i].buildDirectDependencies(insertInfosByEntity); + } + // Then we can propagate child dependencies to the insert infos incoming dependencies + for (int i = 0; i < insertInfoCount; i++) { + insertInfos[i].propagateChildDependencies(); + } + // Finally, we add all the transitive incoming dependencies + // and then group insert infos into EntityInsertGroup keyed by entity name + final Set visited = new HashSet<>(); + final Map insertInfosByEntityName = new LinkedHashMap<>(); + for (int i = 0; i < insertInfoCount; i++) { + final InsertInfo insertInfo = insertInfos[i]; + insertInfo.buildTransitiveDependencies( visited ); - for ( AbstractEntityInsertAction action : insertions ) { - BatchIdentifier batchIdentifier = new BatchIdentifier( - action.getEntityName(), - action.getSession() - .getFactory() - .getMetamodel() - .entityPersister( action.getEntityName() ) - .getRootEntityName() - ); - - // the entity associated with the current action. 
- Object currentEntity = action.getInstance(); - int index = latestBatches.indexOf( batchIdentifier ); - - if ( index != -1 ) { - batchIdentifier = latestBatches.get( index ); + final String entityName = insertInfo.insertAction.getPersister().getEntityName(); + EntityInsertGroup entityInsertGroup = insertInfosByEntityName.get(entityName); + if (entityInsertGroup == null) { + insertInfosByEntityName.put(entityName, entityInsertGroup = new EntityInsertGroup(entityName)); } - else { - latestBatches.add( batchIdentifier ); + entityInsertGroup.add(insertInfo); + } + // Now we can go through the EntityInsertGroups and schedule all the ones + // for which we have already scheduled all the dependentEntityNames + final Set scheduledEntityNames = new HashSet<>(insertInfosByEntityName.size()); + int schedulePosition = 0; + int lastScheduleSize; + do { + lastScheduleSize = scheduledEntityNames.size(); + final Iterator iterator = insertInfosByEntityName.values().iterator(); + while (iterator.hasNext()) { + final EntityInsertGroup insertGroup = iterator.next(); + if (scheduledEntityNames.containsAll(insertGroup.dependentEntityNames)) { + schedulePosition = schedule(insertInfos, insertGroup.insertInfos, schedulePosition); + scheduledEntityNames.add(insertGroup.entityName); + iterator.remove(); + } } - addParentChildEntityNames( action, batchIdentifier ); - addToBatch( batchIdentifier, action ); + // we try to schedule entity groups over and over again, until we can't schedule any further + } while (lastScheduleSize != scheduledEntityNames.size()); + if ( !insertInfosByEntityName.isEmpty() ) { + LOG.warn("The batch containing " + insertions.size() + " statements could not be sorted. " + + "This might indicate a circular entity relationship."); } insertions.clear(); - - // Examine each entry in the batch list, and build the dependency graph. - for ( int i = 0; i < latestBatches.size(); i++ ) { - BatchIdentifier batchIdentifier = latestBatches.get( i ); - - for ( int j = i - 1; j >= 0; j-- ) { - BatchIdentifier prevBatchIdentifier = latestBatches.get( j ); - if ( prevBatchIdentifier.hasAnyParentEntityNames( batchIdentifier ) ) { - prevBatchIdentifier.parent = batchIdentifier; - } - if ( batchIdentifier.hasAnyChildEntityNames( prevBatchIdentifier ) ) { - prevBatchIdentifier.parent = batchIdentifier; - } - } - - for ( int j = i + 1; j < latestBatches.size(); j++ ) { - BatchIdentifier nextBatchIdentifier = latestBatches.get( j ); - - if ( nextBatchIdentifier.hasAnyParentEntityNames( batchIdentifier ) ) { - nextBatchIdentifier.parent = batchIdentifier; - } - if ( batchIdentifier.hasAnyChildEntityNames( nextBatchIdentifier ) ) { - nextBatchIdentifier.parent = batchIdentifier; - } - } - } - - boolean sorted = false; - - long maxIterations = latestBatches.size() * latestBatches.size(); - long iterations = 0; - - sort: - do { - // Examine each entry in the batch list, sorting them based on parent/child association - // as depicted by the dependency graph. - iterations++; - - for ( int i = 0; i < latestBatches.size(); i++ ) { - BatchIdentifier batchIdentifier = latestBatches.get( i ); - - // Iterate next batches and make sure that children types are after parents. - // Since the outer loop looks at each batch entry individually and the prior loop will reorder - // entries as well, we need to look and verify if the current batch is a child of the next - // batch or if the current batch is seen as a parent or child of the next batch. 
- for ( int j = i + 1; j < latestBatches.size(); j++ ) { - BatchIdentifier nextBatchIdentifier = latestBatches.get( j ); - - if ( batchIdentifier.hasParent( nextBatchIdentifier ) && !nextBatchIdentifier.hasParent( batchIdentifier ) ) { - latestBatches.remove( batchIdentifier ); - latestBatches.add( j, batchIdentifier ); - - continue sort; - } - } - } - sorted = true; - } - while ( !sorted && iterations <= maxIterations); - - if ( iterations > maxIterations ) { - LOG.warn( "The batch containing " + latestBatches.size() + " statements could not be sorted after " + maxIterations + " iterations. " + - "This might indicate a circular entity relationship." ); - } - - // Now, rebuild the insertions list. There is a batch for each entry in the name list. - for ( BatchIdentifier rootIdentifier : latestBatches ) { - List batch = actionBatches.get( rootIdentifier ); - insertions.addAll( batch ); + for (InsertInfo insertInfo : insertInfos) { + insertions.add(insertInfo.insertAction); } } - /** - * Add parent and child entity names so that we know how to rearrange dependencies - * - * @param action The action being sorted - * @param batchIdentifier The batch identifier of the entity affected by the action - */ - private void addParentChildEntityNames(AbstractEntityInsertAction action, BatchIdentifier batchIdentifier) { - Object[] propertyValues = action.getState(); - ClassMetadata classMetadata = action.getPersister().getClassMetadata(); - if ( classMetadata != null ) { - Type[] propertyTypes = classMetadata.getPropertyTypes(); - - for ( int i = 0; i < propertyValues.length; i++ ) { - Object value = propertyValues[i]; - Type type = propertyTypes[i]; - addParentChildEntityNameByPropertyAndValue( action, batchIdentifier, type, value ); + private int schedule(InsertInfo[] insertInfos, List insertInfosToSchedule, int schedulePosition) { + final InsertInfo[] newInsertInfos = new InsertInfo[insertInfos.length]; + // The bitset is there to quickly query if an index is already scheduled + final BitSet bitSet = new BitSet(insertInfos.length); + // Remember the smallest index of the insertInfosToSchedule to check if we actually need to reorder anything + int smallestScheduledIndex = -1; + // The biggestScheduledIndex is needed as upper bound for shifting elements that were replaced by insertInfosToSchedule + int biggestScheduledIndex = -1; + for (int i = 0; i < insertInfosToSchedule.size(); i++) { + final int index = insertInfosToSchedule.get(i).index; + bitSet.set(index); + smallestScheduledIndex = Math.min(smallestScheduledIndex, index); + biggestScheduledIndex = Math.max(biggestScheduledIndex, index); + } + final int nextSchedulePosition = schedulePosition + insertInfosToSchedule.size(); + if (smallestScheduledIndex == schedulePosition && biggestScheduledIndex == nextSchedulePosition) { + // In this case, the order is already correct and we can skip some copying + return nextSchedulePosition; + } + // The index to which we start to shift elements that appear within the range of [schedulePosition, nextSchedulePosition) + int shiftSchedulePosition = nextSchedulePosition; + for (int i = 0; i < insertInfosToSchedule.size(); i++) { + final InsertInfo insertInfoToSchedule = insertInfosToSchedule.get(i); + final int targetSchedulePosition = schedulePosition + i; + newInsertInfos[targetSchedulePosition] = insertInfoToSchedule; + insertInfoToSchedule.index = targetSchedulePosition; + final InsertInfo oldInsertInfo = insertInfos[targetSchedulePosition]; + // Move the insert info previously located at the target 
schedule position to the current shift position + if (!bitSet.get(targetSchedulePosition)) { + oldInsertInfo.index = shiftSchedulePosition; + // Also set this index in the bitset to skip copying the value later, as it is considered scheduled + bitSet.set(targetSchedulePosition); + newInsertInfos[shiftSchedulePosition++]= oldInsertInfo; } } + // We have to shift all the elements up to the biggestMovedIndex + 1 + biggestScheduledIndex++; + for (int i = bitSet.nextClearBit(schedulePosition); i < biggestScheduledIndex; i++) { + // Only copy the old insert info over if it wasn't already scheduled + if (!bitSet.get(i)) { + final InsertInfo insertInfo = insertInfos[i]; + insertInfo.index = shiftSchedulePosition; + newInsertInfos[shiftSchedulePosition++] = insertInfo; + } + } + // Copy over the newly reordered array part into the main array + System.arraycopy(newInsertInfos, schedulePosition, insertInfos, schedulePosition, biggestScheduledIndex - schedulePosition); + return nextSchedulePosition; } - private void addParentChildEntityNameByPropertyAndValue(AbstractEntityInsertAction action, BatchIdentifier batchIdentifier, Type type, Object value) { - if ( type.isEntityType() && value != null ) { - final EntityType entityType = (EntityType) type; - final String entityName = entityType.getName(); - final String rootEntityName = action.getSession().getFactory().getMetamodel().entityPersister( entityName ).getRootEntityName(); + public static class EntityInsertGroup { + private final String entityName; + private final List insertInfos = new ArrayList<>(); + private final Set dependentEntityNames = new HashSet<>(); - if ( entityType.isOneToOne() && OneToOneType.class.cast( entityType ).getForeignKeyDirection() == ForeignKeyDirection.TO_PARENT ) { - if ( !entityType.isReferenceToPrimaryKey() ) { - batchIdentifier.getChildEntityNames().add( entityName ); - } - if ( !rootEntityName.equals( entityName ) ) { - batchIdentifier.getChildEntityNames().add( rootEntityName ); - } - } - else { - batchIdentifier.getParentEntityNames().add( entityName ); - if ( !rootEntityName.equals( entityName ) ) { - batchIdentifier.getParentEntityNames().add( rootEntityName ); - } - } + public EntityInsertGroup(String entityName) { + this.entityName = entityName; } - else if ( type.isCollectionType() && value != null ) { - CollectionType collectionType = (CollectionType) type; - final SessionFactoryImplementor sessionFactory = ( (SessionImplementor) action.getSession() ) - .getSessionFactory(); - if ( collectionType.getElementType( sessionFactory ).isEntityType() ) { - String entityName = collectionType.getAssociatedEntityName( sessionFactory ); - String rootEntityName = action.getSession().getFactory().getMetamodel().entityPersister( entityName ).getRootEntityName(); - batchIdentifier.getChildEntityNames().add( entityName ); - if ( !rootEntityName.equals( entityName ) ) { - batchIdentifier.getChildEntityNames().add( rootEntityName ); - } - } - } - else if ( type.isComponentType() && value != null ) { - // Support recursive checks of composite type properties for associations and collections. 
- CompositeType compositeType = (CompositeType) type; - final SharedSessionContractImplementor session = action.getSession(); - Object[] componentValues = compositeType.getPropertyValues( value, session ); - for ( int j = 0; j < componentValues.length; ++j ) { - Type componentValueType = compositeType.getSubtypes()[j]; - Object componentValue = componentValues[j]; - addParentChildEntityNameByPropertyAndValue( action, batchIdentifier, componentValueType, componentValue ); - } - } - } - private void addToBatch(BatchIdentifier batchIdentifier, AbstractEntityInsertAction action) { - List actions = actionBatches.get( batchIdentifier ); - - if ( actions == null ) { - actions = new LinkedList<>(); - actionBatches.put( batchIdentifier, actions ); + public void add(InsertInfo insertInfo) { + insertInfos.add(insertInfo); + if (insertInfo.transitiveIncomingDependencies != null) { + for (InsertInfo dependency : insertInfo.transitiveIncomingDependencies) { + dependentEntityNames.add(dependency.insertAction.getEntityName()); + } + } + } + + @Override + public String toString() { + return "EntityInsertGroup{" + + "entityName='" + entityName + '\'' + + '}'; } - actions.add( action ); } } diff --git a/hibernate-core/src/test/java/org/hibernate/test/insertordering/InsertOrderingCircularDependencyFalsePositiveTest.java b/hibernate-core/src/test/java/org/hibernate/test/insertordering/InsertOrderingCircularDependencyFalsePositiveTest.java new file mode 100644 index 0000000000..5a7ef76614 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/insertordering/InsertOrderingCircularDependencyFalsePositiveTest.java @@ -0,0 +1,174 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * License: GNU Lesser General Public License (LGPL), version 2.1 or later. + * See the lgpl.txt file in the root directory or . 
+ */ +package org.hibernate.test.insertordering; + +import org.hibernate.cfg.Environment; +import org.hibernate.test.util.jdbc.PreparedStatementSpyConnectionProvider; +import org.hibernate.testing.DialectChecks; +import org.hibernate.testing.FailureExpected; +import org.hibernate.testing.RequiresDialectFeature; +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase; +import org.junit.Test; + +import javax.persistence.CascadeType; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.Id; +import javax.persistence.OneToMany; +import javax.persistence.OneToOne; +import java.sql.SQLException; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import static org.hibernate.testing.transaction.TransactionUtil.doInHibernate; + +@TestForIssue(jiraKey = "HHH-16485") +@RequiresDialectFeature(DialectChecks.SupportsJdbcDriverProxying.class) +public class InsertOrderingCircularDependencyFalsePositiveTest extends BaseNonConfigCoreFunctionalTestCase { + + private PreparedStatementSpyConnectionProvider connectionProvider = new PreparedStatementSpyConnectionProvider(true, false); + + @Override + protected Class[] getAnnotatedClasses() { + return new Class[]{ + Wrapper.class, + Condition.class, + SimpleCondition.class, + Expression.class, + ConstantExpression.class, + Condition.class, + CompoundCondition.class, + }; + } + + @Override + protected void addSettings(Map settings) { + settings.put(Environment.ORDER_INSERTS, "true"); + settings.put(Environment.ORDER_UPDATES, "true"); + settings.put(Environment.STATEMENT_BATCH_SIZE, "50"); + settings.put( + org.hibernate.cfg.AvailableSettings.CONNECTION_PROVIDER, + connectionProvider + ); + } + + @Override + public void releaseResources() { + super.releaseResources(); + connectionProvider.stop(); + } + + @Override + protected boolean rebuildSessionFactoryOnError() { + return false; + } + + @Test + public void testBatching() throws SQLException { + doInHibernate(this::sessionFactory, session -> { + connectionProvider.clear(); + // This should be persistable but currently reports that it might be circular + session.persist(Wrapper.create()); + }); + } + + @Entity(name = "Wrapper") + public static class Wrapper { + @Id + private String id; + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + private Condition condition; + @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER) + private Set constantExpressions; + + public Wrapper() { + } + + public static Wrapper create() { + final Wrapper w = new Wrapper(); + final CompoundCondition cc = new CompoundCondition(); + final SimpleCondition c1 = new SimpleCondition(); + final SimpleCondition c2 = new SimpleCondition(); + final ConstantExpression e1 = new ConstantExpression(); + final ConstantExpression e2 = new ConstantExpression(); + final ConstantExpression e3 = new ConstantExpression(); + final ConstantExpression e4 = new ConstantExpression(); + final ConstantExpression e5 = new ConstantExpression(); + w.id = "w"; + w.condition = cc; + cc.id = "cc"; + cc.first = c1; + cc.second = c2; + c1.id = "c1"; + c1.left = e1; + c1.right = e2; + c2.id = "c2"; + c2.left = e3; + c2.right = e4; + e1.id = "e1"; + e1.value = "e1"; + e2.id = "e2"; + e2.value = "e2"; + e3.id = "e3"; + e3.value = "e3"; + e4.id = "e4"; + e4.value = "e4"; + e5.id = "e5"; + e5.value = "e5"; + w.constantExpressions = new HashSet<>(); + w.constantExpressions.add(e5); + return w; + } + } + + 
@Entity(name = "Condition") + public static abstract class Condition { + @Id + protected String id; + + public Condition() { + } + } + @Entity(name = "SimpleCondition") + public static class SimpleCondition extends Condition { + @OneToOne(cascade = CascadeType.ALL) + private Expression left; + @OneToOne(cascade = CascadeType.ALL) + private Expression right; + + public SimpleCondition() { + } + } + @Entity(name = "Expression") + public static abstract class Expression { + @Id + protected String id; + + protected Expression() { + } + + } + @Entity(name = "ConstantExpression") + public static class ConstantExpression extends Expression { + private String value; + + public ConstantExpression() { + } + } + @Entity(name = "CompoundCondition") + public static class CompoundCondition extends Condition { + @OneToOne(cascade = CascadeType.ALL) + protected Condition first; + @OneToOne(cascade = CascadeType.ALL) + protected Condition second; + + public CompoundCondition() { + } + } +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/insertordering/InsertOrderingRCATest.java b/hibernate-core/src/test/java/org/hibernate/test/insertordering/InsertOrderingRCATest.java new file mode 100644 index 0000000000..39895c1a0f --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/insertordering/InsertOrderingRCATest.java @@ -0,0 +1,2131 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * License: GNU Lesser General Public License (LGPL), version 2.1 or later. + * See the lgpl.txt file in the root directory or . + */ +package org.hibernate.test.insertordering; + +import org.hibernate.cfg.Environment; +import org.hibernate.test.util.jdbc.PreparedStatementSpyConnectionProvider; +import org.hibernate.testing.DialectChecks; +import org.hibernate.testing.RequiresDialectFeature; +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase; +import org.junit.Test; + +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.JoinTable; +import javax.persistence.MappedSuperclass; +import javax.persistence.OneToMany; +import javax.persistence.OneToOne; +import javax.persistence.Table; +import java.io.Serializable; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; + +import static org.hibernate.testing.transaction.TransactionUtil.doInHibernate; +import static org.mockito.Mockito.verify; + +@TestForIssue(jiraKey = "HHH-16485") +@RequiresDialectFeature(DialectChecks.SupportsJdbcDriverProxying.class) +public class InsertOrderingRCATest extends BaseNonConfigCoreFunctionalTestCase { + + private PreparedStatementSpyConnectionProvider connectionProvider = new PreparedStatementSpyConnectionProvider(true, false); + + @Override + protected Class[] getAnnotatedClasses() { + return new Class[]{ + WeightedCause.class, + TimeManipulation.class, + Symptom.class, + SimpleCondition.class, + RCATemplate.class, + ParameterExpression.class, + NumberedExpression.class, + MathExpression.class, + FieldExpression.class, + Expression.class, + ConstantExpression.class, + ConditionAndExpression.class, + ConditionalExpression.class, + Condition.class, + CompoundCondition.class, + Cause.class, + 
CalculationExpression.class, + AlertCondition.class, + BaseEntity.class + }; + } + + @Override + protected void addSettings(Map settings) { + settings.put(Environment.ORDER_INSERTS, "true"); + settings.put(Environment.ORDER_UPDATES, "true"); + settings.put(Environment.STATEMENT_BATCH_SIZE, "50"); + settings.put( + org.hibernate.cfg.AvailableSettings.CONNECTION_PROVIDER, + connectionProvider + ); + } + + @Override + public void releaseResources() { + super.releaseResources(); + connectionProvider.stop(); + } + + @Override + protected boolean rebuildSessionFactoryOnError() { + return false; + } + + @Test + public void testBatching() throws SQLException { + doInHibernate(this::sessionFactory, session -> { + connectionProvider.clear(); + for (RCATemplate template : DefaultTemplatesVault.getDefaultRCATemplates()) { + session.persist(template); + } + }); + } + + @Entity(name = "WeightedCause") + @Table(name = "rca_weighted_cause") + public static class WeightedCause extends BaseEntity { + private Cause cause; + private Integer weight; + + public WeightedCause() { + } + + public WeightedCause(Cause cause, Integer weight) { + this.cause = cause; + this.weight = weight; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "cause_id") + public Cause getCause() { + return cause; + } + + public void setCause(Cause cause) { + this.cause = cause; + } + + public Integer getWeight() { + return weight; + } + + public void setWeight(Integer weight) { + this.weight = weight; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + WeightedCause cause1 = (WeightedCause) o; + + if (cause != null ? !cause.equals(cause1.cause) : cause1.cause != null) { + return false; + } + if (weight != null ? !weight.equals(cause1.weight) : cause1.weight != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (cause != null ? cause.hashCode() : 0); + result = 31 * result + (weight != null ? 
weight.hashCode() : 0); + return result; + } + } + @Entity(name = "TimeManipulation") + @Table(name = "rca_time_manipulation") + public static class TimeManipulation extends BaseEntity implements Serializable { + private ManipulationType type; + private MathOperator op; + private Long time; + private RelativeToType relativeToType; + private String paramName; + + public TimeManipulation() { + } + + public TimeManipulation(ManipulationType type, MathOperator op, Long time, RelativeToType relativeToType, String paramName) { + this.type = type; + this.op = op; + this.time = time; + this.relativeToType = relativeToType; + this.paramName = paramName; + } + + public ManipulationType getType() { + return type; + } + + public void setType(ManipulationType type) { + this.type = type; + } + + public MathOperator getOp() { + return op; + } + + public void setOp(MathOperator op) { + this.op = op; + } + + public Long getTime() { + return time; + } + + public void setTime(Long time) { + this.time = time; + } + + public RelativeToType getRelativeToType() { + return relativeToType; + } + + public void setRelativeToType(RelativeToType relativeToType) { + this.relativeToType = relativeToType; + } + + public String getParamName() { + return paramName; + } + + public void setParamName(String paramName) { + this.paramName = paramName; + } + } + @Entity(name = "Symptom") + @Table(name = "rca_symptom") + public static class Symptom extends BaseEntity implements Serializable { + private String name; + private String objectType; + private Condition condition; + private String nodeTypeString; + private String processTypeString; + private String filterString; + + public Symptom() { + } + + public Symptom(String name, String objectType, Condition condition) { + this.name = name; + this.objectType = objectType; + this.condition = condition; + } + + public String getObjectType() { + return objectType; + } + + public void setObjectType(String objectType) { + this.objectType = objectType; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "condition_id") + public Condition getCondition() { + return condition; + } + + public void setCondition(Condition condition) { + this.condition = condition; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getNodeTypeString() { + return nodeTypeString; + } + + public void setNodeTypeString(String nodeTypeString) { + this.nodeTypeString = nodeTypeString; + } + + public String getProcessTypeString() { + return processTypeString; + } + + public void setProcessTypeString(String processTypeString) { + this.processTypeString = processTypeString; + } + + public String getFilterString() { + return filterString; + } + + public void setFilterString(String filterString) { + this.filterString = filterString; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + Symptom symptom = (Symptom) o; + + if (condition != null ? !condition.equals(symptom.condition) : symptom.condition != null) { + return false; + } + if (name != null ? !name.equals(symptom.name) : symptom.name != null) { + return false; + } + if (objectType != null ? 
!objectType.equals(symptom.objectType) : symptom.objectType != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (name != null ? name.hashCode() : 0); + result = 31 * result + (objectType != null ? objectType.hashCode() : 0); + result = 31 * result + (condition != null ? condition.hashCode() : 0); + return result; + } + } + @Entity(name = "SimpleCondition") + public static class SimpleCondition extends Condition { + private Expression left; + private Operator op; + private Expression right; + + public SimpleCondition() { + } + + public SimpleCondition(Expression left, Operator op, Expression right) { + this.left = left; + this.op = op; + this.right = right; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "left_expression_id") + public Expression getLeft() { + return left; + } + + public void setLeft(Expression left) { + this.left = left; + } + + public Operator getOp() { + return op; + } + + public void setOp(Operator op) { + this.op = op; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "right_expression_id") + public Expression getRight() { + return right; + } + + public void setRight(Expression right) { + this.right = right; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + SimpleCondition that = (SimpleCondition) o; + + if (left != null ? !left.equals(that.left) : that.left != null) { + return false; + } + if (op != that.op) { + return false; + } + if (right != null ? !right.equals(that.right) : that.right != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (left != null ? left.hashCode() : 0); + result = 31 * result + (op != null ? op.hashCode() : 0); + result = 31 * result + (right != null ? 
right.hashCode() : 0); + return result; + } + } + @Entity(name = "RCATemplate") + public static class RCATemplate extends BaseEntity { + private String name; + private Symptom symptom; + private Set possibleCauses; + + public RCATemplate() { + } + + public RCATemplate(String name, Symptom symptom, Set possibleCauses) { + this.name = name; + this.symptom = symptom; + this.possibleCauses = possibleCauses; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "symptom_id") + public Symptom getSymptom() { + return symptom; + + } + + public void setSymptom(Symptom symptom) { + this.symptom = symptom; + } + + @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER) + @JoinTable(name = "template_cause", joinColumns = { + @JoinColumn(name = "f_template_id")}, inverseJoinColumns = { + @JoinColumn(name = "f_cause_id")}) + public Set getPossibleCauses() { + return possibleCauses; + } + + public void setPossibleCauses(Set possibleCauses) { + this.possibleCauses = possibleCauses; + } + + } + @Entity(name = "ParameterExpression") + public static class ParameterExpression extends Expression { + private String parameterName; + private String defaultValue; + + + public ParameterExpression() { + } + + public ParameterExpression(String parameterName, String defaultValue, MetadataFieldType type) { + this.parameterName = parameterName; + this.defaultValue = defaultValue; + this.type = type; + } + + public String getParameterName() { + return parameterName; + } + + public void setParameterName(String parameterName) { + this.parameterName = parameterName; + } + + public String getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + ParameterExpression that = (ParameterExpression) o; + + if (defaultValue != null ? !defaultValue.equals(that.defaultValue) : that.defaultValue != null) { + return false; + } + if (parameterName != null ? !parameterName.equals(that.parameterName) : that.parameterName != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (parameterName != null ? parameterName.hashCode() : 0); + result = 31 * result + (defaultValue != null ? 
defaultValue.hashCode() : 0); + return result; + } + } + @Entity(name = "NumberedExpression") + @Table(name = "rca_numbered_exception") + public static class NumberedExpression extends BaseEntity implements Comparable { + private Long num; + private Expression expression; + + public NumberedExpression() { + } + + public NumberedExpression(Long num, Expression expression) { + this.num = num; + this.expression = expression; + } + + public Long getNum() { + return num; + } + + public void setNum(Long num) { + this.num = num; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "num_expression_id") + public Expression getExpression() { + return expression; + } + + public void setExpression(Expression expression) { + this.expression = expression; + } + + @Override + public int compareTo(Object o) { + NumberedExpression other = (NumberedExpression) o; + return (int) (this.num - other.num); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + NumberedExpression that = (NumberedExpression) o; + + if (expression != null ? !expression.equals(that.expression) : that.expression != null) { + return false; + } + if (num != null ? !num.equals(that.num) : that.num != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (num != null ? num.hashCode() : 0); + result = 31 * result + (expression != null ? expression.hashCode() : 0); + return result; + } + } + @Entity(name = "MathExpression") + public static class MathExpression extends Expression { + private Expression left; + private Expression right; + private MathOperator op; + + public MathExpression() { + } + + public MathExpression(Expression left, Expression right, MathOperator op) { + this.left = left; + this.right = right; + this.op = op; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "left_expression_id") + public Expression getLeft() { + return left; + } + + public void setLeft(Expression left) { + this.left = left; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "right_expression_id") + public Expression getRight() { + return right; + } + + public void setRight(Expression right) { + this.right = right; + } + + public MathOperator getOp() { + return op; + } + + public void setOp(MathOperator op) { + this.op = op; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + MathExpression that = (MathExpression) o; + + if (left != null ? !left.equals(that.left) : that.left != null) { + return false; + } + if (op != that.op) { + return false; + } + if (right != null ? !right.equals(that.right) : that.right != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (left != null ? left.hashCode() : 0); + result = 31 * result + (right != null ? right.hashCode() : 0); + result = 31 * result + (op != null ? 
op.hashCode() : 0); + return result; + } + } + @Entity(name = "FieldExpression") + public static class FieldExpression extends Expression { + private String objectType; + private String name; + + public FieldExpression() { + } + + public FieldExpression(String objectType, String name) { + this.objectType = objectType; + this.name = name; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getObjectType() { + return objectType; + } + + public void setObjectType(String objectType) { + this.objectType = objectType; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + FieldExpression that = (FieldExpression) o; + + if (name != null ? !name.equals(that.name) : that.name != null) { + return false; + } + if (objectType != null ? !objectType.equals(that.objectType) : that.objectType != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (objectType != null ? objectType.hashCode() : 0); + result = 31 * result + (name != null ? name.hashCode() : 0); + return result; + } + } + @Entity(name = "Expression") + @Table(name = "rca_expression") + public static abstract class Expression extends BaseEntity implements Serializable { + protected MetadataFieldType type; + + protected Expression() { + } + + protected Expression(MetadataFieldType type) { + this.type = type; + } + + public MetadataFieldType getType() { + return type; + } + + public void setType(MetadataFieldType type) { + this.type = type; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + Expression that = (Expression) o; + + if (type != that.type) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (type != null ? type.hashCode() : 0); + return result; + } + } + @Entity(name = "ConstantExpression") + public static class ConstantExpression extends Expression { + private String value; + + public ConstantExpression() { + } + + public ConstantExpression(String value, MetadataFieldType type) { + super(type); + this.value = value; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + ConstantExpression that = (ConstantExpression) o; + + if (value != null ? !value.equals(that.value) : that.value != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (value != null ? 
value.hashCode() : 0); + return result; + } + } + @Entity(name = "ConditionAndExpression") + @Table(name = "rca_cond_and_expr") + public static class ConditionAndExpression extends BaseEntity { + private Condition condition; + private Expression expression; + + public ConditionAndExpression() { + } + + public ConditionAndExpression(Condition condition, Expression expression) { + this.condition = condition; + this.expression = expression; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "cond_cond_id") + public Condition getCondition() { + return condition; + } + + public void setCondition(Condition condition) { + this.condition = condition; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "cond_expression_id") + public Expression getExpression() { + return expression; + } + + public void setExpression(Expression expression) { + this.expression = expression; + } + } + @Entity(name = "ConditionalExpression") + public static class ConditionalExpression extends Expression { + private Set possibilities; + + public ConditionalExpression() { + } + + public ConditionalExpression(MetadataFieldType type, Set possibilities) { + super(type); + this.possibilities = possibilities; + } + + @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER) + @JoinTable(name = "rca_cond_posssibility", joinColumns = { + @JoinColumn(name = "f_cond_id")}, inverseJoinColumns = { + @JoinColumn(name = "f_pos_id")}) + public Set getPossibilities() { + return possibilities; + } + + public void setPossibilities(Set possibilities) { + this.possibilities = possibilities; + } + } + @Entity(name = "Condition") + @Table(name = "rca_condition") + public static abstract class Condition extends BaseEntity implements Serializable { + } + @Entity(name = "CompoundCondition") + public static class CompoundCondition extends Condition { + private Condition first; + private Condition second; + private LogicalOperator op; + + public CompoundCondition() { + } + + public CompoundCondition(Condition first, Condition second) { + this.first = first; + this.second = second; + this.op = LogicalOperator.AND; + } + + public CompoundCondition(Condition first, Condition second, LogicalOperator op) { + this.first = first; + this.second = second; + this.op = op; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "first_id") + public Condition getFirst() { + return first; + } + + public void setFirst(Condition first) { + this.first = first; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "second_id") + public Condition getSecond() { + return second; + } + + public void setSecond(Condition second) { + this.second = second; + } + + public LogicalOperator getOp() { + return op; + } + + public void setOp(LogicalOperator op) { + this.op = op; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + CompoundCondition that = (CompoundCondition) o; + + if (first != null ? !first.equals(that.first) : that.first != null) { + return false; + } + if (op != that.op) { + return false; + } + if (second != null ? 
!second.equals(that.second) : that.second != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (first != null ? first.hashCode() : 0); + result = 31 * result + (second != null ? second.hashCode() : 0); + result = 31 * result + (op != null ? op.hashCode() : 0); + return result; + } + } + @Entity + @Table(name = "rca_cause") + public static class Cause extends BaseEntity implements Serializable { + private String name; + private String nodeType; + private MappingRelationType relationType; + private Set fetchConditions; + private Condition condition; + private Condition auxCondition; + private String messageTemplate; + private String generalMessageTemplate; + private DataManipulationFunction function; + private Set messageTemplateParams; + private Set generalMessageTemplateParams; + private TimeManipulation startTimeManipulation; + private TimeManipulation endTimeManipulation; + private String plugin; + + public Cause() { + } + + public Cause(String name, String nodeType, MappingRelationType relationType, Set fetchConditions, + Condition condition, String messageTemplate, DataManipulationFunction function, Set messageTemplateParams, String plugin) { + this.name = name; + this.nodeType = nodeType; + this.relationType = relationType; + this.fetchConditions = fetchConditions; + this.condition = condition; + this.messageTemplate = messageTemplate; + this.function = function; + this.messageTemplateParams = messageTemplateParams; + this.plugin = plugin; + } + + public String getNodeType() { + return nodeType; + } + + public void setNodeType(String nodeType) { + this.nodeType = nodeType; + } + + public String getPlugin() { + return plugin; + } + + public void setPlugin(String plugin) { + this.plugin = plugin; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "condition_id") + public Condition getCondition() { + return condition; + } + + public void setCondition(Condition condition) { + this.condition = condition; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "aux_condition_id") + public Condition getAuxCondition() { + return auxCondition; + } + + public void setAuxCondition(Condition auxCondition) { + this.auxCondition = auxCondition; + } + + public String getMessageTemplate() { + return messageTemplate; + } + + public void setMessageTemplate(String messageTemplate) { + this.messageTemplate = messageTemplate; + } + + public String getGeneralMessageTemplate() { + return generalMessageTemplate; + } + + public void setGeneralMessageTemplate(String generalMessageTemplate) { + this.generalMessageTemplate = generalMessageTemplate; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public DataManipulationFunction getFunction() { + return function; + } + + public void setFunction(DataManipulationFunction function) { + this.function = function; + } + + public MappingRelationType getRelationType() { + return relationType; + } + + public void setRelationType(MappingRelationType relationType) { + this.relationType = relationType; + } + + @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER) + @JoinTable(name = "rca_cause_param", joinColumns = { + @JoinColumn(name = "f_cause_id")}, inverseJoinColumns = { + @JoinColumn(name = "f_param_id")}) + public Set getMessageTemplateParams() { + return messageTemplateParams; 
+ } + + public void setMessageTemplateParams(Set messageTemplateParams) { + this.messageTemplateParams = messageTemplateParams; + } + + @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER) + @JoinTable(name = "rca_cause_general_param", joinColumns = { + @JoinColumn(name = "f_cause_id")}, inverseJoinColumns = { + @JoinColumn(name = "f_param_id")}) + public Set getGeneralMessageTemplateParams() { + return generalMessageTemplateParams; + } + + public void setGeneralMessageTemplateParams(Set generalMessageTemplateParams) { + this.generalMessageTemplateParams = generalMessageTemplateParams; + } + + @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER) + @JoinTable(name = "rca_cause_fetch", joinColumns = { + @JoinColumn(name = "f_cause_id")}, inverseJoinColumns = { + @JoinColumn(name = "f_fetch_id")}) + public Set getFetchConditions() { + return fetchConditions; + } + + public void setFetchConditions(Set fetchConditions) { + this.fetchConditions = fetchConditions; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "start_time_manip_id") + public TimeManipulation getStartTimeManipulation() { + return startTimeManipulation; + } + + public void setStartTimeManipulation(TimeManipulation startTimeManipulation) { + this.startTimeManipulation = startTimeManipulation; + } + + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + @JoinColumn(name = "end_time_manip_id") + public TimeManipulation getEndTimeManipulation() { + return endTimeManipulation; + } + + public void setEndTimeManipulation(TimeManipulation endTimeManipulation) { + this.endTimeManipulation = endTimeManipulation; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + Cause cause = (Cause) o; + + if (function != cause.function) { + return false; + } + if (messageTemplate != null ? !messageTemplate.equals(cause.messageTemplate) : cause.messageTemplate != null) { + return false; + } + if (name != null ? !name.equals(cause.name) : cause.name != null) { + return false; + } + if (nodeType != null ? !nodeType.equals(cause.nodeType) : cause.nodeType != null) { + return false; + } + if (relationType != cause.relationType) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (name != null ? name.hashCode() : 0); + result = 31 * result + (nodeType != null ? nodeType.hashCode() : 0); + result = 31 * result + (relationType != null ? relationType.hashCode() : 0); + result = 31 * result + (messageTemplate != null ? messageTemplate.hashCode() : 0); + result = 31 * result + (function != null ? 
function.hashCode() : 0); + return result; + } + } + @Entity(name = "CalculationExpression") + public static class CalculationExpression extends Expression { + private DataManipulationFunction function; + private String objectType; + private String fieldName; + + public CalculationExpression() { + } + + public CalculationExpression(DataManipulationFunction function, String objectType, String fieldName) { + this.function = function; + this.objectType = objectType; + this.fieldName = fieldName; + } + + public DataManipulationFunction getFunction() { + return function; + } + + public void setFunction(DataManipulationFunction function) { + this.function = function; + } + + public String getObjectType() { + return objectType; + } + + public void setObjectType(String objectType) { + this.objectType = objectType; + } + + public String getFieldName() { + return fieldName; + } + + public void setFieldName(String fieldName) { + this.fieldName = fieldName; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + CalculationExpression that = (CalculationExpression) o; + + if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) { + return false; + } + if (function != that.function) { + return false; + } + if (objectType != null ? !objectType.equals(that.objectType) : that.objectType != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (function != null ? function.hashCode() : 0); + result = 31 * result + (objectType != null ? objectType.hashCode() : 0); + result = 31 * result + (fieldName != null ? fieldName.hashCode() : 0); + return result; + } + } + @Entity(name = "AlertCondition") + public static class AlertCondition extends Condition { + private String ruleName; + + public AlertCondition() { + } + + public String getRuleName() { + return ruleName; + } + + public void setRuleName(String ruleName) { + this.ruleName = ruleName; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + AlertCondition that = (AlertCondition) o; + + if (ruleName != null ? !ruleName.equals(that.ruleName) : that.ruleName != null) { + return false; + } + + return true; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (ruleName != null ? ruleName.hashCode() : 0); + return result; + } + } + @MappedSuperclass + public static abstract class BaseEntity implements Serializable { + private UUID id; + + @Id + @Column(name = "f_id") + @GeneratedValue + public UUID getId() { + return id; + } + + public void setId(UUID id) { + this.id = id; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + BaseEntity that = (BaseEntity) o; + + // noinspection SimplifiableIfStatement + if (id == null || that.id == null) { + return false; // null != everything (including null) + } + return id.equals(that.id); + } + + @Override + public int hashCode() { + return id != null ? 
id.hashCode() : 0; + } + + } + + public enum RelativeToType { + START, + END + } + + public enum Operator { + EQUALS(" = "), + NOT_EQUALS(" != "), + GREATER_THAN(" > "), + SMALLER_THAN(" < "), + CONTAINS(" contains "), + BEGINS_WITH(" begins with "), + ENDS_WITH(" ends with "); + + private String readable; + + private Operator(String readable) { + this.readable = readable; + } + + @Override + public String toString() { + return readable; + } + } + + public enum MathOperator { + ADD, + SUBTRACT, + MULTIPLY, + DIVIDE, + MAX; + + @Override + public String toString() { + switch (this) { + case ADD: + return "+"; + case SUBTRACT: + return "-"; + case MULTIPLY: + return "*"; + case DIVIDE: + return "/"; + case MAX: + return "max"; + } + return null; + } + + + } + + public enum ManipulationType { + MOVE_IT_BY_ABSOLUTE_TIME, + MOVE_IT_RELATIVE_TO_OTHER_TIME, + ABSOLUTE + } + + public enum LogicalOperator { + AND, + OR + } + + public enum MetadataFieldType { + BOOLEAN, STRING, NULL, NUMERIC + } + + public enum DataManipulationFunction { + AVG, + MAX, + MIN, + COUNT, + SUM, + EXISTS, + LAST, + FIRST, + INCREASED_BY + } + + public enum MappingRelationType { + JOB_BACKING_UP_HOST, JOB_USES_BACKUPCLIENT, JOB_USES_BACKUP_POOL, JOB_USES_MEDIA_SERVER, RECOVERPOINT_CG_COPY_ACTIVE_RPA, JOB_USES_TAPE_DRIVE + } + + public static class DefaultTemplatesVault { + + public static final String NO_AGENT_RAN_ON_CLIENT = "No agent ran on client"; + public static final String BACKUP_AGENT_IS_DOWN_ON_BACKUP_CLIENT = "Backup agent is down on backup client"; + public static final String BACKUP_FAILED_SYMPTOM_NAME = "Backup failed"; + public static final String CG_COPY_RPO_VIOLATION_SYMPTOM_NAME = "CG Copy RPO Violation"; + public static final String PACKET_LOSS = "Packet Loss"; + + private static RCATemplate buildBackupFailedTemplate() { + Symptom symptom = new Symptom(BACKUP_FAILED_SYMPTOM_NAME, "Backupjob", + new SimpleCondition(new FieldExpression("Backupjob", "status"), Operator.EQUALS, new ConstantExpression("failed", MetadataFieldType.STRING))); + symptom.setFilterString("status = 'failed'"); + symptom.setNodeTypeString("clients"); + symptom.setProcessTypeString("backup jobs"); + + // high client cpu ' + WeightedCause cause1 = buildHighClientCpu(); + + // high storage node cpu + WeightedCause cause2 = buildHighStorageNodeCpu(); + + // no space left on device + WeightedCause cause3 = buildNoSpaceOnPool(); + + // no space left on data domain + WeightedCause cause4 = buildNoSpaceOnDD(); + + // no agent ran on client + WeightedCause cause5 = buildNoAgentOnClient(); + + // no agent ran on client + WeightedCause cause6 = buildNoAgentOnClient2(); + + // errors on tape + WeightedCause errorsOnTapeCause = buildErrorsOnTape(); + + // switch port for ethernet + WeightedCause cause7 = buildPortSwitchForEthernet(); + + // switch port for fibre channel + WeightedCause cause8 = buildPortSwitchForFibreChannel(); + + // port settings for ethernet + WeightedCause cause9 = buildDifferentPortSettings(); + + // no space on client + WeightedCause noSpaceOnClientCause = buildNoSpaceOnClient(); + + // todo -all other causes + Set causes = new HashSet<>(); + causes.add(cause1); + causes.add(cause2); + causes.add(cause3); + causes.add(cause4); + causes.add(cause5); + causes.add(cause6); + //causes.add(errorsOnTapeCause); + causes.add(cause7); + causes.add(cause8); + causes.add(cause9); + causes.add(noSpaceOnClientCause); + return new RCATemplate("Backup failed RCA template", symptom, causes); + } + + private static WeightedCause 
buildNoSpaceOnClient() { + Expression clientName = new FieldExpression("Host", "name"); + NumberedExpression nameFirst = new NumberedExpression(1L, clientName); + + Expression mountpointName = new FieldExpression("FilesystemConfig", "mountpoint"); + NumberedExpression mountpointSecond = new NumberedExpression(2L, mountpointName); + + Expression leftSpace = new MathExpression(new FieldExpression("FilesystemConfig", "totalSpace"), + new FieldExpression("FilesystemStatus", "usedSpace"), + MathOperator.SUBTRACT); + ParameterExpression leftSpaceOnClient = new ParameterExpression("leftSpaceOnClient", "50", MetadataFieldType.NUMERIC); + NumberedExpression spaceThird = new NumberedExpression(3L, leftSpace); + + + Set messageArgs = new HashSet<>(); + messageArgs.add(nameFirst); + messageArgs.add(mountpointSecond); + messageArgs.add(spaceThird); + + Condition fetchCondition1 = new CompoundCondition( + new SimpleCondition(new FieldExpression("FilesystemStatus", "mountpoint"), Operator.EQUALS, new ConstantExpression("C:", MetadataFieldType.STRING)), + new SimpleCondition(new FieldExpression("FilesystemStatus", "agentId"), Operator.EQUALS, new FieldExpression("Host", "id")) + ); + Condition fetchCondition2 = new CompoundCondition( + new SimpleCondition(new FieldExpression("FilesystemConfig", "mountpoint"), Operator.EQUALS, new ConstantExpression("C:", MetadataFieldType.STRING)), + new SimpleCondition(new FieldExpression("FilesystemConfig", "agentId"), Operator.EQUALS, new FieldExpression("Host", "id")) + ); + + Set fetchConditions = new HashSet<>(); + fetchConditions.add(fetchCondition1); + fetchConditions.add(fetchCondition2); + + SimpleCondition condition = new SimpleCondition(leftSpace, Operator.SMALLER_THAN, leftSpaceOnClient); + + Cause noSpaceOnClient = new Cause("No space on Client", "Host", MappingRelationType.JOB_BACKING_UP_HOST, + fetchConditions, condition, "There is a lack of free space in {param}:{param}. 
The current free space is {param} MB", DataManipulationFunction.LAST, messageArgs, null); + + return new WeightedCause(noSpaceOnClient, 95); + } + + private static WeightedCause buildErrorsOnTape() { + + Expression tapeDriveNameExp = new FieldExpression("TapeDrive", "name"); + NumberedExpression tapeDriveName = new NumberedExpression(1L, tapeDriveNameExp); + + Expression errorsExp = new FieldExpression("TapedriveStatus", "correctedReadErrors"); + + Set messageArgs = new HashSet<>(); + messageArgs.add(tapeDriveName); + + Condition fetchCondition = new SimpleCondition(new FieldExpression("TapedriveStatus", "nodeId"), + Operator.EQUALS, new FieldExpression("TapeDrive", "id")); + Set fetchConditions = new HashSet<>(); + fetchConditions.add(fetchCondition); + + ConstantExpression zero = new ConstantExpression("0", MetadataFieldType.NUMERIC); + + Cause errorsOnTape = new Cause("Errors on tape", "TapeDrive", MappingRelationType.JOB_USES_TAPE_DRIVE, fetchConditions, + new CompoundCondition( + new CompoundCondition( + new CompoundCondition( + new SimpleCondition(new FieldExpression("TapedriveStatus", "correctedReadErrors"), Operator.GREATER_THAN, zero), + new SimpleCondition(new FieldExpression("TapedriveStatus", "correctedWriteErrors"), Operator.GREATER_THAN, zero), + LogicalOperator.OR), + new SimpleCondition(new FieldExpression("TapedriveStatus", "uncorrectedReadErrors"), Operator.GREATER_THAN, zero), + LogicalOperator.OR), + new SimpleCondition(new FieldExpression("TapedriveStatus", "uncorrectedWriteErrors"), Operator.GREATER_THAN, zero), + LogicalOperator.OR), + "Tape drive {param} had errors", DataManipulationFunction.INCREASED_BY, messageArgs, null); + return new WeightedCause(errorsOnTape, 70); + } + + private static WeightedCause buildNoSpaceOnDD() { + Expression ddName = new FieldExpression("DataDomain", "name"); + NumberedExpression ddFirst = new NumberedExpression(1L, ddName); + + + Expression ddUsage = new MathExpression(new MathExpression( + new FieldExpression("FilesystemStatus", "usedSpace"), + new FieldExpression("FilesystemConfig", "totalSpace"), + MathOperator.DIVIDE + ), + new ConstantExpression("100", MetadataFieldType.NUMERIC), MathOperator.MULTIPLY); + NumberedExpression ddSecond = new NumberedExpression(2L, ddUsage); + ParameterExpression ddUsageThreshold = new ParameterExpression("ddUsageThreshold", "80", MetadataFieldType.NUMERIC); + NumberedExpression ddThird = new NumberedExpression(3L, ddUsageThreshold); + Set messageArgs4 = new HashSet<>(); + messageArgs4.add(ddFirst); + messageArgs4.add(ddSecond); + messageArgs4.add(ddThird); + + Condition fetchCondition4 = new CompoundCondition( + new SimpleCondition(new FieldExpression("FilesystemStatus", "mountpoint"), Operator.EQUALS, new ConstantExpression("Data", MetadataFieldType.STRING)), + new SimpleCondition(new FieldExpression("FilesystemStatus", "agentId"), Operator.EQUALS, new FieldExpression("DataDomain", "id")) + ); + Condition fetchCondition5 = new CompoundCondition( + new SimpleCondition(new FieldExpression("FilesystemConfig", "mountpoint"), Operator.EQUALS, new ConstantExpression("Data", MetadataFieldType.STRING)), + new SimpleCondition(new FieldExpression("FilesystemConfig", "agentId"), Operator.EQUALS, new FieldExpression("DataDomain", "id")) + ); + Set fetchConditions4 = new HashSet<>(); + fetchConditions4.add(fetchCondition4); + fetchConditions4.add(fetchCondition5); + + Cause noSpaceOnDD = new Cause("No space on data domain", "DataDomain", MappingRelationType.JOB_USES_BACKUPCLIENT, + fetchConditions4, new 
SimpleCondition(ddUsage, Operator.GREATER_THAN, ddUsageThreshold), + "The capacity utilization for DataDomain {param} is {param}% which is above the threshold {param}%", DataManipulationFunction.LAST, messageArgs4, null); + + noSpaceOnDD.setGeneralMessageTemplate("The capacity utilization for DataDomain {param} is above the threshold {param}%"); + + Set generalMessageParams = new HashSet<>(); + generalMessageParams.add(ddFirst); + NumberedExpression ddThirdWhichIsNowSecond = new NumberedExpression(2L, ddUsageThreshold); + generalMessageParams.add(ddThirdWhichIsNowSecond); + noSpaceOnDD.setGeneralMessageTemplateParams(generalMessageParams); + + return new WeightedCause(noSpaceOnDD, 90); + } + + private static WeightedCause buildNoSpaceOnPool() { + Expression poolName = new FieldExpression("BackupPool", "name"); + NumberedExpression deviceFirst = new NumberedExpression(1L, poolName); + Expression numOfEmptyVolumes = new CalculationExpression(DataManipulationFunction.COUNT, "VolumeStatus", "id"); + NumberedExpression deviceSecond = new NumberedExpression(2L, numOfEmptyVolumes); + Set messageArgs3 = new HashSet<>(); + messageArgs3.add(deviceFirst); + messageArgs3.add(deviceSecond); + + Condition fetchCondition3 = new CompoundCondition( + new SimpleCondition(new FieldExpression("VolumeStatus", "pool"), Operator.EQUALS, new FieldExpression("BackupPool", "name")), + new SimpleCondition(new FieldExpression("VolumeStatus", "agentId"), Operator.EQUALS, new FieldExpression("Backupjob", "agentId")) + ); + Set fetchConditions3 = new HashSet<>(); + fetchConditions3.add(fetchCondition3); + + ConstantExpression threshold = new ConstantExpression("5", MetadataFieldType.NUMERIC); + Cause noSpaceOnDevice = new Cause("No space left on device", "BackupPool", MappingRelationType.JOB_USES_BACKUP_POOL, + fetchConditions3, + new SimpleCondition( + new FieldExpression("VolumeStatus", "state"), + Operator.SMALLER_THAN, + threshold), + "Pool {param} has only {param} empty volumes left", DataManipulationFunction.COUNT, messageArgs3, null); + noSpaceOnDevice.setAuxCondition(new SimpleCondition(new FieldExpression("VolumeStatus", "state"), + Operator.EQUALS, new ConstantExpression("Empty", MetadataFieldType.STRING))); + + noSpaceOnDevice.setGeneralMessageTemplate("Pool {param} has less than {param} empty volumes left"); + Set generalMessageParams = new HashSet<>(); + generalMessageParams.add(deviceFirst); + generalMessageParams.add(new NumberedExpression(2L, threshold)); + noSpaceOnDevice.setGeneralMessageTemplateParams(generalMessageParams); + + return new WeightedCause(noSpaceOnDevice, 80); + } + + private static WeightedCause buildHighStorageNodeCpu() { + Expression storageName = new FieldExpression("Host", "name"); + NumberedExpression storageFirst = new NumberedExpression(1L, storageName); + Expression avgCpuStorage = new CalculationExpression(DataManipulationFunction.AVG, "HostStatus", "cpuUsed"); + NumberedExpression storageSecond = new NumberedExpression(2L, avgCpuStorage); + Set messageArgs2 = new HashSet<>(); + messageArgs2.add(storageFirst); + messageArgs2.add(storageSecond); + + Condition fetchCondition2 = new SimpleCondition(new FieldExpression("HostStatus", "nodeId"), + Operator.EQUALS, new FieldExpression("Host", "id")); + Set fetchConditions2 = new HashSet<>(); + fetchConditions2.add(fetchCondition2); + + ParameterExpression storageNodeHighCPUThreshold = new ParameterExpression("storageNodeHighCPUThreshold", "95", MetadataFieldType.NUMERIC); + Cause highCpuOnStorageNode = new Cause("High storage node 
CPU", "Host", MappingRelationType.JOB_USES_MEDIA_SERVER, + fetchConditions2, + new SimpleCondition(new FieldExpression("HostStatus", "cpuUsed"), Operator.GREATER_THAN, + storageNodeHighCPUThreshold), "Storage node {param} CPU was {param}", DataManipulationFunction.AVG, + messageArgs2, null); + TimeManipulation startTimeManipulation = new TimeManipulation(ManipulationType.MOVE_IT_RELATIVE_TO_OTHER_TIME, + MathOperator.SUBTRACT, 3600L, RelativeToType.END, "job_cpu_offset"); + highCpuOnStorageNode.setStartTimeManipulation(startTimeManipulation); + + highCpuOnStorageNode.setGeneralMessageTemplate("Storage node {param} CPU was higher than {param}"); + Set generalMessageParams = new HashSet<>(); + generalMessageParams.add(storageFirst); + generalMessageParams.add(new NumberedExpression(2L, storageNodeHighCPUThreshold)); + highCpuOnStorageNode.setGeneralMessageTemplateParams(generalMessageParams); + + return new WeightedCause(highCpuOnStorageNode, 60); + } + + private static WeightedCause buildHighClientCpu() { + Expression clientName = new FieldExpression("Host", "name"); + NumberedExpression clientFirst = new NumberedExpression(1L, clientName); + Expression avgCpu = new CalculationExpression(DataManipulationFunction.AVG, "HostStatus", "cpuUsed"); + NumberedExpression clientSecond = new NumberedExpression(2L, avgCpu); + Set messageArgs1 = new HashSet<>(); + messageArgs1.add(clientFirst); + messageArgs1.add(clientSecond); + + Condition fetchCondition = new SimpleCondition(new FieldExpression("HostStatus", "nodeId"), + Operator.EQUALS, new FieldExpression("Host", "id")); + Set fetchConditions = new HashSet<>(); + fetchConditions.add(fetchCondition); + + + ParameterExpression clientHighCPUThreshold = new ParameterExpression("clientHighCPUThreshold", "95", MetadataFieldType.NUMERIC); + Cause highCpuOnClient = new Cause("High client CPU", "Host", MappingRelationType.JOB_BACKING_UP_HOST, + new HashSet<>(fetchConditions), + new SimpleCondition(new FieldExpression("HostStatus", "cpuUsed"), Operator.GREATER_THAN, + clientHighCPUThreshold), "Client {param} CPU was {param}", DataManipulationFunction.AVG, + messageArgs1, null); + + TimeManipulation startTimeManipulation = new TimeManipulation(ManipulationType.MOVE_IT_RELATIVE_TO_OTHER_TIME, + MathOperator.SUBTRACT, 3600L, RelativeToType.END, "job_cpu_offset"); + highCpuOnClient.setStartTimeManipulation(startTimeManipulation); + + highCpuOnClient.setGeneralMessageTemplate("Client {param} CPU was higher than {param}"); + Set generalMessageParams = new HashSet<>(); + generalMessageParams.add(clientFirst); + generalMessageParams.add(new NumberedExpression(2L, clientHighCPUThreshold)); + highCpuOnClient.setGeneralMessageTemplateParams(generalMessageParams); + + return new WeightedCause(highCpuOnClient, 50); + } + + private static WeightedCause buildNoAgentOnClient() { + Expression clientName = new FieldExpression("Host", "name"); + NumberedExpression clientFirst = new NumberedExpression(1L, clientName); + Set messageArgs1 = new HashSet<>(); + messageArgs1.add(clientFirst); + + Condition fetchHostStatus = new SimpleCondition(new FieldExpression("HostStatus", "nodeId"), + Operator.EQUALS, new FieldExpression("Host", "id")); + + ConditionAndExpression netbackup = new ConditionAndExpression( + new SimpleCondition( + new FieldExpression("Backupjob", "module"), + Operator.EQUALS, + new ConstantExpression("NetBackupModule", MetadataFieldType.STRING) + ), + new ConstantExpression("bpinetd", MetadataFieldType.STRING) + ); + + ConditionAndExpression networker = new 
ConditionAndExpression( + new SimpleCondition( + new FieldExpression("Backupjob", "module"), + Operator.EQUALS, + new ConstantExpression("NetWorkerModule", MetadataFieldType.STRING) + ), + new ConstantExpression("nsrexecd", MetadataFieldType.STRING) + ); + + ConditionAndExpression backupExec = new ConditionAndExpression( + new SimpleCondition( + new FieldExpression("Backupjob", "module"), + Operator.EQUALS, + new ConstantExpression("BackupExecModule", MetadataFieldType.STRING) + ), + new ConstantExpression("beremote", MetadataFieldType.STRING) + ); + + ConditionAndExpression dataProtector = new ConditionAndExpression( + new SimpleCondition( + new FieldExpression("Backupjob", "module"), + Operator.EQUALS, + new ConstantExpression("DataProtectorModule", MetadataFieldType.STRING) + ), + new ConstantExpression("omniinet", MetadataFieldType.STRING) + ); + + ConditionAndExpression tsm = new ConditionAndExpression( + new SimpleCondition( + new FieldExpression("Backupjob", "module"), + Operator.EQUALS, + new ConstantExpression("TSMModule", MetadataFieldType.STRING) + ), + new ConstantExpression("dsmcsvc", MetadataFieldType.STRING) + ); + + ConditionAndExpression arcServe = new ConditionAndExpression( + new SimpleCondition( + new FieldExpression("Backupjob", "module"), + Operator.EQUALS, + new ConstantExpression("ArcserveModule", MetadataFieldType.STRING) + ), + new ConstantExpression("UnivAgent", MetadataFieldType.STRING) + ); + + ConditionAndExpression commVault = new ConditionAndExpression( + new SimpleCondition( + new FieldExpression("Backupjob", "module"), + Operator.EQUALS, + new ConstantExpression("CommvaultModule", MetadataFieldType.STRING) + ), + new ConstantExpression("evmgrc", MetadataFieldType.STRING) + ); + + ConditionAndExpression avamar = new ConditionAndExpression( + new SimpleCondition( + new FieldExpression("Backupjob", "module"), + Operator.EQUALS, + new ConstantExpression("AvamarModule", MetadataFieldType.STRING) + ), + new ConstantExpression("avagent", MetadataFieldType.STRING) + ); + + Set possibilities = new HashSet<>(); + possibilities.add(netbackup); + possibilities.add(networker); + possibilities.add(backupExec); + possibilities.add(dataProtector); + possibilities.add(tsm); + possibilities.add(arcServe); + possibilities.add(commVault); + possibilities.add(avamar); + + Condition fetchProcessStatus = new CompoundCondition( + new SimpleCondition(new FieldExpression("ProcessStatus", "nodeId"), + Operator.EQUALS, new FieldExpression("Host", "id")), + new SimpleCondition(new FieldExpression("ProcessStatus", "name"), + Operator.CONTAINS, new ConditionalExpression(MetadataFieldType.STRING, possibilities)), + LogicalOperator.AND + ); + + Set fetchConditions = new HashSet<>(); + fetchConditions.add(fetchHostStatus); + fetchConditions.add(fetchProcessStatus); + + Condition condition = new CompoundCondition( + new SimpleCondition( + new CalculationExpression(DataManipulationFunction.COUNT, "HostStatus", "id"), + Operator.GREATER_THAN, + new ConstantExpression("0", MetadataFieldType.NUMERIC) + ), + new SimpleCondition( + new CalculationExpression(DataManipulationFunction.COUNT, "ProcessStatus", "id"), + Operator.EQUALS, + new ConstantExpression("0", MetadataFieldType.NUMERIC) + ), + LogicalOperator.AND + ); + + Cause noAgentOnClient = new Cause( + NO_AGENT_RAN_ON_CLIENT, "Host", MappingRelationType.JOB_BACKING_UP_HOST, + fetchConditions, condition, "Backup agent was down on host: {param}", DataManipulationFunction.LAST, + messageArgs1, null); + + return new 
WeightedCause(noAgentOnClient, 100); + } + + private static WeightedCause buildNoAgentOnClient2() { + + Expression agentName = new FieldExpression("BackupClient", "name"); + NumberedExpression agentFirst = new NumberedExpression(1L, agentName); + Set messageArgs1 = new HashSet<>(); + messageArgs1.add(agentFirst); + + Condition fetchCondition6 = new SimpleCondition(new FieldExpression("ClientStatus", "nodeId"), + Operator.EQUALS, new FieldExpression("BackupClient", "id")); + + Set fetchConditions6 = new HashSet<>(); + fetchConditions6.add(fetchCondition6); + + + Cause AgentDown = new Cause(BACKUP_AGENT_IS_DOWN_ON_BACKUP_CLIENT, "BackupClient", MappingRelationType.JOB_USES_BACKUPCLIENT, + fetchConditions6, + new CompoundCondition( + new SimpleCondition(new FieldExpression("ClientStatus", "responding"), Operator.EQUALS, new ConstantExpression("true", MetadataFieldType.BOOLEAN)), + new SimpleCondition(new FieldExpression("ClientStatus", "daemonRunning"), Operator.EQUALS, new ConstantExpression("false", MetadataFieldType.BOOLEAN))), + "Backup agent {param} is down on backup client", DataManipulationFunction.LAST, messageArgs1, null); + + return new WeightedCause(AgentDown, 101); + } + + private static WeightedCause buildPortSwitchForEthernet() { + Expression portName = new FieldExpression("EthernetPort", "name"); + Expression nodeType = new FieldExpression("NetintStatus", "module"); + Expression nodeName = new FieldExpression("NetintStatus", "agentName"); + NumberedExpression portNameFirst = new NumberedExpression(1L, portName); + NumberedExpression nodeTypeSecond = new NumberedExpression(2L, nodeType); + NumberedExpression nodeNameThird = new NumberedExpression(3L, nodeName); + Set messageArgs1 = new HashSet<>(); + messageArgs1.add(portNameFirst); + messageArgs1.add(nodeTypeSecond); + messageArgs1.add(nodeNameThird); + + Condition fetchCondition7 = new SimpleCondition(new FieldExpression("NetintStatus", "nodeId"), + Operator.EQUALS, new FieldExpression("EthernetPort", "id")); + + Set fetchConditions7 = new HashSet<>(); + fetchConditions7.add(fetchCondition7); + + + Cause SwitchPortDown = new Cause("Switch port is down for ethernet", "EthernetPort", null, + fetchConditions7, + new SimpleCondition(new FieldExpression("NetintStatus", "linkup"), Operator.EQUALS, new ConstantExpression("false", MetadataFieldType.BOOLEAN)), + "Ethernet port:{param} for {param}:{param} is down", DataManipulationFunction.EXISTS, messageArgs1, "com.emc.dpa.analysis.rca.RCAPortSwitchIsDownPlugin"); + + return new WeightedCause(SwitchPortDown, 61); + } + + + private static WeightedCause buildDifferentPortSettings() { + Set messageArgs1 = new HashSet<>(); + + Condition fetchCondition1 = new SimpleCondition(new FieldExpression("NetintStatus", "nodeId"), + Operator.EQUALS, new FieldExpression("EthernetPort", "id")); + + Condition fetchCondition2 = new SimpleCondition(new FieldExpression("NetintConfig", "nodeId"), + Operator.EQUALS, new FieldExpression("EthernetPort", "id")); + + Set fetchConditions = new HashSet<>(); + fetchConditions.add(fetchCondition1); + fetchConditions.add(fetchCondition2); + + Cause PortSettings = new Cause("Different Port Settings", "EthernetPort", null, + fetchConditions, + new SimpleCondition(new FieldExpression("NetintStatus", "speed"), Operator.NOT_EQUALS, + new ConstantExpression("0", MetadataFieldType.NUMERIC)), + "", DataManipulationFunction.EXISTS, messageArgs1, "com.emc.dpa.analysis.rca.RCASwitchSettingsPlugin"); + + return new WeightedCause(PortSettings, 63); + } + + + private static 
WeightedCause buildPortSwitchForFibreChannel() { + Expression portName = new FieldExpression("FibreChannelPort", "name"); + Expression nodeType = new FieldExpression("FcportStatus", "module"); + Expression nodeName = new FieldExpression("FcportStatus", "agentName"); + NumberedExpression portNameFirst = new NumberedExpression(1L, portName); + NumberedExpression nodeTypeSecond = new NumberedExpression(2L, nodeType); + NumberedExpression nodeNameThird = new NumberedExpression(3L, nodeName); + Set messageArgs1 = new HashSet<>(); + messageArgs1.add(portNameFirst); + messageArgs1.add(nodeTypeSecond); + messageArgs1.add(nodeNameThird); + + Condition fetchCondition7 = new SimpleCondition(new FieldExpression("FcportStatus", "nodeId"), + Operator.EQUALS, new FieldExpression("FibreChannelPort", "id")); + + Set fetchConditions7 = new HashSet<>(); + fetchConditions7.add(fetchCondition7); + + + Cause SwitchPortDown = new Cause("Switch port is down for fibre channel", "FibreChannelPort", null, + fetchConditions7, + new SimpleCondition(new FieldExpression("FcportStatus", "linkup"), Operator.EQUALS, new ConstantExpression("false", MetadataFieldType.BOOLEAN)), + "Fibre channel port:{param} for {param}:{param} is down", DataManipulationFunction.EXISTS, messageArgs1, "com.emc.dpa.analysis.rca.RCAPortSwitchIsDownPlugin"); + + return new WeightedCause(SwitchPortDown, 60); + + } + + + private static RCATemplate buildRPAHighLoadTemplate() { + Symptom symptom = new Symptom("CG Copy high load", "CgCopyPerf", + new SimpleCondition(new FieldExpression("CgCopyPerf", "highLoadTime"), Operator.GREATER_THAN, new ConstantExpression("0", MetadataFieldType.NUMERIC))); + symptom.setFilterString("highLoadTime > 0"); + symptom.setNodeTypeString("cg copies"); + symptom.setProcessTypeString("replications"); + + + WeightedCause cause1 = buildHighRPAThroughput(); + WeightedCause cause2 = buildCGCopyFF(); + WeightedCause cause3 = buildPacketLoss(); + + Set causes = new HashSet<>(); + causes.add(cause1); + causes.add(cause2); + causes.add(cause3); + + return new RCATemplate("CG copy High load template", symptom, causes); + } + + private static WeightedCause buildHighRPAThroughput() { + + ParameterExpression highThroughputThreshold = new ParameterExpression("highThroughputThreshold", "122880", MetadataFieldType.NUMERIC); + ConstantExpression megaSize = new ConstantExpression("1024", MetadataFieldType.NUMERIC); + + Expression agentName = new FieldExpression("RpaPerfView", "agentName"); + NumberedExpression rpaFirst = new NumberedExpression(1L, agentName); + Expression rpaName = new FieldExpression("AbstractRecoverPointAppliance", "name"); + NumberedExpression rpaSecond = new NumberedExpression(2L, rpaName); + Expression site = new FieldExpression("RpaPerfView", "site"); + NumberedExpression rpaThird = new NumberedExpression(3L, site); + + MathExpression multiplyWanCompression = new MathExpression( + new FieldExpression("RpaPerfView", "wanThroughput"), + new FieldExpression("RpaPerfView", "compression"), + MathOperator.MULTIPLY); + + Expression wanThroughput = new MathExpression(new FieldExpression("RpaPerfView", "wanThroughput"), multiplyWanCompression, MathOperator.MAX); + Expression maxThroughput = new MathExpression(new FieldExpression("RpaPerfView", "sanThroughput"), wanThroughput, MathOperator.MAX); + Expression maxThroughputMB = new MathExpression(maxThroughput, megaSize, MathOperator.DIVIDE); + NumberedExpression rpaFourth = new NumberedExpression(4L, maxThroughputMB); + Expression limitThroughputMB = new 
MathExpression(highThroughputThreshold, megaSize, MathOperator.DIVIDE); + NumberedExpression rpaFifth = new NumberedExpression(5L, limitThroughputMB); + Set messageArgs1 = new HashSet<>(); + messageArgs1.add(rpaFirst); + messageArgs1.add(rpaSecond); + messageArgs1.add(rpaThird); + messageArgs1.add(rpaFourth); + messageArgs1.add(rpaFifth); + + Condition fetchCondition = new SimpleCondition(new FieldExpression("RpaPerfView", "nodeId"), + Operator.EQUALS, new FieldExpression("AbstractRecoverPointAppliance", "id")); + Set fetchConditions = new HashSet<>(); + fetchConditions.add(fetchCondition); + + + ///(san != null and san>120) || (wan != null and (((compression = null || compression = 0) and wan > 120) || ((compression != null and compression != 0) and wan*compression > 120))) + Condition condition = new CompoundCondition( + new CompoundCondition( + new SimpleCondition(new FieldExpression("RpaPerfView", "sanThroughput"), Operator.NOT_EQUALS, new ConstantExpression("null", MetadataFieldType.NULL)), + new SimpleCondition(new FieldExpression("RpaPerfView", "sanThroughput"), Operator.GREATER_THAN, highThroughputThreshold), + LogicalOperator.AND), + new CompoundCondition( + new SimpleCondition(new FieldExpression("RpaPerfView", "wanThroughput"), Operator.NOT_EQUALS, new ConstantExpression("null", MetadataFieldType.NULL)), + new CompoundCondition( + new CompoundCondition( + new SimpleCondition(new FieldExpression("RpaPerfView", "wanThroughput"), Operator.GREATER_THAN, highThroughputThreshold), + new CompoundCondition( + new SimpleCondition(new FieldExpression("RpaPerfView", "compression"), Operator.EQUALS, new ConstantExpression("null", MetadataFieldType.NULL)), + new SimpleCondition(new FieldExpression("RpaPerfView", "compression"), Operator.EQUALS, new ConstantExpression("0", MetadataFieldType.NUMERIC)), + LogicalOperator.OR), + LogicalOperator.AND), + new CompoundCondition( + new CompoundCondition( + new SimpleCondition(new FieldExpression("RpaPerfView", "compression"), Operator.NOT_EQUALS, new ConstantExpression("null", MetadataFieldType.NULL)), + new SimpleCondition(new FieldExpression("RpaPerfView", "compression"), Operator.NOT_EQUALS, new ConstantExpression("0", MetadataFieldType.NUMERIC)), + LogicalOperator.AND), + new SimpleCondition(multiplyWanCompression, Operator.GREATER_THAN, highThroughputThreshold), + LogicalOperator.AND), + LogicalOperator.OR), + LogicalOperator.AND), + LogicalOperator.OR); + + Cause rpaThroughputHigh = new Cause("High RPA throughput", "AbstractRecoverPointAppliance", MappingRelationType.RECOVERPOINT_CG_COPY_ACTIVE_RPA, + fetchConditions, condition, "The throughput for RPA: {param}:{param} at site {param} was {param}MB, above the RPA throughput limitation ({param}MB)", DataManipulationFunction.EXISTS, + messageArgs1, null); + + TimeManipulation startTimeManipulation = new TimeManipulation( + ManipulationType.MOVE_IT_BY_ABSOLUTE_TIME, + MathOperator.SUBTRACT, + 5 * 60L, + null, + "rpa_throughput_offset" + ); + rpaThroughputHigh.setStartTimeManipulation(startTimeManipulation); + + rpaThroughputHigh.setGeneralMessageTemplate("The throughput for RPA: {param}:{param} at site {param} was above the RPA throughput limitation ({param}MB)"); + Set generalMessageParams = new HashSet<>(); + generalMessageParams.add(rpaFirst); + generalMessageParams.add(rpaSecond); + generalMessageParams.add(rpaThird); + NumberedExpression rpaFifthWichIsNowForth = new NumberedExpression(4L, limitThroughputMB); + generalMessageParams.add(rpaFifthWichIsNowForth); + 
rpaThroughputHigh.setGeneralMessageTemplateParams(generalMessageParams); + + WeightedCause cause = new WeightedCause(rpaThroughputHigh, 100); + return cause; + } + + private static WeightedCause buildCGCopyFF() { + Expression cgCopyName = new FieldExpression("AbstractRecoverPointConsistencyGroupCopy", "name"); + NumberedExpression cgCopyFirst = new NumberedExpression(1L, cgCopyName); + Set messageArgs1 = new HashSet<>(); + messageArgs1.add(cgCopyFirst); + + Condition fetchCondition = new SimpleCondition(new FieldExpression("CgCopyStatus", "nodeId"), + Operator.EQUALS, new FieldExpression("AbstractRecoverPointConsistencyGroupCopy", "id")); + Set fetchConditions = new HashSet<>(); + fetchConditions.add(fetchCondition); + + Condition condition = + new CompoundCondition( + new SimpleCondition(new FieldExpression("CgCopyStatus", "journalMode"), Operator.NOT_EQUALS, new ConstantExpression("null", MetadataFieldType.NULL)), + new SimpleCondition(new FieldExpression("CgCopyStatus", "journalMode"), Operator.EQUALS, new ConstantExpression("Fast Forward", MetadataFieldType.STRING)), + LogicalOperator.AND); + + Cause rpaFastForward = new Cause("High RPA throughput", "AbstractRecoverPointConsistencyGroupCopy", null, + fetchConditions, condition, "Cg Copy {param} has a high latency on writing to remote storage (Fast Forward)", DataManipulationFunction.EXISTS, + messageArgs1, null); + + WeightedCause cause = new WeightedCause(rpaFastForward, 80); + return cause; + } + + private static RCATemplate buildRPOViolationTemplate() { + Symptom symptom = new Symptom(CG_COPY_RPO_VIOLATION_SYMPTOM_NAME, "AnalysisAlert", + new CompoundCondition( + // todo - check state - not closed, currently don't bother + new SimpleCondition(new FieldExpression("AnalysisAlert", "message"), Operator.EQUALS, new ConstantExpression("RPO Violation", MetadataFieldType.STRING)), + new SimpleCondition(new FieldExpression("AnalysisAlert", "component"), Operator.CONTAINS, new ConstantExpression("CG Copy", MetadataFieldType.STRING)) + ) + ); + symptom.setFilterString("message = 'RPO Violation' AND component like '%CG Copy%'"); + symptom.setNodeTypeString("cg copies"); + symptom.setProcessTypeString("replications"); + + + WeightedCause cause1 = buildHighRPAThroughput(); + WeightedCause cause2 = buildCGCopyFF(); + WeightedCause cause3 = buildPacketLoss(); + + Set causes = new HashSet<>(); + causes.add(cause1); + causes.add(cause2); + causes.add(cause3); + + return new RCATemplate("CG copy RPO Violation template", symptom, causes); + } + + private static WeightedCause buildPacketLoss() { + + Expression packetLossSize = new FieldExpression("RpaPerfView", "packetLoss"); + NumberedExpression clientFirst = new NumberedExpression(1L, packetLossSize); + Set messageArgs1 = new HashSet<>(); + messageArgs1.add(clientFirst); + + Condition fetchCondition = new SimpleCondition(new FieldExpression("RpaPerfView", "nodeId"), + Operator.EQUALS, new FieldExpression("AbstractRecoverPointAppliance", "id")); + Set fetchConditions = new HashSet<>(); + fetchConditions.add(fetchCondition); + Condition condition = new SimpleCondition(new FieldExpression("RpaPerfView", "packetLoss"), Operator.GREATER_THAN, + new ConstantExpression("1", MetadataFieldType.NUMERIC)); + + Cause packetLoss = new Cause(PACKET_LOSS, "AbstractRecoverPointAppliance", MappingRelationType.RECOVERPOINT_CG_COPY_ACTIVE_RPA, + fetchConditions, condition, + "There is a bottleneck on the bandwidth. 
Found packet-loss ({param}) on the link.", DataManipulationFunction.EXISTS, + messageArgs1, null); + + packetLoss.setGeneralMessageTemplate("There is a bottleneck on the bandwidth. Found packet-loss (>1) on the link."); + + return new WeightedCause(packetLoss, 200); + } + + public static List getDefaultRCATemplates() { + return Arrays.asList(buildBackupFailedTemplate(), buildRPAHighLoadTemplate(), buildRPOViolationTemplate()); + } + + } + +} diff --git a/hibernate-core/src/test/java/org/hibernate/test/insertordering/InsertOrderingRootEntityNameDependencyTest.java b/hibernate-core/src/test/java/org/hibernate/test/insertordering/InsertOrderingRootEntityNameDependencyTest.java new file mode 100644 index 0000000000..9f746ff445 --- /dev/null +++ b/hibernate-core/src/test/java/org/hibernate/test/insertordering/InsertOrderingRootEntityNameDependencyTest.java @@ -0,0 +1,188 @@ +/* + * Hibernate, Relational Persistence for Idiomatic Java + * + * License: GNU Lesser General Public License (LGPL), version 2.1 or later. + * See the lgpl.txt file in the root directory or . + */ +package org.hibernate.test.insertordering; + +import org.hibernate.cfg.Environment; +import org.hibernate.test.util.jdbc.PreparedStatementSpyConnectionProvider; +import org.hibernate.testing.DialectChecks; +import org.hibernate.testing.RequiresDialectFeature; +import org.hibernate.testing.TestForIssue; +import org.hibernate.testing.junit4.BaseNonConfigCoreFunctionalTestCase; +import org.junit.Test; + +import javax.persistence.CascadeType; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.Id; +import javax.persistence.OneToMany; +import javax.persistence.OneToOne; +import javax.persistence.Table; +import java.sql.SQLException; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import static org.hibernate.testing.transaction.TransactionUtil.doInHibernate; + +@TestForIssue(jiraKey = "HHH-16485") +@RequiresDialectFeature(DialectChecks.SupportsJdbcDriverProxying.class) +public class InsertOrderingRootEntityNameDependencyTest extends BaseNonConfigCoreFunctionalTestCase { + + private PreparedStatementSpyConnectionProvider connectionProvider = new PreparedStatementSpyConnectionProvider(true, false); + + @Override + protected Class[] getAnnotatedClasses() { + return new Class[]{ + Wrapper.class, + Condition.class, + SimpleCondition.class, + Expression.class, + ConstantExpression.class, + Condition.class, + CompoundCondition.class, + }; + } + + @Override + protected void addSettings(Map settings) { + settings.put(Environment.ORDER_INSERTS, "true"); + settings.put(Environment.ORDER_UPDATES, "true"); + settings.put(Environment.STATEMENT_BATCH_SIZE, "50"); + settings.put( + org.hibernate.cfg.AvailableSettings.CONNECTION_PROVIDER, + connectionProvider + ); + } + + @Override + public void releaseResources() { + super.releaseResources(); + connectionProvider.stop(); + } + + @Override + protected boolean rebuildSessionFactoryOnError() { + return false; + } + + @Test + public void testBatching() throws SQLException { + doInHibernate(this::sessionFactory, session -> { + connectionProvider.clear(); + session.persist(Wrapper.create()); + }); + } + + @Entity(name = "Wrapper") + public static class Wrapper { + @Id + private String id; + @OneToOne(cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + private Condition condition; + @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER) + private Set constantExpressions; + + @OneToMany(cascade = 
CascadeType.ALL, fetch = FetchType.EAGER) + private Set otherConditions; + + public Wrapper() { + } + + public static Wrapper create() { + final Wrapper w = new Wrapper(); + final CompoundCondition cc = new CompoundCondition(); + final SimpleCondition c1 = new SimpleCondition(); + final SimpleCondition c2 = new SimpleCondition(); + final SimpleCondition c3 = new SimpleCondition(); + final ConstantExpression e1 = new ConstantExpression(); + final ConstantExpression e2 = new ConstantExpression(); + final ConstantExpression e3 = new ConstantExpression(); + final ConstantExpression e4 = new ConstantExpression(); + final ConstantExpression e5 = new ConstantExpression(); + final ConstantExpression e6 = new ConstantExpression(); + final ConstantExpression e7 = new ConstantExpression(); + w.id = "w"; + w.condition = cc; + cc.id = "cc"; + cc.first = c1; + cc.second = c2; + c1.id = "c1"; + c1.left = e1; + c1.right = e2; + c2.id = "c2"; + c2.left = e3; + c2.right = e4; + c3.id = "c3"; + c3.left = e6; + c3.right = e7; + e1.id = "e1"; + e1.value = "e1"; + e2.id = "e2"; + e2.value = "e2"; + e3.id = "e3"; + e3.value = "e3"; + e4.id = "e4"; + e4.value = "e4"; + e5.id = "e5"; + e5.value = "e5"; + e6.id = "e6"; + e6.value = "e6"; + e7.id = "e7"; + e7.value = "e7"; + w.constantExpressions = new HashSet<>(); + w.constantExpressions.add(e5); + w.otherConditions = new HashSet<>(); + w.otherConditions.add(c3); + return w; + } + } + + @Entity(name = "Condition") + public static abstract class Condition { + @Id + protected String id; + + public Condition() { + } + } + @Entity(name = "SimpleCondition") + public static class SimpleCondition extends Condition { + @OneToOne(cascade = CascadeType.ALL) + private Expression left; + @OneToOne(cascade = CascadeType.ALL) + private Expression right; + + public SimpleCondition() { + } + } + @Entity(name = "Expression") + public static abstract class Expression { + @Id + protected String id; + + protected Expression() { + } + + } + @Entity(name = "ConstantExpression") + public static class ConstantExpression extends Expression { + private String value; + + public ConstantExpression() { + } + } + @Entity(name = "CompoundCondition") + public static class CompoundCondition extends Condition { + @OneToOne(cascade = CascadeType.ALL) + protected Condition first; + @OneToOne(cascade = CascadeType.ALL) + protected Condition second; + + public CompoundCondition() { + } + } +}