Fix HANA test, skip one SQL Server test due to precision issues, and get rid of old cruft in AbstractEntityPersister

Christian Beikov 2022-12-14 11:11:43 +01:00
parent 92d447cddd
commit acd4e0e36a
5 changed files with 25 additions and 185 deletions

HANASqlAstTranslator.java

@@ -6,6 +6,7 @@
  */
 package org.hibernate.dialect;
 
+import org.hibernate.MappingException;
 import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.query.sqm.BinaryArithmeticOperator;
 import org.hibernate.query.sqm.ComparisonOperator;
@@ -23,6 +24,7 @@ import org.hibernate.sql.ast.tree.select.QueryGroup;
 import org.hibernate.sql.ast.tree.select.QueryPart;
 import org.hibernate.sql.ast.tree.select.QuerySpec;
 import org.hibernate.sql.exec.spi.JdbcOperation;
+import org.hibernate.sql.model.internal.TableInsertStandard;
 
 /**
  * A SQL AST translator for HANA.
@@ -144,4 +146,15 @@ public class HANASqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
     protected String getFromDualForSelectOnly() {
         return getFromDual();
     }
+
+    @Override
+    protected void renderInsertIntoNoColumns(TableInsertStandard tableInsert) {
+        throw new MappingException(
+                String.format(
+                        "The INSERT statement for table [%s] contains no column, and this is not supported by [%s]",
+                        tableInsert.getMutatingTable().getTableId(),
+                        getDialect()
+                )
+        );
+    }
 }
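For context, the new override covers the case where the generated INSERT has no column list at all, which happens when an entity's only mapped column is a database-generated identifier. A minimal sketch of such a mapping, modelled on the EntityWithIdentity fixture used by HANADialectTestCase below (the field type and names are illustrative, not taken from the actual fixture):

import jakarta.persistence.Entity;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.Id;
import jakarta.persistence.Table;

// The id is the entity's only column and is generated by the database, so the insert
// would have to be "insert into EntityWithIdentity values ( )". HANA does not support
// such a statement, hence the dialect now raises a MappingException instead.
@Entity
@Table(name = "EntityWithIdentity")
public class EntityWithIdentity {
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;
}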

AbstractEntityPersister.java

@@ -397,9 +397,6 @@ public abstract class AbstractEntityPersister
     private String sqlVersionSelectString;
     private Map<String, SingleIdArrayLoadPlan> sqlLazySelectStringsByFetchGroup;
-    private String[] sqlLazyUpdateStrings;
-    private String sqlUpdateByRowIdString;
-    private String sqlLazyUpdateByRowIdString;
 
     private GeneratedValuesProcessor insertGeneratedValuesProcessor;
     private GeneratedValuesProcessor updateGeneratedValuesProcessor;
@@ -935,10 +932,6 @@ public abstract class AbstractEntityPersister
         return deleteCoordinator;
     }
 
-    public String[] getSQLLazyUpdateStrings() {
-        return sqlLazyUpdateStrings;
-    }
-
     public String getVersionSelectString() {
         return sqlVersionSelectString;
     }
@@ -2743,132 +2736,6 @@ public abstract class AbstractEntityPersister
     private static final boolean[] SINGLE_TRUE = new boolean[] { true };
 
-    public String generateUpdateString(boolean[] includeProperty, int j, boolean useRowId) {
-        return generateUpdateString( includeProperty, j, null, useRowId );
-    }
-
-    /**
-     * Generate the SQL that updates a row by id (and version)
-     */
-    public String generateUpdateString(
-            final boolean[] includeProperty,
-            final int j,
-            final Object[] oldFields,
-            final boolean useRowId) {
-        final Update update = new Update( getFactory().getJdbcServices().getDialect() ).setTableName( getTableName( j ) );
-
-        boolean hasColumns = false;
-        for ( int index = 0; index < attributeMappings.size(); index++ ) {
-            final AttributeMapping attributeMapping = attributeMappings.get( index );
-            if ( isPropertyOfTable( index, j ) ) {
-                // `attributeMapping` is an attribute of the table we are updating
-                if ( ! lobProperties.contains( index ) ) {
-                    // HHH-4635
-                    // Oracle expects all Lob properties to be last in inserts
-                    // and updates. Insert them at the end - see below
-                    if ( includeProperty[ index ] ) {
-                        update.addColumns(
-                                getPropertyColumnNames( index ),
-                                propertyColumnUpdateable[index ],
-                                propertyColumnWriters[index]
-                        );
-                        hasColumns = true;
-                    }
-                    else {
-                        final Generator generator = attributeMapping.getGenerator();
-                        if ( generator!=null
-                                && generator.generatesOnUpdate()
-                                && generator.generatedByDatabase() ) {
-                            final InDatabaseGenerator databaseGenerator = (InDatabaseGenerator) generator;
-                            final Dialect dialect = getFactory().getJdbcServices().getDialect();
-                            if ( databaseGenerator.referenceColumnsInSql(dialect) ) {
-                                update.addColumns(
-                                        getPropertyColumnNames(index),
-                                        SINGLE_TRUE,
-                                        databaseGenerator.getReferencedColumnValues(dialect)
-                                );
-                                hasColumns = true;
-                            }
-                        }
-                    }
-                }
-            }
-        }
-
-        // HHH-4635
-        // Oracle expects all Lob properties to be last in inserts
-        // and updates. Insert them at the end.
-        for ( int i : lobProperties ) {
-            if ( includeProperty[i] && isPropertyOfTable( i, j ) ) {
-                // this property belongs on the table and is to be inserted
-                update.addColumns(
-                        getPropertyColumnNames( i ),
-                        propertyColumnUpdateable[i], propertyColumnWriters[i]
-                );
-                hasColumns = true;
-            }
-        }
-
-        // select the correct row by either pk or row id
-        if ( useRowId ) {
-            update.addPrimaryKeyColumns( new String[] {rowIdName} ); //TODO: eventually, rowIdName[j]
-        }
-        else {
-            update.addPrimaryKeyColumns( getKeyColumns( j ) );
-        }
-
-        if ( j == 0 && isVersioned() && entityMetamodel.getOptimisticLockStyle().isVersion() ) {
-            // this is the root (versioned) table, and we are using version-based
-            // optimistic locking; if we are not updating the version, also don't
-            // check it (unless this is a "generated" version column)!
-            if ( checkVersion( includeProperty ) ) {
-                update.setVersionColumnName( getVersionColumnName() );
-                hasColumns = true;
-            }
-        }
-        else if ( isAllOrDirtyOptLocking() && oldFields != null ) {
-            // we are using "all" or "dirty" property-based optimistic locking
-            boolean[] includeInWhere = entityMetamodel.getOptimisticLockStyle().isAll()
-                    //optimistic-lock="all", include all updatable properties
-                    ? getPropertyUpdateability()
-                    //optimistic-lock="dirty", include all properties we are updating this time
-                    : includeProperty;
-
-            boolean[] versionability = getPropertyVersionability();
-            Type[] types = getPropertyTypes();
-            for ( int i = 0; i < entityMetamodel.getPropertySpan(); i++ ) {
-                boolean include = includeInWhere[i] &&
-                        isPropertyOfTable( i, j ) &&
-                        versionability[i];
-                if ( include ) {
-                    // this property belongs to the table, and it is not specifically
-                    // excluded from optimistic locking by optimistic-lock="false"
-                    String[] propertyColumnNames = getPropertyColumnNames( i );
-                    String[] propertyColumnWriters = getPropertyColumnWriters( i );
-                    boolean[] propertyNullness = types[i].toColumnNullness( oldFields[i], getFactory() );
-                    for ( int k = 0; k < propertyNullness.length; k++ ) {
-                        if ( propertyNullness[k] ) {
-                            update.addWhereColumn( propertyColumnNames[k], "=" + propertyColumnWriters[k] );
-                        }
-                        else {
-                            update.addWhereColumn( propertyColumnNames[k], " is null" );
-                        }
-                    }
-                }
-            }
-        }
-
-        if ( getFactory().getSessionFactoryOptions().isCommentsEnabled() ) {
-            update.setComment( "update " + getEntityName() );
-        }
-
-        return hasColumns ? update.toStatementString() : null;
-    }
-
     public final boolean checkVersion(final boolean[] includeProperty) {
         return includeProperty[getVersionProperty()] || entityMetamodel.isVersionGeneratedByDatabase();
     }
@@ -3095,13 +2962,6 @@ public abstract class AbstractEntityPersister
                 LOG.debugf( " Delete (%s): %s", tablePosition, ( (JdbcOperation) mutation ).getSqlString() );
             }
         } );
-
-        if ( sqlUpdateByRowIdString != null ) {
-            LOG.debugf( " Update by row id (all fields): %s", sqlUpdateByRowIdString );
-        }
-        if ( sqlLazyUpdateByRowIdString != null ) {
-            LOG.debugf( " Update by row id (non-lazy fields): %s", sqlLazyUpdateByRowIdString );
-        }
     }
 }
@@ -3220,20 +3080,6 @@ public abstract class AbstractEntityPersister
         deleteCoordinator = buildDeleteCoordinator();
 
         final int joinSpan = getTableSpan();
-        sqlLazyUpdateStrings = new String[joinSpan];
-
-        sqlUpdateByRowIdString = rowIdName == null ?
-                null :
-                generateUpdateString( getPropertyUpdateability(), 0, true );
-        sqlLazyUpdateByRowIdString = rowIdName == null ?
-                null :
-                generateUpdateString( getNonLazyPropertyUpdateability(), 0, true );
-
-        for ( int j = 0; j < joinSpan; j++ ) {
-            sqlLazyUpdateStrings[j] = customSQLUpdate[j] == null
-                    ? generateUpdateString( getNonLazyPropertyUpdateability(), j, false )
-                    : substituteBrackets( customSQLUpdate[j] );
-        }
 
         tableHasColumns = new boolean[joinSpan];
         for ( int j = 0; j < joinSpan; j++ ) {

DefaultCatalogAndSchemaTest.java

@@ -415,9 +415,6 @@ public class DefaultCatalogAndSchemaTest {
             } );
             verifyOnlyQualifier( sqlUpdateStrings, SqlType.RUNTIME, jpaEntityName, expectedQualifier );
 
-            verifyOnlyQualifier( persister.getSQLLazyUpdateStrings(), SqlType.RUNTIME,
-                    jpaEntityName, expectedQualifier );
-
             final MutationOperationGroup staticDeleteGroup = persister.getDeleteCoordinator().getStaticDeleteGroup();
             final String[] sqlDeleteStrings = new String[staticDeleteGroup.getNumberOfOperations()];
             staticDeleteGroup.forEachOperation( (tablePosition, operation) -> {
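With getSQLLazyUpdateStrings() removed, per-table SQL is now read from the coordinators' static mutation operation groups, as the delete handling above already does. A rough sketch of that pattern (it assumes, as the persister's debug logging does, that each operation can be treated as a JdbcOperation, and that tablePosition is a zero-based index):

// Collect the static delete SQL per table from the persister's delete coordinator.
final MutationOperationGroup group = persister.getDeleteCoordinator().getStaticDeleteGroup();
final String[] sql = new String[ group.getNumberOfOperations() ];
group.forEachOperation( (tablePosition, operation) -> {
    if ( operation instanceof JdbcOperation ) {
        sql[tablePosition] = ( (JdbcOperation) operation ).getSqlString();
    }
} );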

HANADialectTestCase.java

@@ -11,23 +11,17 @@ import java.util.Map;
 import org.hibernate.LockMode;
 import org.hibernate.LockOptions;
+import org.hibernate.MappingException;
 import org.hibernate.boot.MetadataSources;
 import org.hibernate.boot.registry.StandardServiceRegistry;
 import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
 import org.hibernate.cfg.AvailableSettings;
 import org.hibernate.dialect.HANAColumnStoreDialect;
-import org.hibernate.engine.jdbc.mutation.MutationExecutor;
-import org.hibernate.engine.jdbc.mutation.group.PreparedStatementDetails;
-import org.hibernate.engine.jdbc.mutation.spi.MutationExecutorService;
 import org.hibernate.engine.spi.SessionFactoryImplementor;
-import org.hibernate.id.PostInsertIdentityPersister;
-import org.hibernate.persister.entity.SingleTableEntityPersister;
-import org.hibernate.sql.model.MutationOperationGroup;
 import org.hibernate.testing.TestForIssue;
 import org.hibernate.testing.junit4.BaseUnitTestCase;
 import org.hibernate.testing.orm.junit.ServiceRegistryScope;
-import org.hibernate.testing.transaction.TransactionUtil2;
 
 import org.junit.Test;
 
 import jakarta.persistence.Entity;
@@ -35,9 +29,11 @@ import jakarta.persistence.GeneratedValue;
 import jakarta.persistence.GenerationType;
 import jakarta.persistence.Id;
 import jakarta.persistence.Table;
 
+import org.hamcrest.MatcherAssert;
+
-import static org.assertj.core.api.Assertions.assertThat;
+import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class HANADialectTestCase extends BaseUnitTestCase {
     @Test
@@ -52,26 +48,12 @@ public class HANADialectTestCase extends BaseUnitTestCase {
         metadataSources.addAnnotatedClass( EntityWithIdentity.class );
         try ( SessionFactoryImplementor sessionFactory = (SessionFactoryImplementor) metadataSources.buildMetadata().buildSessionFactory() ) {
-            final PostInsertIdentityPersister entityDescriptor = (PostInsertIdentityPersister) sessionFactory.getRuntimeMetamodels()
-                    .getMappingMetamodel()
-                    .getEntityDescriptor( EntityWithIdentity.class );
-            final MutationOperationGroup staticInsertGroup = ( (SingleTableEntityPersister) entityDescriptor ).getInsertCoordinator().getStaticInsertGroup();
-
-            final MutationExecutorService mutationExecutorService = sessionFactory
-                    .getServiceRegistry()
-                    .getService( MutationExecutorService.class );
-
-            TransactionUtil2.inTransaction(
-                    sessionFactory,
-                    (session) -> {
-                        final MutationExecutor mutationExecutor = mutationExecutorService.createExecutor(
-                                () -> null,
-                                staticInsertGroup,
-                                session
-                        );
-                        final PreparedStatementDetails statementDetails = mutationExecutor.getPreparedStatementDetails( "EntityWithIdentity" );
-                        assertThat( statementDetails.getSqlString() ).isEqualTo( "insert into EntityWithIdentity values ( )" );
-                    }
-            );
+            fail( "Should have thrown MappingException!" );
+        }
+        catch (MappingException e) {
+            MatcherAssert.assertThat(
+                    e.getMessage(),
+                    is( "The INSERT statement for table [EntityWithIdentity] contains no column, and this is not supported by [" + HANAColumnStoreDialect.class.getName() + "]" )
+            );
         }
     }

MultipleGeneratedValuesTests.java

@@ -13,6 +13,7 @@ import jakarta.persistence.Table;
 import org.hibernate.HibernateError;
 import org.hibernate.annotations.CurrentTimestamp;
+import org.hibernate.dialect.SQLServerDialect;
 import org.hibernate.dialect.SybaseASEDialect;
 import org.hibernate.tuple.GenerationTiming;
@@ -35,6 +36,7 @@ import static org.assertj.core.api.Assertions.assertThat;
 @SessionFactory
 @RequiresDialectFeature(feature = DialectFeatureChecks.CurrentTimestampHasMicrosecondPrecision.class, comment = "Without this, we might not see an update to the timestamp")
 @SkipForDialect( dialectClass = SybaseASEDialect.class, matchSubTypes = true, reason = "CURRENT_TIMESTAMP not supported in insert/update in Sybase ASE. Also see https://groups.google.com/g/comp.databases.sybase/c/j-RxPnF3img" )
+@SkipForDialect( dialectClass = SQLServerDialect.class, matchSubTypes = true, reason = "CURRENT_TIMESTAMP has millisecond precision" )
 public class MultipleGeneratedValuesTests {
     @Test
     public void test(SessionFactoryScope scope) {