remove exclamation marks from error messages!

Gavin King, 2022-06-18 00:41:19 +02:00
parent bb29e3b060
commit ba48130c3f
70 changed files with 118 additions and 129 deletions


@ -316,7 +316,7 @@ public void addEntityBinding(PersistentClass persistentClass) throws DuplicateMa
if ( matchingPersistentClass != null ) {
throw new DuplicateMappingException(
String.format(
"The [%s] and [%s] entities share the same JPA entity name: [%s], which is not allowed!",
"The [%s] and [%s] entities share the same JPA entity name: [%s], which is not allowed",
matchingPersistentClass.getClassName(),
persistentClass.getClassName(),
jpaEntityName


@ -544,7 +544,7 @@ else if ( jdbcTimeZoneValue instanceof String ) {
this.jdbcTimeZone = TimeZone.getTimeZone( ZoneId.of((String) jdbcTimeZoneValue) );
}
else if ( jdbcTimeZoneValue != null ) {
throw new IllegalArgumentException( "Configuration property " + JDBC_TIME_ZONE + " value [" + jdbcTimeZoneValue + "] is not supported!" );
throw new IllegalArgumentException( "Configuration property " + JDBC_TIME_ZONE + " value [" + jdbcTimeZoneValue + "] is not supported" );
}
this.criteriaValueHandlingMode = ValueHandlingMode.interpret(
@ -629,7 +629,7 @@ else if ( emptyConstructor != null ) {
e
);
}
throw new IllegalArgumentException( "Cannot instantiate the class [" + strategyClass.getName() + "] because it does not have a constructor that accepts a dialect or an empty constructor!" );
throw new IllegalArgumentException( "Cannot instantiate the class [" + strategyClass.getName() + "] because it does not have a constructor that accepts a dialect or an empty constructor" );
}
);
}
@ -678,7 +678,7 @@ else if ( emptyConstructor != null ) {
e
);
}
throw new IllegalArgumentException( "Cannot instantiate the class [" + strategyClass.getName() + "] because it does not have a constructor that accepts a dialect or an empty constructor!" );
throw new IllegalArgumentException( "Cannot instantiate the class [" + strategyClass.getName() + "] because it does not have a constructor that accepts a dialect or an empty constructor" );
}
);
}


@ -170,7 +170,7 @@ public String hashedName(String s) {
return bigInt.toString( 35 );
}
catch ( NoSuchAlgorithmException|UnsupportedEncodingException e ) {
throw new HibernateException( "Unable to generate a hashed name!", e );
throw new HibernateException( "Unable to generate a hashed name", e );
}
}
}


@ -4068,7 +4068,7 @@ public void doSecondPass(Map<String, PersistentClass> persistentClasses) throws
manyToOneBinding,
manyToOneSource.areValuesNullableByDefault(),
context -> {
throw new AssertionFailure( "Argh!!!" );
throw new AssertionFailure( "Should not be called" );
}
);
}


@ -104,7 +104,7 @@ public synchronized void resetAndReactivate(BootstrapServiceRegistry bootstrapSe
List<ProvidedService<?>> providedServices,
Map<?, ?> configurationValues) {
if ( super.isActive() ) {
throw new IllegalStateException( "Can't reactivate an active registry!" );
throw new IllegalStateException( "Can't reactivate an active registry" );
}
super.resetParent( bootstrapServiceRegistry );
this.configurationValues = new HashMap( configurationValues );


@ -146,7 +146,7 @@ private TypePool buildTypePool(final ClassFileLocator classFileLocator) {
private DynamicType.Builder<?> doEnhance(DynamicType.Builder<?> builder, TypeDescription managedCtClass) {
// can't effectively enhance interfaces
if ( managedCtClass.isInterface() ) {
log.debugf( "Skipping enhancement of [%s]: it's an interface!", managedCtClass.getName() );
log.debugf( "Skipping enhancement of [%s]: it's an interface", managedCtClass.getName() );
return null;
}
// skip already enhanced classes


@ -1908,7 +1908,7 @@ private static int addProperty(
if ( incomingIdProperty != null && existingIdProperty == null ) {
throw new MappingException(
String.format(
"You cannot override the [%s] non-identifier property from the [%s] base class or @MappedSuperclass and make it an identifier in the [%s] subclass!",
"You cannot override the [%s] non-identifier property from the [%s] base class or @MappedSuperclass and make it an identifier in the [%s] subclass",
propertyData.getProperty().getName(),
propertyData.getProperty().getDeclaringClass().getName(),
property.getDeclaringClass().getName()


@ -14,7 +14,7 @@
*/
public class NotYetImplementedException extends MappingException {
public NotYetImplementedException() {
this( "Not yet implemented!" );
this( "Not yet implemented" );
}
public NotYetImplementedException(String msg, Throwable root) {


@ -659,7 +659,7 @@ else if ( value instanceof DependantValue ) {
//works cause the pk has to be on the primary table
Table table = referencedEntity.getTable();
if ( idColumns.isEmpty() ) {
LOG.debug( "No column in the identifier!" );
LOG.debug( "No column in the identifier" );
}
for ( Column col: idColumns ) {
boolean match = false;


@ -130,7 +130,7 @@ else if ( expression instanceof Summarization ) {
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );


@ -24,8 +24,6 @@
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.SqlTupleContainer;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableReference;
@ -148,7 +146,7 @@ else if ( expression instanceof Summarization ) {
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );


@ -116,7 +116,7 @@ protected void renderPartitionItem(Expression expression) {
appendSql( "grouping sets (())" );
}
else if ( expression instanceof Summarization ) {
throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );


@ -10,7 +10,6 @@
import java.util.function.Consumer;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.query.sqm.BinaryArithmeticOperator;
import org.hibernate.query.sqm.ComparisonOperator;
@ -251,7 +250,7 @@ else if ( expression instanceof Summarization ) {
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );


@ -149,7 +149,7 @@ else if( "myisam".equalsIgnoreCase( storageEngine ) ) {
return MyISAMStorageEngine.INSTANCE;
}
else {
throw new UnsupportedOperationException( "The " + storageEngine + " storage engine is not supported!" );
throw new UnsupportedOperationException( "The " + storageEngine + " storage engine is not supported" );
}
}


@ -10,7 +10,6 @@
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.Stack;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.query.sqm.BinaryArithmeticOperator;
import org.hibernate.query.sqm.ComparisonOperator;
@ -64,7 +63,7 @@ protected LockStrategy determineLockingStrategy(
final boolean followOnLockingDisabled = Boolean.FALSE.equals( followOnLocking );
if ( strategy != LockStrategy.FOLLOW_ON && querySpec.hasSortSpecifications() ) {
if ( followOnLockingDisabled ) {
throw new IllegalQueryOperationException( "Locking with ORDER BY is not supported!" );
throw new IllegalQueryOperationException( "Locking with ORDER BY is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}
@ -72,19 +71,19 @@ protected LockStrategy determineLockingStrategy(
// See https://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_10002.htm#i2066346
if ( strategy != LockStrategy.FOLLOW_ON && isPartOfQueryGroup() ) {
if ( followOnLockingDisabled ) {
throw new IllegalQueryOperationException( "Locking with set operators is not supported!" );
throw new IllegalQueryOperationException( "Locking with set operators is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}
if ( strategy != LockStrategy.FOLLOW_ON && hasSetOperations( querySpec ) ) {
if ( followOnLockingDisabled ) {
throw new IllegalQueryOperationException( "Locking with set operators is not supported!" );
throw new IllegalQueryOperationException( "Locking with set operators is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}
if ( strategy != LockStrategy.FOLLOW_ON && useOffsetFetchClause( querySpec ) && !isRowsOnlyFetchClauseType( querySpec ) ) {
if ( followOnLockingDisabled ) {
throw new IllegalQueryOperationException( "Locking with FETCH is not supported!" );
throw new IllegalQueryOperationException( "Locking with FETCH is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}
@ -100,7 +99,7 @@ protected LockStrategy determineLockingStrategy(
}
if ( hasOffset ) {
if ( followOnLockingDisabled ) {
throw new IllegalQueryOperationException( "Locking with OFFSET is not supported!" );
throw new IllegalQueryOperationException( "Locking with OFFSET is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}


@ -152,7 +152,7 @@ else if ( expression instanceof Summarization ) {
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
}
else {


@ -93,7 +93,7 @@ else if ( expression instanceof Summarization ) {
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );


@ -323,7 +323,7 @@ else if ( expression instanceof Summarization ) {
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );


@ -146,7 +146,7 @@ else if ( expression instanceof Summarization ) {
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );


@ -63,7 +63,7 @@ public void render(
List<SortSpecification> withinGroup,
SqlAstTranslator<?> translator) {
if ( filter != null && !translator.supportsFilterClause() ) {
throw new IllegalArgumentException( "Can't emulate filter clause for inverse distribution function [" + getName() + "]!" );
throw new IllegalArgumentException( "Can't emulate filter clause for inverse distribution function [" + getName() + "]" );
}
sqlAppender.appendSql( getName() );
sqlAppender.appendSql( '(' );


@ -72,7 +72,7 @@ public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
return super.convertToSqlAst( walker );
}
else if ( currentClause != Clause.SELECT ) {
throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported!" );
throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported" );
}
final ReturnableType<?> resultType = resolveResultType(
walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()


@ -95,7 +95,7 @@ public void render(
List<SortSpecification> withinGroup,
SqlAstTranslator<?> translator) {
if ( filter != null && !translator.supportsFilterClause() ) {
throw new IllegalArgumentException( "Can't emulate filter clause for inverse distribution function [" + getName() + "]!" );
throw new IllegalArgumentException( "Can't emulate filter clause for inverse distribution function [" + getName() + "]" );
}
sqlAppender.appendSql( getName() );
sqlAppender.appendSql( '(' );


@ -67,7 +67,7 @@ public Expression convertToSqlAst(SqmToSqlAstConverter walker) {
return super.convertToSqlAst( walker );
}
else if ( currentClause != Clause.SELECT ) {
throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported!" );
throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported" );
}
final ReturnableType<?> resultType = resolveResultType(
walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()


@ -41,7 +41,7 @@ public void render(
final boolean caseWrapper = filter != null && !translator.supportsFilterClause();
sqlAppender.appendSql( "stats_mode(" );
if ( withinGroup == null || withinGroup.size() != 1 ) {
throw new IllegalArgumentException( "MODE function requires a WITHIN GROUP clause with exactly one order by item!" );
throw new IllegalArgumentException( "MODE function requires a WITHIN GROUP clause with exactly one order by item" );
}
if ( caseWrapper ) {
translator.getCurrentClauseStack().push( Clause.WHERE );


@ -117,7 +117,7 @@ public void stop() {
@Override
public Connection getConnection() throws SQLException {
if ( !available ) {
throw new HibernateException( "Provider is closed!" );
throw new HibernateException( "Provider is closed" );
}
return useCredentials ? dataSource.getConnection( user, pass ) : dataSource.getConnection();
}


@ -295,7 +295,7 @@ public <T> T unwrap(Class<T> unwrapType) {
protected void validateConnectionsReturned() {
int allocationCount = state.pool.allConnections.size() - state.pool.availableConnections.size();
if ( allocationCount != 0 ) {
CONNECTIONS_MESSAGE_LOGGER.error( "Connection leak detected: there are " + allocationCount + " unclosed connections!");
CONNECTIONS_MESSAGE_LOGGER.error( "Connection leak detected: there are " + allocationCount + " unclosed connections");
}
}
@ -422,7 +422,7 @@ public Connection poll() throws SQLException {
}
}
throw new HibernateException(
"The internal connection pool has reached its maximum size and no connection is currently available!" );
"The internal connection pool has reached its maximum size and no connection is currently available" );
}
conn = prepareConnection( conn );
} while ( conn == null );


@ -46,7 +46,7 @@ public void injectServices(ServiceRegistryImplementor serviceRegistry) {
@Override
public void configure(Map<String, Object> configValues) {
this.jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );
assert jdbcEnvironment != null : "JdbcEnvironment was not found!";
assert jdbcEnvironment != null : "JdbcEnvironment was not found";
this.multiTenancyEnabled = serviceRegistry.getService(MultiTenantConnectionProvider.class)!=null;


@ -163,7 +163,7 @@ private void logSlowQuery(Supplier<String> sqlSupplier, long startTimeNanos) {
return;
}
if ( startTimeNanos <= 0 ) {
throw new IllegalArgumentException( "startTimeNanos [" + startTimeNanos + "] should be greater than 0!" );
throw new IllegalArgumentException( "startTimeNanos [" + startTimeNanos + "] should be greater than 0" );
}
long queryExecutionMillis = TimeUnit.NANOSECONDS.toMillis( System.nanoTime() - startTimeNanos );


@ -35,7 +35,7 @@ public ContextualJdbcConnectionAccess(
@Override
public Connection obtainConnection() throws SQLException {
if ( tenantIdentifier == null ) {
throw new HibernateException( "Tenant identifier required!" );
throw new HibernateException( "Tenant identifier required" );
}
try {
@ -50,7 +50,7 @@ public Connection obtainConnection() throws SQLException {
@Override
public void releaseConnection(Connection connection) throws SQLException {
if ( tenantIdentifier == null ) {
throw new HibernateException( "Tenant identifier required!" );
throw new HibernateException( "Tenant identifier required" );
}
try {


@ -33,8 +33,6 @@
import org.hibernate.engine.spi.CollectionKey;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.id.IntegralDataTypeHolder;
import org.hibernate.query.QueryLogging;
import org.hibernate.type.BasicType;
import org.hibernate.type.SerializationException;
import org.hibernate.type.Type;
@ -61,7 +59,7 @@
public interface CoreMessageLogger extends BasicLogger {
@LogMessage(level = WARN)
@Message(value = "Already session bound on call to bind(); make sure you clean up your sessions!", id = 2)
@Message(value = "Already session bound on call to bind(); make sure you clean up your sessions", id = 2)
void alreadySessionBound();
@LogMessage(level = INFO)
@ -445,7 +443,7 @@ void JavaSqlTypesMappedSameCodeMultipleTimes(
@Message(value = "Bytecode enhancement failed: %s", id = 142)
String bytecodeEnhancementFailed(String entityName);
@Message(value = "Bytecode enhancement failed because no public, protected or package-private default constructor was found for entity: %s. Private constructors don't work with runtime proxies!", id = 143)
@Message(value = "Bytecode enhancement failed because no public, protected or package-private default constructor was found for entity: %s. Private constructors don't work with runtime proxies", id = 143)
String bytecodeEnhancementFailedBecauseOfDefaultConstructor(String entityName);
@LogMessage(level = WARN)
@ -515,7 +513,7 @@ void namedQueryError(
void narrowingProxy(Class concreteProxyClass);
@LogMessage(level = WARN)
@Message(value = "FirstResult/maxResults specified on polymorphic query; applying in memory!", id = 180)
@Message(value = "FirstResult/maxResults specified on polymorphic query; applying in memory", id = 180)
void needsLimit();
@LogMessage(level = WARN)
@ -599,7 +597,7 @@ void parsingXmlWarningForFile(
void preparedStatementAlreadyInBatch(String sql);
@LogMessage(level = WARN)
@Message(value = "processEqualityExpression() : No expression to process!", id = 203)
@Message(value = "processEqualityExpression() : No expression to process", id = 203)
void processEqualityExpression();
@LogMessage(level = INFO)
@ -666,7 +664,7 @@ void parsingXmlWarningForFile(
void readOnlyCacheConfiguredForMutableCollection(String name);
@LogMessage(level = WARN)
@Message(value = "Recognized obsolete hibernate namespace %s. Use namespace %s instead. Refer to Hibernate 3.6 Migration Guide!",
@Message(value = "Recognized obsolete hibernate namespace %s. Use namespace %s instead. Refer to Hibernate 3.6 Migration Guide",
id = 223)
void recognizedObsoleteHibernateNamespace(
String oldHibernateNamespace,
@ -1348,7 +1346,7 @@ void usingDefaultIdGeneratorSegmentValue(
void usingDialect(Dialect dialect);
@LogMessage(level = ERROR)
@Message(value = "Don't use old DTDs, read the Hibernate 3.x Migration Guide!", id = 404)
@Message(value = "Don't use old DTDs, read the Hibernate 3.x Migration Guide", id = 404)
void usingOldDtd();
@LogMessage(level = INFO)


@ -96,7 +96,7 @@ public Filter setParameter(String name, Object value) throws IllegalArgumentExce
public Filter setParameterList(String name, Collection<?> values) throws HibernateException {
// Make sure this is a defined parameter and check the incoming value type
if ( values == null ) {
throw new IllegalArgumentException( "Collection must be not null!" );
throw new IllegalArgumentException( "Collection must be not null" );
}
JdbcMapping type = definition.getParameterJdbcMapping( name );
if ( type == null ) {


@ -506,7 +506,7 @@ public Session openTemporarySession() throws HibernateException {
public Session getCurrentSession() throws HibernateException {
if ( currentSessionContext == null ) {
throw new HibernateException( "No CurrentSessionContext configured!" );
throw new HibernateException( "No CurrentSessionContext configured" );
}
return currentSessionContext.currentSession();
}


@ -457,7 +457,7 @@ private boolean isFlushModeNever() {
private void managedClose() {
if ( isClosed() ) {
throw new SessionException( "Session was already closed!" );
throw new SessionException( "Session was already closed" );
}
close();
}


@ -498,7 +498,7 @@ public void hit(Set<HashEntry<K, V>> evicted) {
coldHit( evicted );
break;
case HIR_NONRESIDENT:
throw new IllegalStateException( "Can't hit a non-resident entry!" );
throw new IllegalStateException( "Can't hit a non-resident entry" );
default:
throw new AssertionError( "Hit with unknown status: " + state );
}


@ -1593,7 +1593,7 @@ else if ( settingValue instanceof String ) {
}
else {
throw new IllegalArgumentException(
"The provided " + settingName + " setting value [" + settingValue + "] is not supported!"
"The provided " + settingName + " setting value [" + settingValue + "] is not supported"
);
}
@ -1603,7 +1603,7 @@ else if ( settingValue instanceof String ) {
}
catch (InstantiationException | IllegalAccessException e) {
throw new IllegalArgumentException(
"The " + clazz.getSimpleName() +" class [" + instanceClass + "] could not be instantiated!",
"The " + clazz.getSimpleName() +" class [" + instanceClass + "] could not be instantiated",
e
);
}


@ -110,7 +110,7 @@ public SqlAliasBaseGenerator getSqlAliasBaseGenerator() {
@Override
public void registerLockMode(String identificationVariable, LockMode explicitLockMode) {
throw new UnsupportedOperationException( "Registering lock modes should only be done for result set mappings!" );
throw new UnsupportedOperationException( "Registering lock modes should only be done for result set mappings" );
}
@Override


@ -113,7 +113,7 @@ public static String hashedName(String s) {
return bigInt.toString( 35 );
}
catch ( NoSuchAlgorithmException e ) {
throw new HibernateException( "Unable to generate a hashed Constraint name!", e );
throw new HibernateException( "Unable to generate a hashed Constraint name", e );
}
}


@ -644,7 +644,7 @@ public Index getIndex(String indexName) {
public Index addIndex(Index index) {
Index current = indexes.get( index.getName() );
if ( current != null ) {
throw new MappingException( "Index " + index.getName() + " already exists!" );
throw new MappingException( "Index " + index.getName() + " already exists" );
}
indexes.put( index.getName(), index );
return index;
@ -653,7 +653,7 @@ public Index addIndex(Index index) {
public UniqueKey addUniqueKey(UniqueKey uniqueKey) {
UniqueKey current = uniqueKeys.get( uniqueKey.getName() );
if ( current != null ) {
throw new MappingException( "UniqueKey " + uniqueKey.getName() + " already exists!" );
throw new MappingException( "UniqueKey " + uniqueKey.getName() + " already exists" );
}
uniqueKeys.put( uniqueKey.getName(), uniqueKey );
return uniqueKey;


@ -3804,7 +3804,7 @@ public void update(
// Ensure that an immutable or non-modifiable entity is not being updated unless it is
// in the process of being deleted.
if ( entry == null && !isMutable() ) {
throw new IllegalStateException( "Updating immutable entity that is not in session yet!" );
throw new IllegalStateException( "Updating immutable entity that is not in session yet" );
}
if ( ( entityMetamodel.isDynamicUpdate() && dirtyFields != null ) ) {
// We need to generate the UPDATE SQL when dynamic-update="true"


@ -64,6 +64,6 @@ static <T> T subLogger(String subName, Class<T> loggerJavaType) {
void ignoringUnrecognizedQueryHint(String hintName);
@LogMessage(level = WARN)
@Message(value = "firstResult/maxResults specified with collection fetch; applying in memory!", id = 90003004)
@Message(value = "firstResult/maxResults specified with collection fetch; applying in memory", id = 90003004)
void firstOrMaxResultsSpecifiedWithCollectionFetch();
}


@ -343,7 +343,7 @@ public Fetch generateFetch(
boolean selected,
String resultVariable,
DomainResultCreationState creationState) {
throw new UnsupportedOperationException( "AnonymousTupleEmbeddableValuedModelPart is not fetchable!" );
throw new UnsupportedOperationException( "AnonymousTupleEmbeddableValuedModelPart is not fetchable" );
}
@Override


@ -175,7 +175,7 @@ else if ( domainType instanceof ManagedDomainType<?> ) {
final EmbeddableValuedModelPart modelPartContainer = (EmbeddableValuedModelPart) existingModelPart;
for ( Attribute<?, ?> attribute : attributes ) {
if ( !( attribute instanceof SingularPersistentAttribute<?, ?> ) ) {
throw new IllegalArgumentException( "Only embeddables without collections are supported!" );
throw new IllegalArgumentException( "Only embeddables without collections are supported" );
}
final DomainType<?> attributeType = ( (SingularPersistentAttribute<?, ?>) attribute ).getType();
final ModelPart modelPart = createModelPart(


@ -52,7 +52,7 @@ public AnonymousTupleType(SqmSelectableNode<?>[] components) {
final SqmSelectableNode<?> component = components[i];
final String alias = component.getAlias();
if ( alias == null ) {
throw new IllegalArgumentException( "Component at index " + i + " has no alias, but alias is required!" );
throw new IllegalArgumentException( "Component at index " + i + " has no alias, but alias is required" );
}
map.put( alias, i );
}
@ -62,7 +62,7 @@ public AnonymousTupleType(SqmSelectableNode<?>[] components) {
private static SqmSelectableNode<?>[] extractSqmExpressibles(SqmSubQuery<?> subQuery) {
final SqmSelectClause selectClause = subQuery.getQuerySpec().getSelectClause();
if ( selectClause == null || selectClause.getSelectionItems().isEmpty() ) {
throw new IllegalArgumentException( "subquery has no selection items!" );
throw new IllegalArgumentException( "subquery has no selection items" );
}
// todo: right now, we "snapshot" the state of the subquery when creating this type, but maybe we shouldn't?
// i.e. what if the subquery changes later on? Or should we somehow mark the subquery to signal,


@ -129,7 +129,7 @@ private static SqmRoot<?> findUnmappedPolymorphicReference(SqmQueryPart<?> query
final SqmQueryGroup<?> queryGroup = (SqmQueryGroup<?>) queryPart;
final SqmRoot<?> root = findUnmappedPolymorphicReference( queryGroup.getQueryParts().get( 0 ) );
if ( root != null ) {
throw new UnsupportedOperationException( "Polymorphic query group is unsupported!" );
throw new UnsupportedOperationException( "Polymorphic query group is unsupported" );
}
return null;
}


@ -3610,7 +3610,7 @@ public Object visitListaggFunction(ListaggFunctionContext ctx) {
final SqmFunctionDescriptor functionTemplate = getFunctionDescriptor( "listagg" );
if ( functionTemplate == null ) {
throw new SemanticException(
"The listagg function was not registered for the dialect!"
"The listagg function was not registered for the dialect"
);
}
final int argumentStartIndex;


@ -14,11 +14,6 @@
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.query.sqm.tree.predicate.SqmPredicate;
import org.hibernate.query.sqm.tree.select.SqmOrderByClause;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.ast.tree.select.SortSpecification;
import org.hibernate.type.spi.TypeConfiguration;
import java.util.List;
@ -111,7 +106,7 @@ public <T> SelfRenderingSqmAggregateFunction<T> generateSqmAggregateFunctionExpr
QueryEngine queryEngine,
TypeConfiguration typeConfiguration) {
if ( functionKind != FunctionKind.AGGREGATE ) {
throw new UnsupportedOperationException( "The function " + getName() + " is not an aggregate function!" );
throw new UnsupportedOperationException( "The function " + getName() + " is not an aggregate function" );
}
return new SelfRenderingSqmAggregateFunction<>(
this,
@ -135,7 +130,7 @@ public <T> SelfRenderingSqmOrderedSetAggregateFunction<T> generateSqmOrderedSetA
QueryEngine queryEngine,
TypeConfiguration typeConfiguration) {
if ( functionKind != FunctionKind.ORDERED_SET_AGGREGATE ) {
throw new UnsupportedOperationException( "The function " + getName() + " is not an ordered set-aggregate function!" );
throw new UnsupportedOperationException( "The function " + getName() + " is not an ordered set-aggregate function" );
}
return new SelfRenderingSqmOrderedSetAggregateFunction<>(
this,
@ -161,7 +156,7 @@ protected <T> SelfRenderingSqmWindowFunction<T> generateSqmWindowFunctionExpress
QueryEngine queryEngine,
TypeConfiguration typeConfiguration) {
if ( functionKind != FunctionKind.WINDOW ) {
throw new UnsupportedOperationException( "The function " + getName() + " is not a window function!" );
throw new UnsupportedOperationException( "The function " + getName() + " is not a window function" );
}
return new SelfRenderingSqmWindowFunction<>(
this,


@ -58,7 +58,7 @@ default <T> SelfRenderingSqmFunction<T> generateAggregateSqmExpression(
ReturnableType<T> impliedResultType,
QueryEngine queryEngine,
TypeConfiguration typeConfiguration) {
throw new UnsupportedOperationException( "Not an aggregate function!" );
throw new UnsupportedOperationException( "Not an aggregate function" );
}
/**
@ -72,7 +72,7 @@ default <T> SelfRenderingSqmFunction<T> generateOrderedSetAggregateSqmExpression
ReturnableType<T> impliedResultType,
QueryEngine queryEngine,
TypeConfiguration typeConfiguration) {
throw new UnsupportedOperationException( "Not an ordered set-aggregate function!" );
throw new UnsupportedOperationException( "Not an ordered set-aggregate function" );
}
/**
@ -87,7 +87,7 @@ default <T> SelfRenderingSqmFunction<T> generateWindowSqmExpression(
ReturnableType<T> impliedResultType,
QueryEngine queryEngine,
TypeConfiguration typeConfiguration) {
throw new UnsupportedOperationException( "Not an aggregate function!" );
throw new UnsupportedOperationException( "Not an aggregate function" );
}
/**


@ -332,7 +332,7 @@ private void verifyImmutableEntityUpdate(
);
default:
throw new UnsupportedOperationException(
"The " + immutableEntityUpdateQueryHandlingMode + " is not supported!"
"The " + immutableEntityUpdateQueryHandlingMode + " is not supported"
);
}
}


@ -304,7 +304,7 @@ private <T> JpaCriteriaQuery<T> setOperation(
queryParts.add( ( (SqmSelectQuery<T>) query1 ).getQueryPart() );
for ( CriteriaQuery<?> query : queries ) {
if ( query.getResultType() != resultType ) {
throw new IllegalArgumentException( "Result type of all operands must match!" );
throw new IllegalArgumentException( "Result type of all operands must match" );
}
queryParts.add( ( (SqmSelectQuery<T>) query ).getQueryPart() );
}


@ -667,7 +667,7 @@ public SqlAliasBaseGenerator getSqlAliasBaseGenerator() {
@Override
public void registerLockMode(String identificationVariable, LockMode explicitLockMode) {
throw new UnsupportedOperationException( "Registering lock modes should only be done for result set mappings!" );
throw new UnsupportedOperationException( "Registering lock modes should only be done for result set mappings" );
}
public QueryOptions getQueryOptions() {
@ -1593,7 +1593,7 @@ protected CteColumn findCteColumn(CteTable cteTable, SqmCteTableColumn cteColumn
}
throw new IllegalArgumentException(
String.format(
"Couldn't find cte column %s in cte %s!",
"Couldn't find cte column %s in cte %s",
cteColumn.getColumnName(),
cteTable.getTableExpression()
)
@ -3066,7 +3066,7 @@ private void addColumnNames(List<String> columnNames, DomainType<?> domainType,
else if ( domainType instanceof ManagedDomainType<?> ) {
for ( Attribute<?, ?> attribute : ( (ManagedDomainType<?>) domainType ).getAttributes() ) {
if ( !( attribute instanceof SingularPersistentAttribute<?, ?> ) ) {
throw new IllegalArgumentException( "Only embeddables without collections are supported!" );
throw new IllegalArgumentException( "Only embeddables without collections are supported" );
}
final DomainType<?> attributeType = ( (SingularPersistentAttribute<?, ?>) attribute ).getType();
addColumnNames( columnNames, attributeType, componentName + "_" + attribute.getName() );


@ -82,7 +82,7 @@ public SqmCteStatement<?> getCteStatement(String cteLabel) {
@Override
public void addCteStatement(SqmCteStatement<?> cteStatement) {
if ( cteStatements.putIfAbsent( cteStatement.getCteTable().getCteName(), cteStatement ) != null ) {
throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getCteName() + " already exists!" );
throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getCteName() + " already exists" );
}
}


@ -113,21 +113,21 @@ public SqmCorrelatedRoot<T> createCorrelation() {
@Override
public <S extends T> SqmTreatedRoot<T, S> treatAs(Class<S> treatJavaType) throws PathException {
throw new UnsupportedOperationException( "Derived roots can not be treated!" );
throw new UnsupportedOperationException( "Derived roots can not be treated" );
}
@Override
public <S extends T> SqmTreatedRoot<T, S> treatAs(EntityDomainType<S> treatTarget) throws PathException {
throw new UnsupportedOperationException( "Derived roots can not be treated!" );
throw new UnsupportedOperationException( "Derived roots can not be treated" );
}
@Override
public <S extends T> SqmFrom<?, S> treatAs(Class<S> treatJavaType, String alias) {
throw new UnsupportedOperationException( "Derived roots can not be treated!" );
throw new UnsupportedOperationException( "Derived roots can not be treated" );
}
@Override
public <S extends T> SqmFrom<?, S> treatAs(EntityDomainType<S> treatTarget, String alias) {
throw new UnsupportedOperationException( "Derived roots can not be treated!" );
throw new UnsupportedOperationException( "Derived roots can not be treated" );
}
}


@ -86,7 +86,7 @@ public SqmPredicate getJoinPredicate() {
@Override
public void setJoinPredicate(SqmPredicate predicate) {
throw new UnsupportedOperationException( "Setting a predicate for a plural part join is unsupported!" );
throw new UnsupportedOperationException( "Setting a predicate for a plural part join is unsupported" );
}
@Override


@ -163,21 +163,21 @@ public SqmCorrelatedEntityJoin<T> createCorrelation() {
@Override
public <S extends T> SqmTreatedEntityJoin<T,S> treatAs(Class<S> treatJavaType) throws PathException {
throw new UnsupportedOperationException( "Derived joins can not be treated!" );
throw new UnsupportedOperationException( "Derived joins can not be treated" );
}
@Override
public <S extends T> SqmTreatedEntityJoin<T,S> treatAs(EntityDomainType<S> treatTarget) throws PathException {
throw new UnsupportedOperationException( "Derived joins can not be treated!" );
throw new UnsupportedOperationException( "Derived joins can not be treated" );
}
@Override
public <S extends T> SqmFrom<?, S> treatAs(Class<S> treatJavaType, String alias) {
throw new UnsupportedOperationException( "Derived joins can not be treated!" );
throw new UnsupportedOperationException( "Derived joins can not be treated" );
}
@Override
public <S extends T> SqmFrom<?, S> treatAs(EntityDomainType<S> treatTarget, String alias) {
throw new UnsupportedOperationException( "Derived joins can not be treated!" );
throw new UnsupportedOperationException( "Derived joins can not be treated" );
}
}


@ -95,7 +95,7 @@ public SqmCteStatement<?> getCteStatement(String cteLabel) {
@Override
public void addCteStatement(SqmCteStatement<?> cteStatement) {
if ( cteStatements.putIfAbsent( cteStatement.getCteTable().getCteName(), cteStatement ) != null ) {
throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getCteName() + " already exists!" );
throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getCteName() + " already exists" );
}
}


@ -161,14 +161,14 @@ private void validateQueryGroupFetchStructure(List<? extends SqmTypedNode<?>> ty
final SqmQuerySpec<?> querySpec = (SqmQuerySpec<?>) queryPart;
final List<SqmSelection<?>> selections = querySpec.getSelectClause().getSelections();
if ( firstSelectionSize != selections.size() ) {
throw new SemanticException( "All query parts in a query group must have the same arity!" );
throw new SemanticException( "All query parts in a query group must have the same arity" );
}
for ( int j = 0; j < firstSelectionSize; j++ ) {
final SqmTypedNode<?> firstSqmSelection = typedNodes.get( j );
final JavaType<?> firstJavaType = firstSqmSelection.getNodeJavaType();
if ( firstJavaType != selections.get( j ).getNodeJavaType() ) {
throw new SemanticException(
"Select items of the same index must have the same java type across all query parts!"
"Select items of the same index must have the same java type across all query parts"
);
}
if ( firstSqmSelection instanceof SqmFrom<?, ?> ) {
@ -202,7 +202,7 @@ private void validateFetchesMatch(SqmFrom<?, ?> firstFrom, SqmFrom<?, ?> from) {
}
if ( matchingAttrJoin == null || firstAttrJoin.getModel() != matchingAttrJoin.getModel() ) {
throw new SemanticException(
"All query parts in a query group must have the same join fetches in the same order!"
"All query parts in a query group must have the same join fetches in the same order"
);
}
validateFetchesMatch( firstAttrJoin, matchingAttrJoin );
@ -216,7 +216,7 @@ private void validateFetchesMatch(SqmFrom<?, ?> firstFrom, SqmFrom<?, ?> from) {
final SqmAttributeJoin<?, ?> attrJoin = (SqmAttributeJoin<?, ?>) sqmJoin;
if ( attrJoin.isFetched() ) {
throw new SemanticException(
"All query parts in a query group must have the same join fetches in the same order!"
"All query parts in a query group must have the same join fetches in the same order"
);
}
}


@ -111,7 +111,7 @@ public void setFetchExpression(SqmExpression<?> fetchExpression, FetchClauseType
}
else {
if ( fetchClauseType == null ) {
throw new IllegalArgumentException( "Fetch clause may not be null!" );
throw new IllegalArgumentException( "Fetch clause may not be null" );
}
fetchExpression.applyInferableType( nodeBuilder.getIntegerType() );
this.fetchExpression = fetchExpression;


@ -87,7 +87,7 @@ public void processAnyDelayedAfterCompletion() {
doAfterCompletion( false, true );
// NOTE : doAfterCompletion calls reset
throw new HibernateException( "Transaction was rolled back in a different thread!" );
throw new HibernateException( "Transaction was rolled back in a different thread" );
}
}
}


@ -468,7 +468,7 @@ public static <T extends Service> T fromRegistryOrChildren(
*/
public synchronized void reactivate() {
if ( !active.compareAndSet( false, true ) ) {
throw new IllegalStateException( "Was not inactive, could not reactivate!" );
throw new IllegalStateException( "Was not inactive, could not reactivate" );
}
}


@ -533,13 +533,13 @@ protected <R> R interpretExpression(Expression expression, JdbcParameterBindings
}
else if ( expression instanceof JdbcParameter ) {
if ( jdbcParameterBindings == null ) {
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available!" );
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available" );
}
return (R) getParameterBindValue( (JdbcParameter) expression );
}
else if ( expression instanceof SqmParameterInterpretation ) {
if ( jdbcParameterBindings == null ) {
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available!" );
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available" );
}
return (R) getParameterBindValue( (JdbcParameter) ( (SqmParameterInterpretation) expression).getResolvedExpression() );
}
@ -553,7 +553,7 @@ protected void renderExpressionAsLiteral(Expression expression, JdbcParameterBin
}
else if ( expression instanceof JdbcParameter ) {
if ( jdbcParameterBindings == null ) {
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available!" );
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available" );
}
final JdbcParameter parameter = (JdbcParameter) expression;
renderAsLiteral( parameter, getParameterBindValue( parameter ) );
@ -561,7 +561,7 @@ else if ( expression instanceof JdbcParameter ) {
}
else if ( expression instanceof SqmParameterInterpretation ) {
if ( jdbcParameterBindings == null ) {
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available!" );
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available" );
}
final JdbcParameter parameter = (JdbcParameter) ( (SqmParameterInterpretation) expression).getResolvedExpression();
renderAsLiteral( parameter, getParameterBindValue( parameter ) );
@ -644,7 +644,7 @@ else if ( statement instanceof SelectStatement ) {
jdbcOperation = translateSelect( (SelectStatement) statement );
}
else {
throw new IllegalArgumentException( "Unexpected statement!" );
throw new IllegalArgumentException( "Unexpected statement" );
}
if ( jdbcParameterBindings != null && CollectionHelper.isNotEmpty( getFilterJdbcParameters() ) ) {
@ -1241,19 +1241,19 @@ protected LockStrategy determineLockingStrategy(
LockStrategy strategy = LockStrategy.CLAUSE;
if ( !querySpec.getGroupByClauseExpressions().isEmpty() ) {
if ( Boolean.FALSE.equals( followOnLocking ) ) {
throw new IllegalQueryOperationException( "Locking with GROUP BY is not supported!" );
throw new IllegalQueryOperationException( "Locking with GROUP BY is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}
if ( querySpec.getHavingClauseRestrictions() != null ) {
if ( Boolean.FALSE.equals( followOnLocking ) ) {
throw new IllegalQueryOperationException( "Locking with HAVING is not supported!" );
throw new IllegalQueryOperationException( "Locking with HAVING is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}
if ( querySpec.getSelectClause().isDistinct() ) {
if ( Boolean.FALSE.equals( followOnLocking ) ) {
throw new IllegalQueryOperationException( "Locking with DISTINCT is not supported!" );
throw new IllegalQueryOperationException( "Locking with DISTINCT is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}
@ -1267,7 +1267,7 @@ protected LockStrategy determineLockingStrategy(
if ( tableGroupJoin.getJoinType() != SqlAstJoinType.INNER && !( group instanceof VirtualTableGroup ) ) {
if ( Boolean.FALSE.equals( followOnLocking ) ) {
throw new IllegalQueryOperationException(
"Locking with OUTER joins is not supported!" );
"Locking with OUTER joins is not supported" );
}
return Boolean.TRUE;
}
@ -1285,7 +1285,7 @@ protected LockStrategy determineLockingStrategy(
if ( tableJoin.getJoinType() != SqlAstJoinType.INNER && !( tableJoin.getJoinedNode() instanceof VirtualTableGroup ) ) {
if ( Boolean.FALSE.equals( followOnLocking ) ) {
throw new IllegalQueryOperationException(
"Locking with OUTER joins is not supported!" );
"Locking with OUTER joins is not supported" );
}
return Boolean.TRUE;
}
@ -1298,7 +1298,7 @@ protected LockStrategy determineLockingStrategy(
}
if ( hasAggregateFunctions( querySpec ) ) {
if ( Boolean.FALSE.equals( followOnLocking ) ) {
throw new IllegalQueryOperationException( "Locking with aggregate functions is not supported!" );
throw new IllegalQueryOperationException( "Locking with aggregate functions is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}
@ -4213,13 +4213,13 @@ private QuerySpec stripToSelectClause(QuerySpec querySpec) {
@Override
public void visitTableGroup(TableGroup tableGroup) {
// TableGroup and TableGroup handling should be performed as part of `#visitFromClause`...
throw new UnsupportedOperationException( "This should never be invoked as org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter.visitTableGroup should handle this!" );
throw new UnsupportedOperationException( "This should never be invoked as org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter.visitTableGroup should handle this" );
}
@Override
public void visitTableGroupJoin(TableGroupJoin tableGroupJoin) {
// TableGroup and TableGroupJoin handling should be performed as part of `#visitFromClause`...
throw new UnsupportedOperationException( "This should never be invoked as org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter.visitTableGroup should handle this!" );
throw new UnsupportedOperationException( "This should never be invoked as org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter.visitTableGroup should handle this" );
}
@Override


@ -47,7 +47,7 @@ public CteStatement getCteStatement(String cteLabel) {
@Override
public void addCteStatement(CteStatement cteStatement) {
if ( cteStatements.putIfAbsent( cteStatement.getCteTable().getTableExpression(), cteStatement ) != null ) {
throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getTableExpression() + " already exists!" );
throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getTableExpression() + " already exists" );
}
}
}


@ -113,7 +113,7 @@ public void setFetchClauseExpression(Expression fetchClauseExpression, FetchClau
}
else {
if ( fetchClauseType == null ) {
throw new IllegalArgumentException( "Fetch clause may not be null!" );
throw new IllegalArgumentException( "Fetch clause may not be null" );
}
this.fetchClauseExpression = fetchClauseExpression;
this.fetchClauseType = fetchClauseType;


@ -186,7 +186,7 @@ public DomainResult createDomainResult(String resultVariable, DomainResultCreati
);
}
else {
throw new UnsupportedOperationException("Domain result for non-scalar subquery shouldn't be created!");
throw new UnsupportedOperationException("Domain result for non-scalar subquery shouldn't be created");
}
}
}


@ -183,7 +183,7 @@ public List<R> consume(
}
}
}
throw new IllegalStateException( "Should not reach this!" );
throw new IllegalStateException( "Should not reach this" );
}
/**


@ -70,12 +70,12 @@ protected void doBindNull(CallableStatement st, String name, WrapperOptions opti
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options) {
throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null!" );
throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null" );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options) {
throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null!" );
throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null" );
}
};
}


@ -33,7 +33,7 @@ public void test() {
}
catch (MappingException expected) {
assertEquals(
"You cannot override the [uid] non-identifier property from the [org.hibernate.orm.test.annotations.override.mappedsuperclass.MappedSuperClassWithUuidAsBasic] base class or @MappedSuperclass and make it an identifier in the [org.hibernate.orm.test.annotations.override.mappedsuperclass.SubclassWithUuidAsId] subclass!",
"You cannot override the [uid] non-identifier property from the [org.hibernate.orm.test.annotations.override.mappedsuperclass.MappedSuperClassWithUuidAsBasic] base class or @MappedSuperclass and make it an identifier in the [org.hibernate.orm.test.annotations.override.mappedsuperclass.SubclassWithUuidAsId] subclass",
expected.getMessage()
);
}


@ -198,7 +198,7 @@ public void testPessimisticLockWithFirstResultsWhileExplicitlyDisablingFollowOnL
);
assertTrue(
expected.getCause().getMessage().contains(
"Locking with OFFSET is not supported!"
"Locking with OFFSET is not supported"
)
);
}
@ -278,7 +278,7 @@ public void testPessimisticLockWithMaxResultsAndOrderByWhileExplicitlyDisablingF
);
assertTrue(
expected.getCause().getMessage().contains(
"Locking with ORDER BY is not supported!"
"Locking with ORDER BY is not supported"
)
);
}
@ -356,7 +356,7 @@ public void testPessimisticLockWithDistinctWhileExplicitlyDisablingFollowOnLocki
);
assertTrue(
expected.getCause().getMessage().contains(
"Locking with DISTINCT is not supported!"
"Locking with DISTINCT is not supported"
)
);
}
@ -434,7 +434,7 @@ public void testPessimisticLockWithGroupByWhileExplicitlyDisablingFollowOnLockin
);
assertTrue(
expected.getCause().getMessage().contains(
"Locking with GROUP BY is not supported!"
"Locking with GROUP BY is not supported"
)
);
}
@ -505,7 +505,7 @@ public void testPessimisticLockWithUnionWhileExplicitlyDisablingFollowOnLockingT
);
assertTrue(
expected.getCause().getMessage().contains(
"Locking with set operators is not supported!"
"Locking with set operators is not supported"
)
);
}


@ -39,7 +39,7 @@ protected void buildSessionFactory() {
fail("Should throw DuplicateMappingException");
}
catch (DuplicateMappingException e) {
assertEquals( "The [org.hibernate.orm.test.entityname.DuplicateEntityNameTest$Purchase1] and [org.hibernate.orm.test.entityname.DuplicateEntityNameTest$Purchase2] entities share the same JPA entity name: [Purchase], which is not allowed!", e.getMessage() );
assertEquals( "The [org.hibernate.orm.test.entityname.DuplicateEntityNameTest$Purchase1] and [org.hibernate.orm.test.entityname.DuplicateEntityNameTest$Purchase2] entities share the same JPA entity name: [Purchase], which is not allowed", e.getMessage() );
}
}


@ -90,7 +90,7 @@ public void testAfterCompletionCallbackExecutedAfterTransactionTimeout() throws
}
catch (HibernateException e) {
// This is expected
assertEquals( "Transaction was rolled back in a different thread!", e.getMessage() );
assertEquals( "Transaction was rolled back in a different thread", e.getMessage() );
}
// verify that the callback was fired.


@ -98,7 +98,7 @@ public void testAuditProcessManagerFlushedOnTransactionTimeout() throws Exceptio
}
catch ( PersistenceException e ) {
// we expect this
assertTrue( e.getMessage().contains( "Transaction was rolled back in a different thread!" ) );
assertTrue( e.getMessage().contains( "Transaction was rolled back in a different thread" ) );
}
// test the audit process manager was flushed