remove exclamation marks from error messages!
parent bb29e3b060
commit ba48130c3f
@@ -316,7 +316,7 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
if ( matchingPersistentClass != null ) {
throw new DuplicateMappingException(
String.format(
-"The [%s] and [%s] entities share the same JPA entity name: [%s], which is not allowed!",
+"The [%s] and [%s] entities share the same JPA entity name: [%s], which is not allowed",
matchingPersistentClass.getClassName(),
persistentClass.getClassName(),
jpaEntityName

@@ -544,7 +544,7 @@ public class SessionFactoryOptionsBuilder implements SessionFactoryOptions {
this.jdbcTimeZone = TimeZone.getTimeZone( ZoneId.of((String) jdbcTimeZoneValue) );
}
else if ( jdbcTimeZoneValue != null ) {
-throw new IllegalArgumentException( "Configuration property " + JDBC_TIME_ZONE + " value [" + jdbcTimeZoneValue + "] is not supported!" );
+throw new IllegalArgumentException( "Configuration property " + JDBC_TIME_ZONE + " value [" + jdbcTimeZoneValue + "] is not supported" );
}

this.criteriaValueHandlingMode = ValueHandlingMode.interpret(

@@ -629,7 +629,7 @@ public class SessionFactoryOptionsBuilder implements SessionFactoryOptions {
e
);
}
-throw new IllegalArgumentException( "Cannot instantiate the class [" + strategyClass.getName() + "] because it does not have a constructor that accepts a dialect or an empty constructor!" );
+throw new IllegalArgumentException( "Cannot instantiate the class [" + strategyClass.getName() + "] because it does not have a constructor that accepts a dialect or an empty constructor" );
}
);
}

@@ -678,7 +678,7 @@ public class SessionFactoryOptionsBuilder implements SessionFactoryOptions {
e
);
}
-throw new IllegalArgumentException( "Cannot instantiate the class [" + strategyClass.getName() + "] because it does not have a constructor that accepts a dialect or an empty constructor!" );
+throw new IllegalArgumentException( "Cannot instantiate the class [" + strategyClass.getName() + "] because it does not have a constructor that accepts a dialect or an empty constructor" );
}
);
}

@@ -170,7 +170,7 @@ public class NamingHelper {
return bigInt.toString( 35 );
}
catch ( NoSuchAlgorithmException|UnsupportedEncodingException e ) {
-throw new HibernateException( "Unable to generate a hashed name!", e );
+throw new HibernateException( "Unable to generate a hashed name", e );
}
}
}

@@ -4068,7 +4068,7 @@ public class ModelBinder {
manyToOneBinding,
manyToOneSource.areValuesNullableByDefault(),
context -> {
-throw new AssertionFailure( "Argh!!!" );
+throw new AssertionFailure( "Should not be called" );
}
);
}

@@ -104,7 +104,7 @@ public class StandardServiceRegistryImpl extends AbstractServiceRegistryImpl imp
List<ProvidedService<?>> providedServices,
Map<?, ?> configurationValues) {
if ( super.isActive() ) {
-throw new IllegalStateException( "Can't reactivate an active registry!" );
+throw new IllegalStateException( "Can't reactivate an active registry" );
}
super.resetParent( bootstrapServiceRegistry );
this.configurationValues = new HashMap( configurationValues );

@@ -146,7 +146,7 @@ public class EnhancerImpl implements Enhancer {
private DynamicType.Builder<?> doEnhance(DynamicType.Builder<?> builder, TypeDescription managedCtClass) {
// can't effectively enhance interfaces
if ( managedCtClass.isInterface() ) {
-log.debugf( "Skipping enhancement of [%s]: it's an interface!", managedCtClass.getName() );
+log.debugf( "Skipping enhancement of [%s]: it's an interface", managedCtClass.getName() );
return null;
}
// skip already enhanced classes

@@ -1908,7 +1908,7 @@ public final class AnnotationBinder {
if ( incomingIdProperty != null && existingIdProperty == null ) {
throw new MappingException(
String.format(
-"You cannot override the [%s] non-identifier property from the [%s] base class or @MappedSuperclass and make it an identifier in the [%s] subclass!",
+"You cannot override the [%s] non-identifier property from the [%s] base class or @MappedSuperclass and make it an identifier in the [%s] subclass",
propertyData.getProperty().getName(),
propertyData.getProperty().getDeclaringClass().getName(),
property.getDeclaringClass().getName()

@@ -14,7 +14,7 @@ import org.hibernate.MappingException;
*/
public class NotYetImplementedException extends MappingException {
public NotYetImplementedException() {
-this( "Not yet implemented!" );
+this( "Not yet implemented" );
}

public NotYetImplementedException(String msg, Throwable root) {

@@ -659,7 +659,7 @@ public class TableBinder {
//works cause the pk has to be on the primary table
Table table = referencedEntity.getTable();
if ( idColumns.isEmpty() ) {
-LOG.debug( "No column in the identifier!" );
+LOG.debug( "No column in the identifier" );
}
for ( Column col: idColumns ) {
boolean match = false;

@@ -130,7 +130,7 @@ public class CockroachSqlAstTranslator<T extends JdbcOperation> extends Abstract
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );
@@ -24,8 +24,6 @@ import org.hibernate.sql.ast.tree.expression.Literal;
import org.hibernate.sql.ast.tree.expression.SqlTuple;
import org.hibernate.sql.ast.tree.expression.SqlTupleContainer;
import org.hibernate.sql.ast.tree.expression.Summarization;
import org.hibernate.sql.ast.tree.from.DerivedTableReference;
import org.hibernate.sql.ast.tree.from.NamedTableReference;
import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.from.TableReference;

@@ -148,7 +146,7 @@ public class H2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAstT
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );

@@ -116,7 +116,7 @@ public class HANASqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
appendSql( "grouping sets (())" );
}
else if ( expression instanceof Summarization ) {
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );

@@ -10,7 +10,6 @@ import java.util.List;
import java.util.function.Consumer;

import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.query.sqm.BinaryArithmeticOperator;
import org.hibernate.query.sqm.ComparisonOperator;

@@ -251,7 +250,7 @@ public class HSQLSqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );

@@ -149,7 +149,7 @@ public class MySQLDialect extends Dialect {
return MyISAMStorageEngine.INSTANCE;
}
else {
-throw new UnsupportedOperationException( "The " + storageEngine + " storage engine is not supported!" );
+throw new UnsupportedOperationException( "The " + storageEngine + " storage engine is not supported" );
}
}

@@ -10,7 +10,6 @@ import java.util.List;

import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.internal.util.collections.Stack;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.JdbcMappingContainer;
import org.hibernate.query.sqm.BinaryArithmeticOperator;
import org.hibernate.query.sqm.ComparisonOperator;

@@ -64,7 +63,7 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
final boolean followOnLockingDisabled = Boolean.FALSE.equals( followOnLocking );
if ( strategy != LockStrategy.FOLLOW_ON && querySpec.hasSortSpecifications() ) {
if ( followOnLockingDisabled ) {
-throw new IllegalQueryOperationException( "Locking with ORDER BY is not supported!" );
+throw new IllegalQueryOperationException( "Locking with ORDER BY is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}

@@ -72,19 +71,19 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
// See https://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_10002.htm#i2066346
if ( strategy != LockStrategy.FOLLOW_ON && isPartOfQueryGroup() ) {
if ( followOnLockingDisabled ) {
-throw new IllegalQueryOperationException( "Locking with set operators is not supported!" );
+throw new IllegalQueryOperationException( "Locking with set operators is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}
if ( strategy != LockStrategy.FOLLOW_ON && hasSetOperations( querySpec ) ) {
if ( followOnLockingDisabled ) {
-throw new IllegalQueryOperationException( "Locking with set operators is not supported!" );
+throw new IllegalQueryOperationException( "Locking with set operators is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}
if ( strategy != LockStrategy.FOLLOW_ON && useOffsetFetchClause( querySpec ) && !isRowsOnlyFetchClauseType( querySpec ) ) {
if ( followOnLockingDisabled ) {
-throw new IllegalQueryOperationException( "Locking with FETCH is not supported!" );
+throw new IllegalQueryOperationException( "Locking with FETCH is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}

@@ -100,7 +99,7 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
}
if ( hasOffset ) {
if ( followOnLockingDisabled ) {
-throw new IllegalQueryOperationException( "Locking with OFFSET is not supported!" );
+throw new IllegalQueryOperationException( "Locking with OFFSET is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}

@@ -152,7 +152,7 @@ public class PostgreSQLSqlAstTranslator<T extends JdbcOperation> extends Abstrac
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
}
else {

@@ -93,7 +93,7 @@ public class SpannerSqlAstTranslator<T extends JdbcOperation> extends AbstractSq
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );

@@ -323,7 +323,7 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );

@@ -146,7 +146,7 @@ public class SybaseSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
}
else {
expression.accept( this );

@@ -63,7 +63,7 @@ public class HypotheticalSetFunction extends AbstractSqmSelfRenderingFunctionDes
List<SortSpecification> withinGroup,
SqlAstTranslator<?> translator) {
if ( filter != null && !translator.supportsFilterClause() ) {
-throw new IllegalArgumentException( "Can't emulate filter clause for inverse distribution function [" + getName() + "]!" );
+throw new IllegalArgumentException( "Can't emulate filter clause for inverse distribution function [" + getName() + "]" );
}
sqlAppender.appendSql( getName() );
sqlAppender.appendSql( '(' );

@@ -72,7 +72,7 @@ public class HypotheticalSetWindowEmulation extends HypotheticalSetFunction {
return super.convertToSqlAst( walker );
}
else if ( currentClause != Clause.SELECT ) {
-throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported!" );
+throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported" );
}
final ReturnableType<?> resultType = resolveResultType(
walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()

@@ -95,7 +95,7 @@ public class InverseDistributionFunction extends AbstractSqmSelfRenderingFunctio
List<SortSpecification> withinGroup,
SqlAstTranslator<?> translator) {
if ( filter != null && !translator.supportsFilterClause() ) {
-throw new IllegalArgumentException( "Can't emulate filter clause for inverse distribution function [" + getName() + "]!" );
+throw new IllegalArgumentException( "Can't emulate filter clause for inverse distribution function [" + getName() + "]" );
}
sqlAppender.appendSql( getName() );
sqlAppender.appendSql( '(' );

@@ -67,7 +67,7 @@ public class InverseDistributionWindowEmulation extends InverseDistributionFunct
return super.convertToSqlAst( walker );
}
else if ( currentClause != Clause.SELECT ) {
-throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported!" );
+throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported" );
}
final ReturnableType<?> resultType = resolveResultType(
walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()

@@ -41,7 +41,7 @@ public class ModeStatsModeEmulation extends InverseDistributionFunction {
final boolean caseWrapper = filter != null && !translator.supportsFilterClause();
sqlAppender.appendSql( "stats_mode(" );
if ( withinGroup == null || withinGroup.size() != 1 ) {
-throw new IllegalArgumentException( "MODE function requires a WITHIN GROUP clause with exactly one order by item!" );
+throw new IllegalArgumentException( "MODE function requires a WITHIN GROUP clause with exactly one order by item" );
}
if ( caseWrapper ) {
translator.getCurrentClauseStack().push( Clause.WHERE );
@@ -117,7 +117,7 @@ public class DatasourceConnectionProviderImpl implements ConnectionProvider, Con
@Override
public Connection getConnection() throws SQLException {
if ( !available ) {
-throw new HibernateException( "Provider is closed!" );
+throw new HibernateException( "Provider is closed" );
}
return useCredentials ? dataSource.getConnection( user, pass ) : dataSource.getConnection();
}

@@ -295,7 +295,7 @@ public class DriverManagerConnectionProviderImpl
protected void validateConnectionsReturned() {
int allocationCount = state.pool.allConnections.size() - state.pool.availableConnections.size();
if ( allocationCount != 0 ) {
-CONNECTIONS_MESSAGE_LOGGER.error( "Connection leak detected: there are " + allocationCount + " unclosed connections!");
+CONNECTIONS_MESSAGE_LOGGER.error( "Connection leak detected: there are " + allocationCount + " unclosed connections");
}
}

@@ -422,7 +422,7 @@ public class DriverManagerConnectionProviderImpl
}
}
throw new HibernateException(
-"The internal connection pool has reached its maximum size and no connection is currently available!" );
+"The internal connection pool has reached its maximum size and no connection is currently available" );
}
conn = prepareConnection( conn );
} while ( conn == null );

@@ -46,7 +46,7 @@ public class JdbcServicesImpl implements JdbcServices, ServiceRegistryAwareServi
@Override
public void configure(Map<String, Object> configValues) {
this.jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );
-assert jdbcEnvironment != null : "JdbcEnvironment was not found!";
+assert jdbcEnvironment != null : "JdbcEnvironment was not found";

this.multiTenancyEnabled = serviceRegistry.getService(MultiTenantConnectionProvider.class)!=null;

@@ -163,7 +163,7 @@ public class SqlStatementLogger {
return;
}
if ( startTimeNanos <= 0 ) {
-throw new IllegalArgumentException( "startTimeNanos [" + startTimeNanos + "] should be greater than 0!" );
+throw new IllegalArgumentException( "startTimeNanos [" + startTimeNanos + "] should be greater than 0" );
}

long queryExecutionMillis = TimeUnit.NANOSECONDS.toMillis( System.nanoTime() - startTimeNanos );

@@ -35,7 +35,7 @@ public class ContextualJdbcConnectionAccess implements JdbcConnectionAccess, Ser
@Override
public Connection obtainConnection() throws SQLException {
if ( tenantIdentifier == null ) {
-throw new HibernateException( "Tenant identifier required!" );
+throw new HibernateException( "Tenant identifier required" );
}

try {

@@ -50,7 +50,7 @@ public class ContextualJdbcConnectionAccess implements JdbcConnectionAccess, Ser
@Override
public void releaseConnection(Connection connection) throws SQLException {
if ( tenantIdentifier == null ) {
-throw new HibernateException( "Tenant identifier required!" );
+throw new HibernateException( "Tenant identifier required" );
}

try {

@@ -33,8 +33,6 @@ import org.hibernate.engine.jndi.JndiNameException;
import org.hibernate.engine.spi.CollectionKey;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.id.IntegralDataTypeHolder;
import org.hibernate.query.QueryLogging;
import org.hibernate.type.BasicType;
import org.hibernate.type.SerializationException;
import org.hibernate.type.Type;

@@ -61,7 +59,7 @@ import static org.jboss.logging.Logger.Level.WARN;
public interface CoreMessageLogger extends BasicLogger {

@LogMessage(level = WARN)
-@Message(value = "Already session bound on call to bind(); make sure you clean up your sessions!", id = 2)
+@Message(value = "Already session bound on call to bind(); make sure you clean up your sessions", id = 2)
void alreadySessionBound();

@LogMessage(level = INFO)

@@ -445,7 +443,7 @@ public interface CoreMessageLogger extends BasicLogger {
@Message(value = "Bytecode enhancement failed: %s", id = 142)
String bytecodeEnhancementFailed(String entityName);

-@Message(value = "Bytecode enhancement failed because no public, protected or package-private default constructor was found for entity: %s. Private constructors don't work with runtime proxies!", id = 143)
+@Message(value = "Bytecode enhancement failed because no public, protected or package-private default constructor was found for entity: %s. Private constructors don't work with runtime proxies", id = 143)
String bytecodeEnhancementFailedBecauseOfDefaultConstructor(String entityName);

@LogMessage(level = WARN)

@@ -515,7 +513,7 @@ public interface CoreMessageLogger extends BasicLogger {
void narrowingProxy(Class concreteProxyClass);

@LogMessage(level = WARN)
-@Message(value = "FirstResult/maxResults specified on polymorphic query; applying in memory!", id = 180)
+@Message(value = "FirstResult/maxResults specified on polymorphic query; applying in memory", id = 180)
void needsLimit();

@LogMessage(level = WARN)

@@ -599,7 +597,7 @@ public interface CoreMessageLogger extends BasicLogger {
void preparedStatementAlreadyInBatch(String sql);

@LogMessage(level = WARN)
-@Message(value = "processEqualityExpression() : No expression to process!", id = 203)
+@Message(value = "processEqualityExpression() : No expression to process", id = 203)
void processEqualityExpression();

@LogMessage(level = INFO)

@@ -666,7 +664,7 @@ public interface CoreMessageLogger extends BasicLogger {
void readOnlyCacheConfiguredForMutableCollection(String name);

@LogMessage(level = WARN)
-@Message(value = "Recognized obsolete hibernate namespace %s. Use namespace %s instead. Refer to Hibernate 3.6 Migration Guide!",
+@Message(value = "Recognized obsolete hibernate namespace %s. Use namespace %s instead. Refer to Hibernate 3.6 Migration Guide",
id = 223)
void recognizedObsoleteHibernateNamespace(
String oldHibernateNamespace,

@@ -1348,7 +1346,7 @@ public interface CoreMessageLogger extends BasicLogger {
void usingDialect(Dialect dialect);

@LogMessage(level = ERROR)
-@Message(value = "Don't use old DTDs, read the Hibernate 3.x Migration Guide!", id = 404)
+@Message(value = "Don't use old DTDs, read the Hibernate 3.x Migration Guide", id = 404)
void usingOldDtd();

@LogMessage(level = INFO)
@@ -96,7 +96,7 @@ public class FilterImpl implements Filter, Serializable {
public Filter setParameterList(String name, Collection<?> values) throws HibernateException {
// Make sure this is a defined parameter and check the incoming value type
if ( values == null ) {
-throw new IllegalArgumentException( "Collection must be not null!" );
+throw new IllegalArgumentException( "Collection must be not null" );
}
JdbcMapping type = definition.getParameterJdbcMapping( name );
if ( type == null ) {

@@ -506,7 +506,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {

public Session getCurrentSession() throws HibernateException {
if ( currentSessionContext == null ) {
-throw new HibernateException( "No CurrentSessionContext configured!" );
+throw new HibernateException( "No CurrentSessionContext configured" );
}
return currentSessionContext.currentSession();
}

@@ -457,7 +457,7 @@ public class StatelessSessionImpl extends AbstractSharedSessionContract implemen

private void managedClose() {
if ( isClosed() ) {
-throw new SessionException( "Session was already closed!" );
+throw new SessionException( "Session was already closed" );
}
close();
}

@@ -498,7 +498,7 @@ public class BoundedConcurrentHashMap<K, V> extends AbstractMap<K, V>
coldHit( evicted );
break;
case HIR_NONRESIDENT:
-throw new IllegalStateException( "Can't hit a non-resident entry!" );
+throw new IllegalStateException( "Can't hit a non-resident entry" );
default:
throw new AssertionError( "Hit with unknown status: " + state );
}

@@ -1593,7 +1593,7 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
}
else {
throw new IllegalArgumentException(
-"The provided " + settingName + " setting value [" + settingValue + "] is not supported!"
+"The provided " + settingName + " setting value [" + settingValue + "] is not supported"
);
}

@@ -1603,7 +1603,7 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
}
catch (InstantiationException | IllegalAccessException e) {
throw new IllegalArgumentException(
-"The " + clazz.getSimpleName() +" class [" + instanceClass + "] could not be instantiated!",
+"The " + clazz.getSimpleName() +" class [" + instanceClass + "] could not be instantiated",
e
);
}

@@ -110,7 +110,7 @@ public class LoaderSqlAstCreationState

@Override
public void registerLockMode(String identificationVariable, LockMode explicitLockMode) {
-throw new UnsupportedOperationException( "Registering lock modes should only be done for result set mappings!" );
+throw new UnsupportedOperationException( "Registering lock modes should only be done for result set mappings" );
}

@Override

@@ -113,7 +113,7 @@ public abstract class Constraint implements RelationalModel, Exportable, Seriali
return bigInt.toString( 35 );
}
catch ( NoSuchAlgorithmException e ) {
-throw new HibernateException( "Unable to generate a hashed Constraint name!", e );
+throw new HibernateException( "Unable to generate a hashed Constraint name", e );
}
}

@@ -644,7 +644,7 @@ public class Table implements RelationalModel, Serializable, ContributableDataba
public Index addIndex(Index index) {
Index current = indexes.get( index.getName() );
if ( current != null ) {
-throw new MappingException( "Index " + index.getName() + " already exists!" );
+throw new MappingException( "Index " + index.getName() + " already exists" );
}
indexes.put( index.getName(), index );
return index;

@@ -653,7 +653,7 @@ public class Table implements RelationalModel, Serializable, ContributableDataba
public UniqueKey addUniqueKey(UniqueKey uniqueKey) {
UniqueKey current = uniqueKeys.get( uniqueKey.getName() );
if ( current != null ) {
-throw new MappingException( "UniqueKey " + uniqueKey.getName() + " already exists!" );
+throw new MappingException( "UniqueKey " + uniqueKey.getName() + " already exists" );
}
uniqueKeys.put( uniqueKey.getName(), uniqueKey );
return uniqueKey;

@@ -3804,7 +3804,7 @@ public abstract class AbstractEntityPersister
// Ensure that an immutable or non-modifiable entity is not being updated unless it is
// in the process of being deleted.
if ( entry == null && !isMutable() ) {
-throw new IllegalStateException( "Updating immutable entity that is not in session yet!" );
+throw new IllegalStateException( "Updating immutable entity that is not in session yet" );
}
if ( ( entityMetamodel.isDynamicUpdate() && dirtyFields != null ) ) {
// We need to generate the UPDATE SQL when dynamic-update="true"

@@ -64,6 +64,6 @@ public interface QueryLogging extends BasicLogger {
void ignoringUnrecognizedQueryHint(String hintName);

@LogMessage(level = WARN)
-@Message(value = "firstResult/maxResults specified with collection fetch; applying in memory!", id = 90003004)
+@Message(value = "firstResult/maxResults specified with collection fetch; applying in memory", id = 90003004)
void firstOrMaxResultsSpecifiedWithCollectionFetch();
}

@@ -343,7 +343,7 @@ public class AnonymousTupleEmbeddableValuedModelPart implements EmbeddableValued
boolean selected,
String resultVariable,
DomainResultCreationState creationState) {
-throw new UnsupportedOperationException( "AnonymousTupleEmbeddableValuedModelPart is not fetchable!" );
+throw new UnsupportedOperationException( "AnonymousTupleEmbeddableValuedModelPart is not fetchable" );
}

@Override

@@ -175,7 +175,7 @@ public class AnonymousTupleTableGroupProducer implements TableGroupProducer, Map
final EmbeddableValuedModelPart modelPartContainer = (EmbeddableValuedModelPart) existingModelPart;
for ( Attribute<?, ?> attribute : attributes ) {
if ( !( attribute instanceof SingularPersistentAttribute<?, ?> ) ) {
-throw new IllegalArgumentException( "Only embeddables without collections are supported!" );
+throw new IllegalArgumentException( "Only embeddables without collections are supported" );
}
final DomainType<?> attributeType = ( (SingularPersistentAttribute<?, ?>) attribute ).getType();
final ModelPart modelPart = createModelPart(

@@ -52,7 +52,7 @@ public class AnonymousTupleType<T> implements TupleType<T>, DomainType<T>, Retur
final SqmSelectableNode<?> component = components[i];
final String alias = component.getAlias();
if ( alias == null ) {
-throw new IllegalArgumentException( "Component at index " + i + " has no alias, but alias is required!" );
+throw new IllegalArgumentException( "Component at index " + i + " has no alias, but alias is required" );
}
map.put( alias, i );
}

@@ -62,7 +62,7 @@ public class AnonymousTupleType<T> implements TupleType<T>, DomainType<T>, Retur
private static SqmSelectableNode<?>[] extractSqmExpressibles(SqmSubQuery<?> subQuery) {
final SqmSelectClause selectClause = subQuery.getQuerySpec().getSelectClause();
if ( selectClause == null || selectClause.getSelectionItems().isEmpty() ) {
-throw new IllegalArgumentException( "subquery has no selection items!" );
+throw new IllegalArgumentException( "subquery has no selection items" );
}
// todo: right now, we "snapshot" the state of the subquery when creating this type, but maybe we shouldn't?
// i.e. what if the subquery changes later on? Or should we somehow mark the subquery to signal,

@@ -129,7 +129,7 @@ public class QuerySplitter {
final SqmQueryGroup<?> queryGroup = (SqmQueryGroup<?>) queryPart;
final SqmRoot<?> root = findUnmappedPolymorphicReference( queryGroup.getQueryParts().get( 0 ) );
if ( root != null ) {
-throw new UnsupportedOperationException( "Polymorphic query group is unsupported!" );
+throw new UnsupportedOperationException( "Polymorphic query group is unsupported" );
}
return null;
}

@@ -3610,7 +3610,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
final SqmFunctionDescriptor functionTemplate = getFunctionDescriptor( "listagg" );
if ( functionTemplate == null ) {
throw new SemanticException(
-"The listagg function was not registered for the dialect!"
+"The listagg function was not registered for the dialect"
);
}
final int argumentStartIndex;
@@ -14,11 +14,6 @@ import org.hibernate.query.sqm.produce.function.FunctionReturnTypeResolver;
import org.hibernate.query.sqm.tree.SqmTypedNode;
import org.hibernate.query.sqm.tree.predicate.SqmPredicate;
import org.hibernate.query.sqm.tree.select.SqmOrderByClause;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.tree.SqlAstNode;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.ast.tree.select.SortSpecification;
import org.hibernate.type.spi.TypeConfiguration;

import java.util.List;

@@ -111,7 +106,7 @@ public abstract class AbstractSqmSelfRenderingFunctionDescriptor
QueryEngine queryEngine,
TypeConfiguration typeConfiguration) {
if ( functionKind != FunctionKind.AGGREGATE ) {
-throw new UnsupportedOperationException( "The function " + getName() + " is not an aggregate function!" );
+throw new UnsupportedOperationException( "The function " + getName() + " is not an aggregate function" );
}
return new SelfRenderingSqmAggregateFunction<>(
this,

@@ -135,7 +130,7 @@ public abstract class AbstractSqmSelfRenderingFunctionDescriptor
QueryEngine queryEngine,
TypeConfiguration typeConfiguration) {
if ( functionKind != FunctionKind.ORDERED_SET_AGGREGATE ) {
-throw new UnsupportedOperationException( "The function " + getName() + " is not an ordered set-aggregate function!" );
+throw new UnsupportedOperationException( "The function " + getName() + " is not an ordered set-aggregate function" );
}
return new SelfRenderingSqmOrderedSetAggregateFunction<>(
this,

@@ -161,7 +156,7 @@ public abstract class AbstractSqmSelfRenderingFunctionDescriptor
QueryEngine queryEngine,
TypeConfiguration typeConfiguration) {
if ( functionKind != FunctionKind.WINDOW ) {
-throw new UnsupportedOperationException( "The function " + getName() + " is not a window function!" );
+throw new UnsupportedOperationException( "The function " + getName() + " is not a window function" );
}
return new SelfRenderingSqmWindowFunction<>(
this,

@@ -58,7 +58,7 @@ public interface SqmFunctionDescriptor {
ReturnableType<T> impliedResultType,
QueryEngine queryEngine,
TypeConfiguration typeConfiguration) {
-throw new UnsupportedOperationException( "Not an aggregate function!" );
+throw new UnsupportedOperationException( "Not an aggregate function" );
}

/**

@@ -72,7 +72,7 @@ public interface SqmFunctionDescriptor {
ReturnableType<T> impliedResultType,
QueryEngine queryEngine,
TypeConfiguration typeConfiguration) {
-throw new UnsupportedOperationException( "Not an ordered set-aggregate function!" );
+throw new UnsupportedOperationException( "Not an ordered set-aggregate function" );
}

/**

@@ -87,7 +87,7 @@ public interface SqmFunctionDescriptor {
ReturnableType<T> impliedResultType,
QueryEngine queryEngine,
TypeConfiguration typeConfiguration) {
-throw new UnsupportedOperationException( "Not an aggregate function!" );
+throw new UnsupportedOperationException( "Not an aggregate function" );
}

/**

@@ -332,7 +332,7 @@ public class QuerySqmImpl<R>
);
default:
throw new UnsupportedOperationException(
-"The " + immutableEntityUpdateQueryHandlingMode + " is not supported!"
+"The " + immutableEntityUpdateQueryHandlingMode + " is not supported"
);
}
}

@@ -304,7 +304,7 @@ public class SqmCriteriaNodeBuilder implements NodeBuilder, SqmCreationContext,
queryParts.add( ( (SqmSelectQuery<T>) query1 ).getQueryPart() );
for ( CriteriaQuery<?> query : queries ) {
if ( query.getResultType() != resultType ) {
-throw new IllegalArgumentException( "Result type of all operands must match!" );
+throw new IllegalArgumentException( "Result type of all operands must match" );
}
queryParts.add( ( (SqmSelectQuery<T>) query ).getQueryPart() );
}

@@ -667,7 +667,7 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base

@Override
public void registerLockMode(String identificationVariable, LockMode explicitLockMode) {
-throw new UnsupportedOperationException( "Registering lock modes should only be done for result set mappings!" );
+throw new UnsupportedOperationException( "Registering lock modes should only be done for result set mappings" );
}

public QueryOptions getQueryOptions() {

@@ -1593,7 +1593,7 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
}
throw new IllegalArgumentException(
String.format(
-"Couldn't find cte column %s in cte %s!",
+"Couldn't find cte column %s in cte %s",
cteColumn.getColumnName(),
cteTable.getTableExpression()
)

@@ -3066,7 +3066,7 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
else if ( domainType instanceof ManagedDomainType<?> ) {
for ( Attribute<?, ?> attribute : ( (ManagedDomainType<?>) domainType ).getAttributes() ) {
if ( !( attribute instanceof SingularPersistentAttribute<?, ?> ) ) {
-throw new IllegalArgumentException( "Only embeddables without collections are supported!" );
+throw new IllegalArgumentException( "Only embeddables without collections are supported" );
}
final DomainType<?> attributeType = ( (SingularPersistentAttribute<?, ?>) attribute ).getType();
addColumnNames( columnNames, attributeType, componentName + "_" + attribute.getName() );

@@ -82,7 +82,7 @@ public abstract class AbstractSqmDmlStatement<E>
@Override
public void addCteStatement(SqmCteStatement<?> cteStatement) {
if ( cteStatements.putIfAbsent( cteStatement.getCteTable().getCteName(), cteStatement ) != null ) {
-throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getCteName() + " already exists!" );
+throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getCteName() + " already exists" );
}
}

@@ -113,21 +113,21 @@ public class SqmDerivedRoot<T> extends SqmRoot<T> implements JpaDerivedRoot<T> {

@Override
public <S extends T> SqmTreatedRoot<T, S> treatAs(Class<S> treatJavaType) throws PathException {
-throw new UnsupportedOperationException( "Derived roots can not be treated!" );
+throw new UnsupportedOperationException( "Derived roots can not be treated" );
}

@Override
public <S extends T> SqmTreatedRoot<T, S> treatAs(EntityDomainType<S> treatTarget) throws PathException {
-throw new UnsupportedOperationException( "Derived roots can not be treated!" );
+throw new UnsupportedOperationException( "Derived roots can not be treated" );
}

@Override
public <S extends T> SqmFrom<?, S> treatAs(Class<S> treatJavaType, String alias) {
-throw new UnsupportedOperationException( "Derived roots can not be treated!" );
+throw new UnsupportedOperationException( "Derived roots can not be treated" );
}

@Override
public <S extends T> SqmFrom<?, S> treatAs(EntityDomainType<S> treatTarget, String alias) {
-throw new UnsupportedOperationException( "Derived roots can not be treated!" );
+throw new UnsupportedOperationException( "Derived roots can not be treated" );
}
}

@@ -86,7 +86,7 @@ public class SqmPluralPartJoin<O,T> extends AbstractSqmJoin<O,T> implements SqmQ

@Override
public void setJoinPredicate(SqmPredicate predicate) {
-throw new UnsupportedOperationException( "Setting a predicate for a plural part join is unsupported!" );
+throw new UnsupportedOperationException( "Setting a predicate for a plural part join is unsupported" );
}

@Override

@@ -163,21 +163,21 @@ public class SqmDerivedJoin<T> extends AbstractSqmJoin<T, T> implements JpaDeriv

@Override
public <S extends T> SqmTreatedEntityJoin<T,S> treatAs(Class<S> treatJavaType) throws PathException {
-throw new UnsupportedOperationException( "Derived joins can not be treated!" );
+throw new UnsupportedOperationException( "Derived joins can not be treated" );
}
@Override
public <S extends T> SqmTreatedEntityJoin<T,S> treatAs(EntityDomainType<S> treatTarget) throws PathException {
-throw new UnsupportedOperationException( "Derived joins can not be treated!" );
+throw new UnsupportedOperationException( "Derived joins can not be treated" );
}

@Override
public <S extends T> SqmFrom<?, S> treatAs(Class<S> treatJavaType, String alias) {
-throw new UnsupportedOperationException( "Derived joins can not be treated!" );
+throw new UnsupportedOperationException( "Derived joins can not be treated" );
}

@Override
public <S extends T> SqmFrom<?, S> treatAs(EntityDomainType<S> treatTarget, String alias) {
-throw new UnsupportedOperationException( "Derived joins can not be treated!" );
+throw new UnsupportedOperationException( "Derived joins can not be treated" );
}

}

@@ -95,7 +95,7 @@ public abstract class AbstractSqmSelectQuery<T>
@Override
public void addCteStatement(SqmCteStatement<?> cteStatement) {
if ( cteStatements.putIfAbsent( cteStatement.getCteTable().getCteName(), cteStatement ) != null ) {
-throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getCteName() + " already exists!" );
+throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getCteName() + " already exists" );
}
}
@@ -161,14 +161,14 @@ public class SqmQueryGroup<T> extends SqmQueryPart<T> implements JpaQueryGroup<T
final SqmQuerySpec<?> querySpec = (SqmQuerySpec<?>) queryPart;
final List<SqmSelection<?>> selections = querySpec.getSelectClause().getSelections();
if ( firstSelectionSize != selections.size() ) {
-throw new SemanticException( "All query parts in a query group must have the same arity!" );
+throw new SemanticException( "All query parts in a query group must have the same arity" );
}
for ( int j = 0; j < firstSelectionSize; j++ ) {
final SqmTypedNode<?> firstSqmSelection = typedNodes.get( j );
final JavaType<?> firstJavaType = firstSqmSelection.getNodeJavaType();
if ( firstJavaType != selections.get( j ).getNodeJavaType() ) {
throw new SemanticException(
-"Select items of the same index must have the same java type across all query parts!"
+"Select items of the same index must have the same java type across all query parts"
);
}
if ( firstSqmSelection instanceof SqmFrom<?, ?> ) {

@@ -202,7 +202,7 @@ public class SqmQueryGroup<T> extends SqmQueryPart<T> implements JpaQueryGroup<T
}
if ( matchingAttrJoin == null || firstAttrJoin.getModel() != matchingAttrJoin.getModel() ) {
throw new SemanticException(
-"All query parts in a query group must have the same join fetches in the same order!"
+"All query parts in a query group must have the same join fetches in the same order"
);
}
validateFetchesMatch( firstAttrJoin, matchingAttrJoin );

@@ -216,7 +216,7 @@ public class SqmQueryGroup<T> extends SqmQueryPart<T> implements JpaQueryGroup<T
final SqmAttributeJoin<?, ?> attrJoin = (SqmAttributeJoin<?, ?>) sqmJoin;
if ( attrJoin.isFetched() ) {
throw new SemanticException(
-"All query parts in a query group must have the same join fetches in the same order!"
+"All query parts in a query group must have the same join fetches in the same order"
);
}
}

@@ -111,7 +111,7 @@ public abstract class SqmQueryPart<T> implements SqmVisitableNode, JpaQueryPart<
}
else {
if ( fetchClauseType == null ) {
-throw new IllegalArgumentException( "Fetch clause may not be null!" );
+throw new IllegalArgumentException( "Fetch clause may not be null" );
}
fetchExpression.applyInferableType( nodeBuilder.getIntegerType() );
this.fetchExpression = fetchExpression;

@@ -87,7 +87,7 @@ public class SynchronizationCallbackCoordinatorTrackingImpl extends Synchronizat
doAfterCompletion( false, true );

// NOTE : doAfterCompletion calls reset
-throw new HibernateException( "Transaction was rolled back in a different thread!" );
+throw new HibernateException( "Transaction was rolled back in a different thread" );
}
}
}

@@ -468,7 +468,7 @@ public abstract class AbstractServiceRegistryImpl
*/
public synchronized void reactivate() {
if ( !active.compareAndSet( false, true ) ) {
-throw new IllegalStateException( "Was not inactive, could not reactivate!" );
+throw new IllegalStateException( "Was not inactive, could not reactivate" );
}
}

@@ -533,13 +533,13 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
}
else if ( expression instanceof JdbcParameter ) {
if ( jdbcParameterBindings == null ) {
-throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available!" );
+throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available" );
}
return (R) getParameterBindValue( (JdbcParameter) expression );
}
else if ( expression instanceof SqmParameterInterpretation ) {
if ( jdbcParameterBindings == null ) {
-throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available!" );
+throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available" );
}
return (R) getParameterBindValue( (JdbcParameter) ( (SqmParameterInterpretation) expression).getResolvedExpression() );
}

@@ -553,7 +553,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
}
else if ( expression instanceof JdbcParameter ) {
if ( jdbcParameterBindings == null ) {
-throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available!" );
+throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available" );
}
final JdbcParameter parameter = (JdbcParameter) expression;
renderAsLiteral( parameter, getParameterBindValue( parameter ) );

@@ -561,7 +561,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
}
else if ( expression instanceof SqmParameterInterpretation ) {
if ( jdbcParameterBindings == null ) {
-throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available!" );
+throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available" );
}
final JdbcParameter parameter = (JdbcParameter) ( (SqmParameterInterpretation) expression).getResolvedExpression();
renderAsLiteral( parameter, getParameterBindValue( parameter ) );

@@ -644,7 +644,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
jdbcOperation = translateSelect( (SelectStatement) statement );
}
else {
-throw new IllegalArgumentException( "Unexpected statement!" );
+throw new IllegalArgumentException( "Unexpected statement" );
}

if ( jdbcParameterBindings != null && CollectionHelper.isNotEmpty( getFilterJdbcParameters() ) ) {

@@ -1241,19 +1241,19 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
LockStrategy strategy = LockStrategy.CLAUSE;
if ( !querySpec.getGroupByClauseExpressions().isEmpty() ) {
if ( Boolean.FALSE.equals( followOnLocking ) ) {
-throw new IllegalQueryOperationException( "Locking with GROUP BY is not supported!" );
+throw new IllegalQueryOperationException( "Locking with GROUP BY is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}
if ( querySpec.getHavingClauseRestrictions() != null ) {
if ( Boolean.FALSE.equals( followOnLocking ) ) {
-throw new IllegalQueryOperationException( "Locking with HAVING is not supported!" );
+throw new IllegalQueryOperationException( "Locking with HAVING is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}
if ( querySpec.getSelectClause().isDistinct() ) {
if ( Boolean.FALSE.equals( followOnLocking ) ) {
-throw new IllegalQueryOperationException( "Locking with DISTINCT is not supported!" );
+throw new IllegalQueryOperationException( "Locking with DISTINCT is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}

@@ -1267,7 +1267,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
if ( tableGroupJoin.getJoinType() != SqlAstJoinType.INNER && !( group instanceof VirtualTableGroup ) ) {
if ( Boolean.FALSE.equals( followOnLocking ) ) {
throw new IllegalQueryOperationException(
-"Locking with OUTER joins is not supported!" );
+"Locking with OUTER joins is not supported" );
}
return Boolean.TRUE;
}

@@ -1285,7 +1285,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
if ( tableJoin.getJoinType() != SqlAstJoinType.INNER && !( tableJoin.getJoinedNode() instanceof VirtualTableGroup ) ) {
if ( Boolean.FALSE.equals( followOnLocking ) ) {
throw new IllegalQueryOperationException(
-"Locking with OUTER joins is not supported!" );
+"Locking with OUTER joins is not supported" );
}
return Boolean.TRUE;
}

@@ -1298,7 +1298,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
}
if ( hasAggregateFunctions( querySpec ) ) {
if ( Boolean.FALSE.equals( followOnLocking ) ) {
-throw new IllegalQueryOperationException( "Locking with aggregate functions is not supported!" );
+throw new IllegalQueryOperationException( "Locking with aggregate functions is not supported" );
}
strategy = LockStrategy.FOLLOW_ON;
}

@@ -4213,13 +4213,13 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
@Override
public void visitTableGroup(TableGroup tableGroup) {
// TableGroup and TableGroup handling should be performed as part of `#visitFromClause`...
-throw new UnsupportedOperationException( "This should never be invoked as org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter.visitTableGroup should handle this!" );
+throw new UnsupportedOperationException( "This should never be invoked as org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter.visitTableGroup should handle this" );
}

@Override
public void visitTableGroupJoin(TableGroupJoin tableGroupJoin) {
// TableGroup and TableGroupJoin handling should be performed as part of `#visitFromClause`...
-throw new UnsupportedOperationException( "This should never be invoked as org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter.visitTableGroup should handle this!" );
+throw new UnsupportedOperationException( "This should never be invoked as org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter.visitTableGroup should handle this" );
}

@Override
@@ -47,7 +47,7 @@ public abstract class AbstractStatement implements Statement, CteContainer {
@Override
public void addCteStatement(CteStatement cteStatement) {
if ( cteStatements.putIfAbsent( cteStatement.getCteTable().getTableExpression(), cteStatement ) != null ) {
-throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getTableExpression() + " already exists!" );
+throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getTableExpression() + " already exists" );
}
}
}

@@ -113,7 +113,7 @@ public abstract class QueryPart implements SqlAstNode, Expression, DomainResultP
}
else {
if ( fetchClauseType == null ) {
-throw new IllegalArgumentException( "Fetch clause may not be null!" );
+throw new IllegalArgumentException( "Fetch clause may not be null" );
}
this.fetchClauseExpression = fetchClauseExpression;
this.fetchClauseType = fetchClauseType;

@@ -186,7 +186,7 @@ public class QuerySpec extends QueryPart implements SqlAstNode, PredicateContain
);
}
else {
-throw new UnsupportedOperationException("Domain result for non-scalar subquery shouldn't be created!");
+throw new UnsupportedOperationException("Domain result for non-scalar subquery shouldn't be created");
}
}
}

@@ -183,7 +183,7 @@ public class ListResultsConsumer<R> implements ResultsConsumer<List<R>, R> {
}
}
}
-throw new IllegalStateException( "Should not reach this!" );
+throw new IllegalStateException( "Should not reach this" );
}

/**

@@ -70,12 +70,12 @@ public class NullJdbcType implements JdbcType {

@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options) {
-throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null!" );
+throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null" );
}

@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options) {
-throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null!" );
+throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null" );
}
};
}

@@ -33,7 +33,7 @@ public class MappedSuperClassIdPropertyBasicAttributeOverrideTest {
}
catch (MappingException expected) {
assertEquals(
-"You cannot override the [uid] non-identifier property from the [org.hibernate.orm.test.annotations.override.mappedsuperclass.MappedSuperClassWithUuidAsBasic] base class or @MappedSuperclass and make it an identifier in the [org.hibernate.orm.test.annotations.override.mappedsuperclass.SubclassWithUuidAsId] subclass!",
+"You cannot override the [uid] non-identifier property from the [org.hibernate.orm.test.annotations.override.mappedsuperclass.MappedSuperClassWithUuidAsBasic] base class or @MappedSuperclass and make it an identifier in the [org.hibernate.orm.test.annotations.override.mappedsuperclass.SubclassWithUuidAsId] subclass",
expected.getMessage()
);
}

@@ -198,7 +198,7 @@ public class OracleFollowOnLockingTest extends
);
assertTrue(
expected.getCause().getMessage().contains(
-"Locking with OFFSET is not supported!"
+"Locking with OFFSET is not supported"
)
);
}

@@ -278,7 +278,7 @@ public class OracleFollowOnLockingTest extends
);
assertTrue(
expected.getCause().getMessage().contains(
-"Locking with ORDER BY is not supported!"
+"Locking with ORDER BY is not supported"
)
);
}

@@ -356,7 +356,7 @@ public class OracleFollowOnLockingTest extends
);
assertTrue(
expected.getCause().getMessage().contains(
-"Locking with DISTINCT is not supported!"
+"Locking with DISTINCT is not supported"
)
);
}

@@ -434,7 +434,7 @@ public class OracleFollowOnLockingTest extends
);
assertTrue(
expected.getCause().getMessage().contains(
-"Locking with GROUP BY is not supported!"
+"Locking with GROUP BY is not supported"
)
);
}

@@ -505,7 +505,7 @@ public class OracleFollowOnLockingTest extends
);
assertTrue(
expected.getCause().getMessage().contains(
-"Locking with set operators is not supported!"
+"Locking with set operators is not supported"
)
);
}

@@ -39,7 +39,7 @@ public class DuplicateEntityNameTest extends BaseCoreFunctionalTestCase {
fail("Should throw DuplicateMappingException");
}
catch (DuplicateMappingException e) {
-assertEquals( "The [org.hibernate.orm.test.entityname.DuplicateEntityNameTest$Purchase1] and [org.hibernate.orm.test.entityname.DuplicateEntityNameTest$Purchase2] entities share the same JPA entity name: [Purchase], which is not allowed!", e.getMessage() );
+assertEquals( "The [org.hibernate.orm.test.entityname.DuplicateEntityNameTest$Purchase1] and [org.hibernate.orm.test.entityname.DuplicateEntityNameTest$Purchase2] entities share the same JPA entity name: [Purchase], which is not allowed", e.getMessage() );
}
}

@@ -90,7 +90,7 @@ public class JtaAfterCompletionTest extends BaseSessionFactoryFunctionalTest {
}
catch (HibernateException e) {
// This is expected
-assertEquals( "Transaction was rolled back in a different thread!", e.getMessage() );
+assertEquals( "Transaction was rolled back in a different thread", e.getMessage() );
}

// verify that the callback was fired.

@@ -98,7 +98,7 @@ public class JtaTransactionAfterCallbackTest extends BaseEnversJPAFunctionalTest
}
catch ( PersistenceException e ) {
// we expect this
-assertTrue( e.getMessage().contains( "Transaction was rolled back in a different thread!" ) );
+assertTrue( e.getMessage().contains( "Transaction was rolled back in a different thread" ) );
}

// test the audit process manager was flushed