remove exclamation marks from error messages!
parent bb29e3b060
commit ba48130c3f
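The change applies one mechanical pattern throughout: trailing exclamation marks are dropped from exception and log messages, while the surrounding code is left untouched. A representative before/after, taken from the NamingHelper hunk below:

    // Before: the message ends with an exclamation mark
    throw new HibernateException( "Unable to generate a hashed name!", e );

    // After: the same message without the trailing punctuation
    throw new HibernateException( "Unable to generate a hashed name", e );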
@@ -316,7 +316,7 @@ public class InFlightMetadataCollectorImpl implements InFlightMetadataCollector
 if ( matchingPersistentClass != null ) {
 throw new DuplicateMappingException(
 String.format(
-"The [%s] and [%s] entities share the same JPA entity name: [%s], which is not allowed!",
+"The [%s] and [%s] entities share the same JPA entity name: [%s], which is not allowed",
 matchingPersistentClass.getClassName(),
 persistentClass.getClassName(),
 jpaEntityName
@@ -544,7 +544,7 @@ public class SessionFactoryOptionsBuilder implements SessionFactoryOptions {
 this.jdbcTimeZone = TimeZone.getTimeZone( ZoneId.of((String) jdbcTimeZoneValue) );
 }
 else if ( jdbcTimeZoneValue != null ) {
-throw new IllegalArgumentException( "Configuration property " + JDBC_TIME_ZONE + " value [" + jdbcTimeZoneValue + "] is not supported!" );
+throw new IllegalArgumentException( "Configuration property " + JDBC_TIME_ZONE + " value [" + jdbcTimeZoneValue + "] is not supported" );
 }

 this.criteriaValueHandlingMode = ValueHandlingMode.interpret(
@@ -629,7 +629,7 @@ public class SessionFactoryOptionsBuilder implements SessionFactoryOptions {
 e
 );
 }
-throw new IllegalArgumentException( "Cannot instantiate the class [" + strategyClass.getName() + "] because it does not have a constructor that accepts a dialect or an empty constructor!" );
+throw new IllegalArgumentException( "Cannot instantiate the class [" + strategyClass.getName() + "] because it does not have a constructor that accepts a dialect or an empty constructor" );
 }
 );
 }
@@ -678,7 +678,7 @@ public class SessionFactoryOptionsBuilder implements SessionFactoryOptions {
 e
 );
 }
-throw new IllegalArgumentException( "Cannot instantiate the class [" + strategyClass.getName() + "] because it does not have a constructor that accepts a dialect or an empty constructor!" );
+throw new IllegalArgumentException( "Cannot instantiate the class [" + strategyClass.getName() + "] because it does not have a constructor that accepts a dialect or an empty constructor" );
 }
 );
 }
@@ -170,7 +170,7 @@ public class NamingHelper {
 return bigInt.toString( 35 );
 }
 catch ( NoSuchAlgorithmException|UnsupportedEncodingException e ) {
-throw new HibernateException( "Unable to generate a hashed name!", e );
+throw new HibernateException( "Unable to generate a hashed name", e );
 }
 }
 }
@@ -4068,7 +4068,7 @@ public class ModelBinder {
 manyToOneBinding,
 manyToOneSource.areValuesNullableByDefault(),
 context -> {
-throw new AssertionFailure( "Argh!!!" );
+throw new AssertionFailure( "Should not be called" );
 }
 );
 }
@@ -104,7 +104,7 @@ public class StandardServiceRegistryImpl extends AbstractServiceRegistryImpl imp
 List<ProvidedService<?>> providedServices,
 Map<?, ?> configurationValues) {
 if ( super.isActive() ) {
-throw new IllegalStateException( "Can't reactivate an active registry!" );
+throw new IllegalStateException( "Can't reactivate an active registry" );
 }
 super.resetParent( bootstrapServiceRegistry );
 this.configurationValues = new HashMap( configurationValues );
@@ -146,7 +146,7 @@ public class EnhancerImpl implements Enhancer {
 private DynamicType.Builder<?> doEnhance(DynamicType.Builder<?> builder, TypeDescription managedCtClass) {
 // can't effectively enhance interfaces
 if ( managedCtClass.isInterface() ) {
-log.debugf( "Skipping enhancement of [%s]: it's an interface!", managedCtClass.getName() );
+log.debugf( "Skipping enhancement of [%s]: it's an interface", managedCtClass.getName() );
 return null;
 }
 // skip already enhanced classes
@@ -1908,7 +1908,7 @@ public final class AnnotationBinder {
 if ( incomingIdProperty != null && existingIdProperty == null ) {
 throw new MappingException(
 String.format(
-"You cannot override the [%s] non-identifier property from the [%s] base class or @MappedSuperclass and make it an identifier in the [%s] subclass!",
+"You cannot override the [%s] non-identifier property from the [%s] base class or @MappedSuperclass and make it an identifier in the [%s] subclass",
 propertyData.getProperty().getName(),
 propertyData.getProperty().getDeclaringClass().getName(),
 property.getDeclaringClass().getName()
@@ -14,7 +14,7 @@ import org.hibernate.MappingException;
 */
 public class NotYetImplementedException extends MappingException {
 public NotYetImplementedException() {
-this( "Not yet implemented!" );
+this( "Not yet implemented" );
 }

 public NotYetImplementedException(String msg, Throwable root) {
@@ -659,7 +659,7 @@ public class TableBinder {
 //works cause the pk has to be on the primary table
 Table table = referencedEntity.getTable();
 if ( idColumns.isEmpty() ) {
-LOG.debug( "No column in the identifier!" );
+LOG.debug( "No column in the identifier" );
 }
 for ( Column col: idColumns ) {
 boolean match = false;
@@ -130,7 +130,7 @@ public class CockroachSqlAstTranslator<T extends JdbcOperation> extends Abstract
 // This could theoretically be emulated by rendering all grouping variations of the query and
 // connect them via union all but that's probably pretty inefficient and would have to happen
 // on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
 }
 else {
 expression.accept( this );
@@ -24,8 +24,6 @@ import org.hibernate.sql.ast.tree.expression.Literal;
 import org.hibernate.sql.ast.tree.expression.SqlTuple;
 import org.hibernate.sql.ast.tree.expression.SqlTupleContainer;
 import org.hibernate.sql.ast.tree.expression.Summarization;
-import org.hibernate.sql.ast.tree.from.DerivedTableReference;
-import org.hibernate.sql.ast.tree.from.NamedTableReference;
 import org.hibernate.sql.ast.tree.from.QueryPartTableReference;
 import org.hibernate.sql.ast.tree.from.TableGroup;
 import org.hibernate.sql.ast.tree.from.TableReference;
@@ -148,7 +146,7 @@ public class H2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAstT
 // This could theoretically be emulated by rendering all grouping variations of the query and
 // connect them via union all but that's probably pretty inefficient and would have to happen
 // on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
 }
 else {
 expression.accept( this );
@@ -116,7 +116,7 @@ public class HANASqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
 appendSql( "grouping sets (())" );
 }
 else if ( expression instanceof Summarization ) {
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
 }
 else {
 expression.accept( this );
@@ -10,7 +10,6 @@ import java.util.List;
 import java.util.function.Consumer;

 import org.hibernate.engine.spi.SessionFactoryImplementor;
-import org.hibernate.metamodel.mapping.JdbcMapping;
 import org.hibernate.metamodel.mapping.JdbcMappingContainer;
 import org.hibernate.query.sqm.BinaryArithmeticOperator;
 import org.hibernate.query.sqm.ComparisonOperator;
@@ -251,7 +250,7 @@ public class HSQLSqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
 // This could theoretically be emulated by rendering all grouping variations of the query and
 // connect them via union all but that's probably pretty inefficient and would have to happen
 // on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
 }
 else {
 expression.accept( this );
@@ -149,7 +149,7 @@ public class MySQLDialect extends Dialect {
 return MyISAMStorageEngine.INSTANCE;
 }
 else {
-throw new UnsupportedOperationException( "The " + storageEngine + " storage engine is not supported!" );
+throw new UnsupportedOperationException( "The " + storageEngine + " storage engine is not supported" );
 }
 }

@@ -10,7 +10,6 @@ import java.util.List;

 import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.internal.util.collections.Stack;
-import org.hibernate.metamodel.mapping.JdbcMapping;
 import org.hibernate.metamodel.mapping.JdbcMappingContainer;
 import org.hibernate.query.sqm.BinaryArithmeticOperator;
 import org.hibernate.query.sqm.ComparisonOperator;
@@ -64,7 +63,7 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
 final boolean followOnLockingDisabled = Boolean.FALSE.equals( followOnLocking );
 if ( strategy != LockStrategy.FOLLOW_ON && querySpec.hasSortSpecifications() ) {
 if ( followOnLockingDisabled ) {
-throw new IllegalQueryOperationException( "Locking with ORDER BY is not supported!" );
+throw new IllegalQueryOperationException( "Locking with ORDER BY is not supported" );
 }
 strategy = LockStrategy.FOLLOW_ON;
 }
@@ -72,19 +71,19 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
 // See https://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_10002.htm#i2066346
 if ( strategy != LockStrategy.FOLLOW_ON && isPartOfQueryGroup() ) {
 if ( followOnLockingDisabled ) {
-throw new IllegalQueryOperationException( "Locking with set operators is not supported!" );
+throw new IllegalQueryOperationException( "Locking with set operators is not supported" );
 }
 strategy = LockStrategy.FOLLOW_ON;
 }
 if ( strategy != LockStrategy.FOLLOW_ON && hasSetOperations( querySpec ) ) {
 if ( followOnLockingDisabled ) {
-throw new IllegalQueryOperationException( "Locking with set operators is not supported!" );
+throw new IllegalQueryOperationException( "Locking with set operators is not supported" );
 }
 strategy = LockStrategy.FOLLOW_ON;
 }
 if ( strategy != LockStrategy.FOLLOW_ON && useOffsetFetchClause( querySpec ) && !isRowsOnlyFetchClauseType( querySpec ) ) {
 if ( followOnLockingDisabled ) {
-throw new IllegalQueryOperationException( "Locking with FETCH is not supported!" );
+throw new IllegalQueryOperationException( "Locking with FETCH is not supported" );
 }
 strategy = LockStrategy.FOLLOW_ON;
 }
@@ -100,7 +99,7 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
 }
 if ( hasOffset ) {
 if ( followOnLockingDisabled ) {
-throw new IllegalQueryOperationException( "Locking with OFFSET is not supported!" );
+throw new IllegalQueryOperationException( "Locking with OFFSET is not supported" );
 }
 strategy = LockStrategy.FOLLOW_ON;
 }
@@ -152,7 +152,7 @@ public class PostgreSQLSqlAstTranslator<T extends JdbcOperation> extends Abstrac
 // This could theoretically be emulated by rendering all grouping variations of the query and
 // connect them via union all but that's probably pretty inefficient and would have to happen
 // on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
 }
 }
 else {
@@ -93,7 +93,7 @@ public class SpannerSqlAstTranslator<T extends JdbcOperation> extends AbstractSq
 // This could theoretically be emulated by rendering all grouping variations of the query and
 // connect them via union all but that's probably pretty inefficient and would have to happen
 // on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
 }
 else {
 expression.accept( this );
@@ -323,7 +323,7 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
 // This could theoretically be emulated by rendering all grouping variations of the query and
 // connect them via union all but that's probably pretty inefficient and would have to happen
 // on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
 }
 else {
 expression.accept( this );
@@ -146,7 +146,7 @@ public class SybaseSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
 // This could theoretically be emulated by rendering all grouping variations of the query and
 // connect them via union all but that's probably pretty inefficient and would have to happen
 // on the query spec level
-throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
+throw new UnsupportedOperationException( "Summarization is not supported by DBMS" );
 }
 else {
 expression.accept( this );
@@ -63,7 +63,7 @@ public class HypotheticalSetFunction extends AbstractSqmSelfRenderingFunctionDes
 List<SortSpecification> withinGroup,
 SqlAstTranslator<?> translator) {
 if ( filter != null && !translator.supportsFilterClause() ) {
-throw new IllegalArgumentException( "Can't emulate filter clause for inverse distribution function [" + getName() + "]!" );
+throw new IllegalArgumentException( "Can't emulate filter clause for inverse distribution function [" + getName() + "]" );
 }
 sqlAppender.appendSql( getName() );
 sqlAppender.appendSql( '(' );
@@ -72,7 +72,7 @@ public class HypotheticalSetWindowEmulation extends HypotheticalSetFunction {
 return super.convertToSqlAst( walker );
 }
 else if ( currentClause != Clause.SELECT ) {
-throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported!" );
+throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported" );
 }
 final ReturnableType<?> resultType = resolveResultType(
 walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()
@@ -95,7 +95,7 @@ public class InverseDistributionFunction extends AbstractSqmSelfRenderingFunctio
 List<SortSpecification> withinGroup,
 SqlAstTranslator<?> translator) {
 if ( filter != null && !translator.supportsFilterClause() ) {
-throw new IllegalArgumentException( "Can't emulate filter clause for inverse distribution function [" + getName() + "]!" );
+throw new IllegalArgumentException( "Can't emulate filter clause for inverse distribution function [" + getName() + "]" );
 }
 sqlAppender.appendSql( getName() );
 sqlAppender.appendSql( '(' );
@@ -67,7 +67,7 @@ public class InverseDistributionWindowEmulation extends InverseDistributionFunct
 return super.convertToSqlAst( walker );
 }
 else if ( currentClause != Clause.SELECT ) {
-throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported!" );
+throw new IllegalArgumentException( "Can't emulate [" + getName() + "] in clause " + currentClause + ". Only the SELECT clause is supported" );
 }
 final ReturnableType<?> resultType = resolveResultType(
 walker.getCreationContext().getMappingMetamodel().getTypeConfiguration()
@@ -41,7 +41,7 @@ public class ModeStatsModeEmulation extends InverseDistributionFunction {
 final boolean caseWrapper = filter != null && !translator.supportsFilterClause();
 sqlAppender.appendSql( "stats_mode(" );
 if ( withinGroup == null || withinGroup.size() != 1 ) {
-throw new IllegalArgumentException( "MODE function requires a WITHIN GROUP clause with exactly one order by item!" );
+throw new IllegalArgumentException( "MODE function requires a WITHIN GROUP clause with exactly one order by item" );
 }
 if ( caseWrapper ) {
 translator.getCurrentClauseStack().push( Clause.WHERE );
@@ -117,7 +117,7 @@ public class DatasourceConnectionProviderImpl implements ConnectionProvider, Con
 @Override
 public Connection getConnection() throws SQLException {
 if ( !available ) {
-throw new HibernateException( "Provider is closed!" );
+throw new HibernateException( "Provider is closed" );
 }
 return useCredentials ? dataSource.getConnection( user, pass ) : dataSource.getConnection();
 }
@@ -295,7 +295,7 @@ public class DriverManagerConnectionProviderImpl
 protected void validateConnectionsReturned() {
 int allocationCount = state.pool.allConnections.size() - state.pool.availableConnections.size();
 if ( allocationCount != 0 ) {
-CONNECTIONS_MESSAGE_LOGGER.error( "Connection leak detected: there are " + allocationCount + " unclosed connections!");
+CONNECTIONS_MESSAGE_LOGGER.error( "Connection leak detected: there are " + allocationCount + " unclosed connections");
 }
 }

@@ -422,7 +422,7 @@ public class DriverManagerConnectionProviderImpl
 }
 }
 throw new HibernateException(
-"The internal connection pool has reached its maximum size and no connection is currently available!" );
+"The internal connection pool has reached its maximum size and no connection is currently available" );
 }
 conn = prepareConnection( conn );
 } while ( conn == null );
@@ -46,7 +46,7 @@ public class JdbcServicesImpl implements JdbcServices, ServiceRegistryAwareServi
 @Override
 public void configure(Map<String, Object> configValues) {
 this.jdbcEnvironment = serviceRegistry.getService( JdbcEnvironment.class );
-assert jdbcEnvironment != null : "JdbcEnvironment was not found!";
+assert jdbcEnvironment != null : "JdbcEnvironment was not found";

 this.multiTenancyEnabled = serviceRegistry.getService(MultiTenantConnectionProvider.class)!=null;

@@ -163,7 +163,7 @@ public class SqlStatementLogger {
 return;
 }
 if ( startTimeNanos <= 0 ) {
-throw new IllegalArgumentException( "startTimeNanos [" + startTimeNanos + "] should be greater than 0!" );
+throw new IllegalArgumentException( "startTimeNanos [" + startTimeNanos + "] should be greater than 0" );
 }

 long queryExecutionMillis = TimeUnit.NANOSECONDS.toMillis( System.nanoTime() - startTimeNanos );
@@ -35,7 +35,7 @@ public class ContextualJdbcConnectionAccess implements JdbcConnectionAccess, Ser
 @Override
 public Connection obtainConnection() throws SQLException {
 if ( tenantIdentifier == null ) {
-throw new HibernateException( "Tenant identifier required!" );
+throw new HibernateException( "Tenant identifier required" );
 }

 try {
@@ -50,7 +50,7 @@ public class ContextualJdbcConnectionAccess implements JdbcConnectionAccess, Ser
 @Override
 public void releaseConnection(Connection connection) throws SQLException {
 if ( tenantIdentifier == null ) {
-throw new HibernateException( "Tenant identifier required!" );
+throw new HibernateException( "Tenant identifier required" );
 }

 try {
@@ -33,8 +33,6 @@ import org.hibernate.engine.jndi.JndiNameException;
 import org.hibernate.engine.spi.CollectionKey;
 import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.id.IntegralDataTypeHolder;
-import org.hibernate.query.QueryLogging;
-import org.hibernate.type.BasicType;
 import org.hibernate.type.SerializationException;
 import org.hibernate.type.Type;

@@ -61,7 +59,7 @@ import static org.jboss.logging.Logger.Level.WARN;
 public interface CoreMessageLogger extends BasicLogger {

 @LogMessage(level = WARN)
-@Message(value = "Already session bound on call to bind(); make sure you clean up your sessions!", id = 2)
+@Message(value = "Already session bound on call to bind(); make sure you clean up your sessions", id = 2)
 void alreadySessionBound();

 @LogMessage(level = INFO)
@@ -445,7 +443,7 @@ public interface CoreMessageLogger extends BasicLogger {
 @Message(value = "Bytecode enhancement failed: %s", id = 142)
 String bytecodeEnhancementFailed(String entityName);

-@Message(value = "Bytecode enhancement failed because no public, protected or package-private default constructor was found for entity: %s. Private constructors don't work with runtime proxies!", id = 143)
+@Message(value = "Bytecode enhancement failed because no public, protected or package-private default constructor was found for entity: %s. Private constructors don't work with runtime proxies", id = 143)
 String bytecodeEnhancementFailedBecauseOfDefaultConstructor(String entityName);

 @LogMessage(level = WARN)
@@ -515,7 +513,7 @@ public interface CoreMessageLogger extends BasicLogger {
 void narrowingProxy(Class concreteProxyClass);

 @LogMessage(level = WARN)
-@Message(value = "FirstResult/maxResults specified on polymorphic query; applying in memory!", id = 180)
+@Message(value = "FirstResult/maxResults specified on polymorphic query; applying in memory", id = 180)
 void needsLimit();

 @LogMessage(level = WARN)
@@ -599,7 +597,7 @@ public interface CoreMessageLogger extends BasicLogger {
 void preparedStatementAlreadyInBatch(String sql);

 @LogMessage(level = WARN)
-@Message(value = "processEqualityExpression() : No expression to process!", id = 203)
+@Message(value = "processEqualityExpression() : No expression to process", id = 203)
 void processEqualityExpression();

 @LogMessage(level = INFO)
@@ -666,7 +664,7 @@ public interface CoreMessageLogger extends BasicLogger {
 void readOnlyCacheConfiguredForMutableCollection(String name);

 @LogMessage(level = WARN)
-@Message(value = "Recognized obsolete hibernate namespace %s. Use namespace %s instead. Refer to Hibernate 3.6 Migration Guide!",
+@Message(value = "Recognized obsolete hibernate namespace %s. Use namespace %s instead. Refer to Hibernate 3.6 Migration Guide",
 id = 223)
 void recognizedObsoleteHibernateNamespace(
 String oldHibernateNamespace,
@@ -1348,7 +1346,7 @@ public interface CoreMessageLogger extends BasicLogger {
 void usingDialect(Dialect dialect);

 @LogMessage(level = ERROR)
-@Message(value = "Don't use old DTDs, read the Hibernate 3.x Migration Guide!", id = 404)
+@Message(value = "Don't use old DTDs, read the Hibernate 3.x Migration Guide", id = 404)
 void usingOldDtd();

 @LogMessage(level = INFO)
@@ -96,7 +96,7 @@ public class FilterImpl implements Filter, Serializable {
 public Filter setParameterList(String name, Collection<?> values) throws HibernateException {
 // Make sure this is a defined parameter and check the incoming value type
 if ( values == null ) {
-throw new IllegalArgumentException( "Collection must be not null!" );
+throw new IllegalArgumentException( "Collection must be not null" );
 }
 JdbcMapping type = definition.getParameterJdbcMapping( name );
 if ( type == null ) {
@@ -506,7 +506,7 @@ public class SessionFactoryImpl implements SessionFactoryImplementor {

 public Session getCurrentSession() throws HibernateException {
 if ( currentSessionContext == null ) {
-throw new HibernateException( "No CurrentSessionContext configured!" );
+throw new HibernateException( "No CurrentSessionContext configured" );
 }
 return currentSessionContext.currentSession();
 }
@@ -457,7 +457,7 @@ public class StatelessSessionImpl extends AbstractSharedSessionContract implemen

 private void managedClose() {
 if ( isClosed() ) {
-throw new SessionException( "Session was already closed!" );
+throw new SessionException( "Session was already closed" );
 }
 close();
 }
@@ -498,7 +498,7 @@ public class BoundedConcurrentHashMap<K, V> extends AbstractMap<K, V>
 coldHit( evicted );
 break;
 case HIR_NONRESIDENT:
-throw new IllegalStateException( "Can't hit a non-resident entry!" );
+throw new IllegalStateException( "Can't hit a non-resident entry" );
 default:
 throw new AssertionError( "Hit with unknown status: " + state );
 }
@@ -1593,7 +1593,7 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
 }
 else {
 throw new IllegalArgumentException(
-"The provided " + settingName + " setting value [" + settingValue + "] is not supported!"
+"The provided " + settingName + " setting value [" + settingValue + "] is not supported"
 );
 }

@@ -1603,7 +1603,7 @@ public class EntityManagerFactoryBuilderImpl implements EntityManagerFactoryBuil
 }
 catch (InstantiationException | IllegalAccessException e) {
 throw new IllegalArgumentException(
-"The " + clazz.getSimpleName() +" class [" + instanceClass + "] could not be instantiated!",
+"The " + clazz.getSimpleName() +" class [" + instanceClass + "] could not be instantiated",
 e
 );
 }
@@ -110,7 +110,7 @@ public class LoaderSqlAstCreationState

 @Override
 public void registerLockMode(String identificationVariable, LockMode explicitLockMode) {
-throw new UnsupportedOperationException( "Registering lock modes should only be done for result set mappings!" );
+throw new UnsupportedOperationException( "Registering lock modes should only be done for result set mappings" );
 }

 @Override
@@ -113,7 +113,7 @@ public abstract class Constraint implements RelationalModel, Exportable, Seriali
 return bigInt.toString( 35 );
 }
 catch ( NoSuchAlgorithmException e ) {
-throw new HibernateException( "Unable to generate a hashed Constraint name!", e );
+throw new HibernateException( "Unable to generate a hashed Constraint name", e );
 }
 }

@@ -644,7 +644,7 @@ public class Table implements RelationalModel, Serializable, ContributableDataba
 public Index addIndex(Index index) {
 Index current = indexes.get( index.getName() );
 if ( current != null ) {
-throw new MappingException( "Index " + index.getName() + " already exists!" );
+throw new MappingException( "Index " + index.getName() + " already exists" );
 }
 indexes.put( index.getName(), index );
 return index;
@@ -653,7 +653,7 @@ public class Table implements RelationalModel, Serializable, ContributableDataba
 public UniqueKey addUniqueKey(UniqueKey uniqueKey) {
 UniqueKey current = uniqueKeys.get( uniqueKey.getName() );
 if ( current != null ) {
-throw new MappingException( "UniqueKey " + uniqueKey.getName() + " already exists!" );
+throw new MappingException( "UniqueKey " + uniqueKey.getName() + " already exists" );
 }
 uniqueKeys.put( uniqueKey.getName(), uniqueKey );
 return uniqueKey;
@@ -3804,7 +3804,7 @@ public abstract class AbstractEntityPersister
 // Ensure that an immutable or non-modifiable entity is not being updated unless it is
 // in the process of being deleted.
 if ( entry == null && !isMutable() ) {
-throw new IllegalStateException( "Updating immutable entity that is not in session yet!" );
+throw new IllegalStateException( "Updating immutable entity that is not in session yet" );
 }
 if ( ( entityMetamodel.isDynamicUpdate() && dirtyFields != null ) ) {
 // We need to generate the UPDATE SQL when dynamic-update="true"
@@ -64,6 +64,6 @@ public interface QueryLogging extends BasicLogger {
 void ignoringUnrecognizedQueryHint(String hintName);

 @LogMessage(level = WARN)
-@Message(value = "firstResult/maxResults specified with collection fetch; applying in memory!", id = 90003004)
+@Message(value = "firstResult/maxResults specified with collection fetch; applying in memory", id = 90003004)
 void firstOrMaxResultsSpecifiedWithCollectionFetch();
 }
@@ -343,7 +343,7 @@ public class AnonymousTupleEmbeddableValuedModelPart implements EmbeddableValued
 boolean selected,
 String resultVariable,
 DomainResultCreationState creationState) {
-throw new UnsupportedOperationException( "AnonymousTupleEmbeddableValuedModelPart is not fetchable!" );
+throw new UnsupportedOperationException( "AnonymousTupleEmbeddableValuedModelPart is not fetchable" );
 }

 @Override
@@ -175,7 +175,7 @@ public class AnonymousTupleTableGroupProducer implements TableGroupProducer, Map
 final EmbeddableValuedModelPart modelPartContainer = (EmbeddableValuedModelPart) existingModelPart;
 for ( Attribute<?, ?> attribute : attributes ) {
 if ( !( attribute instanceof SingularPersistentAttribute<?, ?> ) ) {
-throw new IllegalArgumentException( "Only embeddables without collections are supported!" );
+throw new IllegalArgumentException( "Only embeddables without collections are supported" );
 }
 final DomainType<?> attributeType = ( (SingularPersistentAttribute<?, ?>) attribute ).getType();
 final ModelPart modelPart = createModelPart(
@@ -52,7 +52,7 @@ public class AnonymousTupleType<T> implements TupleType<T>, DomainType<T>, Retur
 final SqmSelectableNode<?> component = components[i];
 final String alias = component.getAlias();
 if ( alias == null ) {
-throw new IllegalArgumentException( "Component at index " + i + " has no alias, but alias is required!" );
+throw new IllegalArgumentException( "Component at index " + i + " has no alias, but alias is required" );
 }
 map.put( alias, i );
 }
@@ -62,7 +62,7 @@ public class AnonymousTupleType<T> implements TupleType<T>, DomainType<T>, Retur
 private static SqmSelectableNode<?>[] extractSqmExpressibles(SqmSubQuery<?> subQuery) {
 final SqmSelectClause selectClause = subQuery.getQuerySpec().getSelectClause();
 if ( selectClause == null || selectClause.getSelectionItems().isEmpty() ) {
-throw new IllegalArgumentException( "subquery has no selection items!" );
+throw new IllegalArgumentException( "subquery has no selection items" );
 }
 // todo: right now, we "snapshot" the state of the subquery when creating this type, but maybe we shouldn't?
 // i.e. what if the subquery changes later on? Or should we somehow mark the subquery to signal,
@@ -129,7 +129,7 @@ public class QuerySplitter {
 final SqmQueryGroup<?> queryGroup = (SqmQueryGroup<?>) queryPart;
 final SqmRoot<?> root = findUnmappedPolymorphicReference( queryGroup.getQueryParts().get( 0 ) );
 if ( root != null ) {
-throw new UnsupportedOperationException( "Polymorphic query group is unsupported!" );
+throw new UnsupportedOperationException( "Polymorphic query group is unsupported" );
 }
 return null;
 }
@@ -3610,7 +3610,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 final SqmFunctionDescriptor functionTemplate = getFunctionDescriptor( "listagg" );
 if ( functionTemplate == null ) {
 throw new SemanticException(
-"The listagg function was not registered for the dialect!"
+"The listagg function was not registered for the dialect"
 );
 }
 final int argumentStartIndex;
@@ -14,11 +14,6 @@ import org.hibernate.query.sqm.produce.function.FunctionReturnTypeResolver;
 import org.hibernate.query.sqm.tree.SqmTypedNode;
 import org.hibernate.query.sqm.tree.predicate.SqmPredicate;
 import org.hibernate.query.sqm.tree.select.SqmOrderByClause;
-import org.hibernate.sql.ast.SqlAstTranslator;
-import org.hibernate.sql.ast.spi.SqlAppender;
-import org.hibernate.sql.ast.tree.SqlAstNode;
-import org.hibernate.sql.ast.tree.predicate.Predicate;
-import org.hibernate.sql.ast.tree.select.SortSpecification;
 import org.hibernate.type.spi.TypeConfiguration;

 import java.util.List;
@@ -111,7 +106,7 @@ public abstract class AbstractSqmSelfRenderingFunctionDescriptor
 QueryEngine queryEngine,
 TypeConfiguration typeConfiguration) {
 if ( functionKind != FunctionKind.AGGREGATE ) {
-throw new UnsupportedOperationException( "The function " + getName() + " is not an aggregate function!" );
+throw new UnsupportedOperationException( "The function " + getName() + " is not an aggregate function" );
 }
 return new SelfRenderingSqmAggregateFunction<>(
 this,
@@ -135,7 +130,7 @@ public abstract class AbstractSqmSelfRenderingFunctionDescriptor
 QueryEngine queryEngine,
 TypeConfiguration typeConfiguration) {
 if ( functionKind != FunctionKind.ORDERED_SET_AGGREGATE ) {
-throw new UnsupportedOperationException( "The function " + getName() + " is not an ordered set-aggregate function!" );
+throw new UnsupportedOperationException( "The function " + getName() + " is not an ordered set-aggregate function" );
 }
 return new SelfRenderingSqmOrderedSetAggregateFunction<>(
 this,
@@ -161,7 +156,7 @@ public abstract class AbstractSqmSelfRenderingFunctionDescriptor
 QueryEngine queryEngine,
 TypeConfiguration typeConfiguration) {
 if ( functionKind != FunctionKind.WINDOW ) {
-throw new UnsupportedOperationException( "The function " + getName() + " is not a window function!" );
+throw new UnsupportedOperationException( "The function " + getName() + " is not a window function" );
 }
 return new SelfRenderingSqmWindowFunction<>(
 this,
@@ -58,7 +58,7 @@ public interface SqmFunctionDescriptor {
 ReturnableType<T> impliedResultType,
 QueryEngine queryEngine,
 TypeConfiguration typeConfiguration) {
-throw new UnsupportedOperationException( "Not an aggregate function!" );
+throw new UnsupportedOperationException( "Not an aggregate function" );
 }

 /**
@@ -72,7 +72,7 @@ public interface SqmFunctionDescriptor {
 ReturnableType<T> impliedResultType,
 QueryEngine queryEngine,
 TypeConfiguration typeConfiguration) {
-throw new UnsupportedOperationException( "Not an ordered set-aggregate function!" );
+throw new UnsupportedOperationException( "Not an ordered set-aggregate function" );
 }

 /**
@@ -87,7 +87,7 @@ public interface SqmFunctionDescriptor {
 ReturnableType<T> impliedResultType,
 QueryEngine queryEngine,
 TypeConfiguration typeConfiguration) {
-throw new UnsupportedOperationException( "Not an aggregate function!" );
+throw new UnsupportedOperationException( "Not an aggregate function" );
 }

 /**
@@ -332,7 +332,7 @@ public class QuerySqmImpl<R>
 );
 default:
 throw new UnsupportedOperationException(
-"The " + immutableEntityUpdateQueryHandlingMode + " is not supported!"
+"The " + immutableEntityUpdateQueryHandlingMode + " is not supported"
 );
 }
 }
@@ -304,7 +304,7 @@ public class SqmCriteriaNodeBuilder implements NodeBuilder, SqmCreationContext,
 queryParts.add( ( (SqmSelectQuery<T>) query1 ).getQueryPart() );
 for ( CriteriaQuery<?> query : queries ) {
 if ( query.getResultType() != resultType ) {
-throw new IllegalArgumentException( "Result type of all operands must match!" );
+throw new IllegalArgumentException( "Result type of all operands must match" );
 }
 queryParts.add( ( (SqmSelectQuery<T>) query ).getQueryPart() );
 }
@@ -667,7 +667,7 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base

 @Override
 public void registerLockMode(String identificationVariable, LockMode explicitLockMode) {
-throw new UnsupportedOperationException( "Registering lock modes should only be done for result set mappings!" );
+throw new UnsupportedOperationException( "Registering lock modes should only be done for result set mappings" );
 }

 public QueryOptions getQueryOptions() {
@@ -1593,7 +1593,7 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
 }
 throw new IllegalArgumentException(
 String.format(
-"Couldn't find cte column %s in cte %s!",
+"Couldn't find cte column %s in cte %s",
 cteColumn.getColumnName(),
 cteTable.getTableExpression()
 )
@@ -3066,7 +3066,7 @@ public abstract class BaseSqmToSqlAstConverter<T extends Statement> extends Base
 else if ( domainType instanceof ManagedDomainType<?> ) {
 for ( Attribute<?, ?> attribute : ( (ManagedDomainType<?>) domainType ).getAttributes() ) {
 if ( !( attribute instanceof SingularPersistentAttribute<?, ?> ) ) {
-throw new IllegalArgumentException( "Only embeddables without collections are supported!" );
+throw new IllegalArgumentException( "Only embeddables without collections are supported" );
 }
 final DomainType<?> attributeType = ( (SingularPersistentAttribute<?, ?>) attribute ).getType();
 addColumnNames( columnNames, attributeType, componentName + "_" + attribute.getName() );
@@ -82,7 +82,7 @@ public abstract class AbstractSqmDmlStatement<E>
 @Override
 public void addCteStatement(SqmCteStatement<?> cteStatement) {
 if ( cteStatements.putIfAbsent( cteStatement.getCteTable().getCteName(), cteStatement ) != null ) {
-throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getCteName() + " already exists!" );
+throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getCteName() + " already exists" );
 }
 }

@@ -113,21 +113,21 @@ public class SqmDerivedRoot<T> extends SqmRoot<T> implements JpaDerivedRoot<T> {

 @Override
 public <S extends T> SqmTreatedRoot<T, S> treatAs(Class<S> treatJavaType) throws PathException {
-throw new UnsupportedOperationException( "Derived roots can not be treated!" );
+throw new UnsupportedOperationException( "Derived roots can not be treated" );
 }

 @Override
 public <S extends T> SqmTreatedRoot<T, S> treatAs(EntityDomainType<S> treatTarget) throws PathException {
-throw new UnsupportedOperationException( "Derived roots can not be treated!" );
+throw new UnsupportedOperationException( "Derived roots can not be treated" );
 }

 @Override
 public <S extends T> SqmFrom<?, S> treatAs(Class<S> treatJavaType, String alias) {
-throw new UnsupportedOperationException( "Derived roots can not be treated!" );
+throw new UnsupportedOperationException( "Derived roots can not be treated" );
 }

 @Override
 public <S extends T> SqmFrom<?, S> treatAs(EntityDomainType<S> treatTarget, String alias) {
-throw new UnsupportedOperationException( "Derived roots can not be treated!" );
+throw new UnsupportedOperationException( "Derived roots can not be treated" );
 }
 }
@@ -86,7 +86,7 @@ public class SqmPluralPartJoin<O,T> extends AbstractSqmJoin<O,T> implements SqmQ

 @Override
 public void setJoinPredicate(SqmPredicate predicate) {
-throw new UnsupportedOperationException( "Setting a predicate for a plural part join is unsupported!" );
+throw new UnsupportedOperationException( "Setting a predicate for a plural part join is unsupported" );
 }

 @Override
@@ -163,21 +163,21 @@ public class SqmDerivedJoin<T> extends AbstractSqmJoin<T, T> implements JpaDeriv

 @Override
 public <S extends T> SqmTreatedEntityJoin<T,S> treatAs(Class<S> treatJavaType) throws PathException {
-throw new UnsupportedOperationException( "Derived joins can not be treated!" );
+throw new UnsupportedOperationException( "Derived joins can not be treated" );
 }
 @Override
 public <S extends T> SqmTreatedEntityJoin<T,S> treatAs(EntityDomainType<S> treatTarget) throws PathException {
-throw new UnsupportedOperationException( "Derived joins can not be treated!" );
+throw new UnsupportedOperationException( "Derived joins can not be treated" );
 }

 @Override
 public <S extends T> SqmFrom<?, S> treatAs(Class<S> treatJavaType, String alias) {
-throw new UnsupportedOperationException( "Derived joins can not be treated!" );
+throw new UnsupportedOperationException( "Derived joins can not be treated" );
 }

 @Override
 public <S extends T> SqmFrom<?, S> treatAs(EntityDomainType<S> treatTarget, String alias) {
-throw new UnsupportedOperationException( "Derived joins can not be treated!" );
+throw new UnsupportedOperationException( "Derived joins can not be treated" );
 }

 }
@@ -95,7 +95,7 @@ public abstract class AbstractSqmSelectQuery<T>
 @Override
 public void addCteStatement(SqmCteStatement<?> cteStatement) {
|
||||||
if ( cteStatements.putIfAbsent( cteStatement.getCteTable().getCteName(), cteStatement ) != null ) {
|
if ( cteStatements.putIfAbsent( cteStatement.getCteTable().getCteName(), cteStatement ) != null ) {
|
||||||
throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getCteName() + " already exists!" );
|
throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getCteName() + " already exists" );
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -161,14 +161,14 @@ public class SqmQueryGroup<T> extends SqmQueryPart<T> implements JpaQueryGroup<T
|
||||||
final SqmQuerySpec<?> querySpec = (SqmQuerySpec<?>) queryPart;
|
final SqmQuerySpec<?> querySpec = (SqmQuerySpec<?>) queryPart;
|
||||||
final List<SqmSelection<?>> selections = querySpec.getSelectClause().getSelections();
|
final List<SqmSelection<?>> selections = querySpec.getSelectClause().getSelections();
|
||||||
if ( firstSelectionSize != selections.size() ) {
|
if ( firstSelectionSize != selections.size() ) {
|
||||||
throw new SemanticException( "All query parts in a query group must have the same arity!" );
|
throw new SemanticException( "All query parts in a query group must have the same arity" );
|
||||||
}
|
}
|
||||||
for ( int j = 0; j < firstSelectionSize; j++ ) {
|
for ( int j = 0; j < firstSelectionSize; j++ ) {
|
||||||
final SqmTypedNode<?> firstSqmSelection = typedNodes.get( j );
|
final SqmTypedNode<?> firstSqmSelection = typedNodes.get( j );
|
||||||
final JavaType<?> firstJavaType = firstSqmSelection.getNodeJavaType();
|
final JavaType<?> firstJavaType = firstSqmSelection.getNodeJavaType();
|
||||||
if ( firstJavaType != selections.get( j ).getNodeJavaType() ) {
|
if ( firstJavaType != selections.get( j ).getNodeJavaType() ) {
|
||||||
throw new SemanticException(
|
throw new SemanticException(
|
||||||
"Select items of the same index must have the same java type across all query parts!"
|
"Select items of the same index must have the same java type across all query parts"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
if ( firstSqmSelection instanceof SqmFrom<?, ?> ) {
|
if ( firstSqmSelection instanceof SqmFrom<?, ?> ) {
|
||||||
|
@ -202,7 +202,7 @@ public class SqmQueryGroup<T> extends SqmQueryPart<T> implements JpaQueryGroup<T
|
||||||
}
|
}
|
||||||
if ( matchingAttrJoin == null || firstAttrJoin.getModel() != matchingAttrJoin.getModel() ) {
|
if ( matchingAttrJoin == null || firstAttrJoin.getModel() != matchingAttrJoin.getModel() ) {
|
||||||
throw new SemanticException(
|
throw new SemanticException(
|
||||||
"All query parts in a query group must have the same join fetches in the same order!"
|
"All query parts in a query group must have the same join fetches in the same order"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
validateFetchesMatch( firstAttrJoin, matchingAttrJoin );
|
validateFetchesMatch( firstAttrJoin, matchingAttrJoin );
|
||||||
|
@ -216,7 +216,7 @@ public class SqmQueryGroup<T> extends SqmQueryPart<T> implements JpaQueryGroup<T
|
||||||
final SqmAttributeJoin<?, ?> attrJoin = (SqmAttributeJoin<?, ?>) sqmJoin;
|
final SqmAttributeJoin<?, ?> attrJoin = (SqmAttributeJoin<?, ?>) sqmJoin;
|
||||||
if ( attrJoin.isFetched() ) {
|
if ( attrJoin.isFetched() ) {
|
||||||
throw new SemanticException(
|
throw new SemanticException(
|
||||||
"All query parts in a query group must have the same join fetches in the same order!"
|
"All query parts in a query group must have the same join fetches in the same order"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -111,7 +111,7 @@ public abstract class SqmQueryPart<T> implements SqmVisitableNode, JpaQueryPart<
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
if ( fetchClauseType == null ) {
|
if ( fetchClauseType == null ) {
|
||||||
throw new IllegalArgumentException( "Fetch clause may not be null!" );
|
throw new IllegalArgumentException( "Fetch clause may not be null" );
|
||||||
}
|
}
|
||||||
fetchExpression.applyInferableType( nodeBuilder.getIntegerType() );
|
fetchExpression.applyInferableType( nodeBuilder.getIntegerType() );
|
||||||
this.fetchExpression = fetchExpression;
|
this.fetchExpression = fetchExpression;
|
||||||
|
|
|
@ -87,7 +87,7 @@ public class SynchronizationCallbackCoordinatorTrackingImpl extends Synchronizat
|
||||||
doAfterCompletion( false, true );
|
doAfterCompletion( false, true );
|
||||||
|
|
||||||
// NOTE : doAfterCompletion calls reset
|
// NOTE : doAfterCompletion calls reset
|
||||||
throw new HibernateException( "Transaction was rolled back in a different thread!" );
|
throw new HibernateException( "Transaction was rolled back in a different thread" );
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -468,7 +468,7 @@ public abstract class AbstractServiceRegistryImpl
|
||||||
*/
|
*/
|
||||||
public synchronized void reactivate() {
|
public synchronized void reactivate() {
|
||||||
if ( !active.compareAndSet( false, true ) ) {
|
if ( !active.compareAndSet( false, true ) ) {
|
||||||
throw new IllegalStateException( "Was not inactive, could not reactivate!" );
|
throw new IllegalStateException( "Was not inactive, could not reactivate" );
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -533,13 +533,13 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
|
||||||
}
|
}
|
||||||
else if ( expression instanceof JdbcParameter ) {
|
else if ( expression instanceof JdbcParameter ) {
|
||||||
if ( jdbcParameterBindings == null ) {
|
if ( jdbcParameterBindings == null ) {
|
||||||
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available!" );
|
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available" );
|
||||||
}
|
}
|
||||||
return (R) getParameterBindValue( (JdbcParameter) expression );
|
return (R) getParameterBindValue( (JdbcParameter) expression );
|
||||||
}
|
}
|
||||||
else if ( expression instanceof SqmParameterInterpretation ) {
|
else if ( expression instanceof SqmParameterInterpretation ) {
|
||||||
if ( jdbcParameterBindings == null ) {
|
if ( jdbcParameterBindings == null ) {
|
||||||
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available!" );
|
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available" );
|
||||||
}
|
}
|
||||||
return (R) getParameterBindValue( (JdbcParameter) ( (SqmParameterInterpretation) expression).getResolvedExpression() );
|
return (R) getParameterBindValue( (JdbcParameter) ( (SqmParameterInterpretation) expression).getResolvedExpression() );
|
||||||
}
|
}
|
||||||
|
@ -553,7 +553,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
|
||||||
}
|
}
|
||||||
else if ( expression instanceof JdbcParameter ) {
|
else if ( expression instanceof JdbcParameter ) {
|
||||||
if ( jdbcParameterBindings == null ) {
|
if ( jdbcParameterBindings == null ) {
|
||||||
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available!" );
|
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available" );
|
||||||
}
|
}
|
||||||
final JdbcParameter parameter = (JdbcParameter) expression;
|
final JdbcParameter parameter = (JdbcParameter) expression;
|
||||||
renderAsLiteral( parameter, getParameterBindValue( parameter ) );
|
renderAsLiteral( parameter, getParameterBindValue( parameter ) );
|
||||||
|
@ -561,7 +561,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
|
||||||
}
|
}
|
||||||
else if ( expression instanceof SqmParameterInterpretation ) {
|
else if ( expression instanceof SqmParameterInterpretation ) {
|
||||||
if ( jdbcParameterBindings == null ) {
|
if ( jdbcParameterBindings == null ) {
|
||||||
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available!" );
|
throw new IllegalArgumentException( "Can't interpret expression because no parameter bindings are available" );
|
||||||
}
|
}
|
||||||
final JdbcParameter parameter = (JdbcParameter) ( (SqmParameterInterpretation) expression).getResolvedExpression();
|
final JdbcParameter parameter = (JdbcParameter) ( (SqmParameterInterpretation) expression).getResolvedExpression();
|
||||||
renderAsLiteral( parameter, getParameterBindValue( parameter ) );
|
renderAsLiteral( parameter, getParameterBindValue( parameter ) );
|
||||||
|
@ -644,7 +644,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
|
||||||
jdbcOperation = translateSelect( (SelectStatement) statement );
|
jdbcOperation = translateSelect( (SelectStatement) statement );
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
throw new IllegalArgumentException( "Unexpected statement!" );
|
throw new IllegalArgumentException( "Unexpected statement" );
|
||||||
}
|
}
|
||||||
|
|
||||||
if ( jdbcParameterBindings != null && CollectionHelper.isNotEmpty( getFilterJdbcParameters() ) ) {
|
if ( jdbcParameterBindings != null && CollectionHelper.isNotEmpty( getFilterJdbcParameters() ) ) {
|
||||||
|
@ -1241,19 +1241,19 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
|
||||||
LockStrategy strategy = LockStrategy.CLAUSE;
|
LockStrategy strategy = LockStrategy.CLAUSE;
|
||||||
if ( !querySpec.getGroupByClauseExpressions().isEmpty() ) {
|
if ( !querySpec.getGroupByClauseExpressions().isEmpty() ) {
|
||||||
if ( Boolean.FALSE.equals( followOnLocking ) ) {
|
if ( Boolean.FALSE.equals( followOnLocking ) ) {
|
||||||
throw new IllegalQueryOperationException( "Locking with GROUP BY is not supported!" );
|
throw new IllegalQueryOperationException( "Locking with GROUP BY is not supported" );
|
||||||
}
|
}
|
||||||
strategy = LockStrategy.FOLLOW_ON;
|
strategy = LockStrategy.FOLLOW_ON;
|
||||||
}
|
}
|
||||||
if ( querySpec.getHavingClauseRestrictions() != null ) {
|
if ( querySpec.getHavingClauseRestrictions() != null ) {
|
||||||
if ( Boolean.FALSE.equals( followOnLocking ) ) {
|
if ( Boolean.FALSE.equals( followOnLocking ) ) {
|
||||||
throw new IllegalQueryOperationException( "Locking with HAVING is not supported!" );
|
throw new IllegalQueryOperationException( "Locking with HAVING is not supported" );
|
||||||
}
|
}
|
||||||
strategy = LockStrategy.FOLLOW_ON;
|
strategy = LockStrategy.FOLLOW_ON;
|
||||||
}
|
}
|
||||||
if ( querySpec.getSelectClause().isDistinct() ) {
|
if ( querySpec.getSelectClause().isDistinct() ) {
|
||||||
if ( Boolean.FALSE.equals( followOnLocking ) ) {
|
if ( Boolean.FALSE.equals( followOnLocking ) ) {
|
||||||
throw new IllegalQueryOperationException( "Locking with DISTINCT is not supported!" );
|
throw new IllegalQueryOperationException( "Locking with DISTINCT is not supported" );
|
||||||
}
|
}
|
||||||
strategy = LockStrategy.FOLLOW_ON;
|
strategy = LockStrategy.FOLLOW_ON;
|
||||||
}
|
}
|
||||||
|
@ -1267,7 +1267,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
|
||||||
if ( tableGroupJoin.getJoinType() != SqlAstJoinType.INNER && !( group instanceof VirtualTableGroup ) ) {
|
if ( tableGroupJoin.getJoinType() != SqlAstJoinType.INNER && !( group instanceof VirtualTableGroup ) ) {
|
||||||
if ( Boolean.FALSE.equals( followOnLocking ) ) {
|
if ( Boolean.FALSE.equals( followOnLocking ) ) {
|
||||||
throw new IllegalQueryOperationException(
|
throw new IllegalQueryOperationException(
|
||||||
"Locking with OUTER joins is not supported!" );
|
"Locking with OUTER joins is not supported" );
|
||||||
}
|
}
|
||||||
return Boolean.TRUE;
|
return Boolean.TRUE;
|
||||||
}
|
}
|
||||||
|
@ -1285,7 +1285,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
|
||||||
if ( tableJoin.getJoinType() != SqlAstJoinType.INNER && !( tableJoin.getJoinedNode() instanceof VirtualTableGroup ) ) {
|
if ( tableJoin.getJoinType() != SqlAstJoinType.INNER && !( tableJoin.getJoinedNode() instanceof VirtualTableGroup ) ) {
|
||||||
if ( Boolean.FALSE.equals( followOnLocking ) ) {
|
if ( Boolean.FALSE.equals( followOnLocking ) ) {
|
||||||
throw new IllegalQueryOperationException(
|
throw new IllegalQueryOperationException(
|
||||||
"Locking with OUTER joins is not supported!" );
|
"Locking with OUTER joins is not supported" );
|
||||||
}
|
}
|
||||||
return Boolean.TRUE;
|
return Boolean.TRUE;
|
||||||
}
|
}
|
||||||
|
@ -1298,7 +1298,7 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
|
||||||
}
|
}
|
||||||
if ( hasAggregateFunctions( querySpec ) ) {
|
if ( hasAggregateFunctions( querySpec ) ) {
|
||||||
if ( Boolean.FALSE.equals( followOnLocking ) ) {
|
if ( Boolean.FALSE.equals( followOnLocking ) ) {
|
||||||
throw new IllegalQueryOperationException( "Locking with aggregate functions is not supported!" );
|
throw new IllegalQueryOperationException( "Locking with aggregate functions is not supported" );
|
||||||
}
|
}
|
||||||
strategy = LockStrategy.FOLLOW_ON;
|
strategy = LockStrategy.FOLLOW_ON;
|
||||||
}
|
}
|
||||||
|
@ -4213,13 +4213,13 @@ public abstract class AbstractSqlAstTranslator<T extends JdbcOperation> implemen
|
||||||
@Override
|
@Override
|
||||||
public void visitTableGroup(TableGroup tableGroup) {
|
public void visitTableGroup(TableGroup tableGroup) {
|
||||||
// TableGroup and TableGroup handling should be performed as part of `#visitFromClause`...
|
// TableGroup and TableGroup handling should be performed as part of `#visitFromClause`...
|
||||||
throw new UnsupportedOperationException( "This should never be invoked as org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter.visitTableGroup should handle this!" );
|
throw new UnsupportedOperationException( "This should never be invoked as org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter.visitTableGroup should handle this" );
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void visitTableGroupJoin(TableGroupJoin tableGroupJoin) {
|
public void visitTableGroupJoin(TableGroupJoin tableGroupJoin) {
|
||||||
// TableGroup and TableGroupJoin handling should be performed as part of `#visitFromClause`...
|
// TableGroup and TableGroupJoin handling should be performed as part of `#visitFromClause`...
|
||||||
throw new UnsupportedOperationException( "This should never be invoked as org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter.visitTableGroup should handle this!" );
|
throw new UnsupportedOperationException( "This should never be invoked as org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter.visitTableGroup should handle this" );
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
|
|
@ -47,7 +47,7 @@ public abstract class AbstractStatement implements Statement, CteContainer {
|
||||||
@Override
|
@Override
|
||||||
public void addCteStatement(CteStatement cteStatement) {
|
public void addCteStatement(CteStatement cteStatement) {
|
||||||
if ( cteStatements.putIfAbsent( cteStatement.getCteTable().getTableExpression(), cteStatement ) != null ) {
|
if ( cteStatements.putIfAbsent( cteStatement.getCteTable().getTableExpression(), cteStatement ) != null ) {
|
||||||
throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getTableExpression() + " already exists!" );
|
throw new IllegalArgumentException( "A CTE with the label " + cteStatement.getCteTable().getTableExpression() + " already exists" );
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -113,7 +113,7 @@ public abstract class QueryPart implements SqlAstNode, Expression, DomainResultP
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
if ( fetchClauseType == null ) {
|
if ( fetchClauseType == null ) {
|
||||||
throw new IllegalArgumentException( "Fetch clause may not be null!" );
|
throw new IllegalArgumentException( "Fetch clause may not be null" );
|
||||||
}
|
}
|
||||||
this.fetchClauseExpression = fetchClauseExpression;
|
this.fetchClauseExpression = fetchClauseExpression;
|
||||||
this.fetchClauseType = fetchClauseType;
|
this.fetchClauseType = fetchClauseType;
|
||||||
|
|
|
@ -186,7 +186,7 @@ public class QuerySpec extends QueryPart implements SqlAstNode, PredicateContain
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
throw new UnsupportedOperationException("Domain result for non-scalar subquery shouldn't be created!");
|
throw new UnsupportedOperationException("Domain result for non-scalar subquery shouldn't be created");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -183,7 +183,7 @@ public class ListResultsConsumer<R> implements ResultsConsumer<List<R>, R> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
throw new IllegalStateException( "Should not reach this!" );
|
throw new IllegalStateException( "Should not reach this" );
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -70,12 +70,12 @@ public class NullJdbcType implements JdbcType {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options) {
|
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options) {
|
||||||
throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null!" );
|
throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null" );
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options) {
|
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options) {
|
||||||
throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null!" );
|
throw new UnsupportedOperationException( getClass().getName() + " should only be used to bind null" );
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@ -33,7 +33,7 @@ public class MappedSuperClassIdPropertyBasicAttributeOverrideTest {
|
||||||
}
|
}
|
||||||
catch (MappingException expected) {
|
catch (MappingException expected) {
|
||||||
assertEquals(
|
assertEquals(
|
||||||
"You cannot override the [uid] non-identifier property from the [org.hibernate.orm.test.annotations.override.mappedsuperclass.MappedSuperClassWithUuidAsBasic] base class or @MappedSuperclass and make it an identifier in the [org.hibernate.orm.test.annotations.override.mappedsuperclass.SubclassWithUuidAsId] subclass!",
|
"You cannot override the [uid] non-identifier property from the [org.hibernate.orm.test.annotations.override.mappedsuperclass.MappedSuperClassWithUuidAsBasic] base class or @MappedSuperclass and make it an identifier in the [org.hibernate.orm.test.annotations.override.mappedsuperclass.SubclassWithUuidAsId] subclass",
|
||||||
expected.getMessage()
|
expected.getMessage()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
@ -198,7 +198,7 @@ public class OracleFollowOnLockingTest extends
|
||||||
);
|
);
|
||||||
assertTrue(
|
assertTrue(
|
||||||
expected.getCause().getMessage().contains(
|
expected.getCause().getMessage().contains(
|
||||||
"Locking with OFFSET is not supported!"
|
"Locking with OFFSET is not supported"
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -278,7 +278,7 @@ public class OracleFollowOnLockingTest extends
|
||||||
);
|
);
|
||||||
assertTrue(
|
assertTrue(
|
||||||
expected.getCause().getMessage().contains(
|
expected.getCause().getMessage().contains(
|
||||||
"Locking with ORDER BY is not supported!"
|
"Locking with ORDER BY is not supported"
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -356,7 +356,7 @@ public class OracleFollowOnLockingTest extends
|
||||||
);
|
);
|
||||||
assertTrue(
|
assertTrue(
|
||||||
expected.getCause().getMessage().contains(
|
expected.getCause().getMessage().contains(
|
||||||
"Locking with DISTINCT is not supported!"
|
"Locking with DISTINCT is not supported"
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -434,7 +434,7 @@ public class OracleFollowOnLockingTest extends
|
||||||
);
|
);
|
||||||
assertTrue(
|
assertTrue(
|
||||||
expected.getCause().getMessage().contains(
|
expected.getCause().getMessage().contains(
|
||||||
"Locking with GROUP BY is not supported!"
|
"Locking with GROUP BY is not supported"
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -505,7 +505,7 @@ public class OracleFollowOnLockingTest extends
|
||||||
);
|
);
|
||||||
assertTrue(
|
assertTrue(
|
||||||
expected.getCause().getMessage().contains(
|
expected.getCause().getMessage().contains(
|
||||||
"Locking with set operators is not supported!"
|
"Locking with set operators is not supported"
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
@ -39,7 +39,7 @@ public class DuplicateEntityNameTest extends BaseCoreFunctionalTestCase {
|
||||||
fail("Should throw DuplicateMappingException");
|
fail("Should throw DuplicateMappingException");
|
||||||
}
|
}
|
||||||
catch (DuplicateMappingException e) {
|
catch (DuplicateMappingException e) {
|
||||||
assertEquals( "The [org.hibernate.orm.test.entityname.DuplicateEntityNameTest$Purchase1] and [org.hibernate.orm.test.entityname.DuplicateEntityNameTest$Purchase2] entities share the same JPA entity name: [Purchase], which is not allowed!", e.getMessage() );
|
assertEquals( "The [org.hibernate.orm.test.entityname.DuplicateEntityNameTest$Purchase1] and [org.hibernate.orm.test.entityname.DuplicateEntityNameTest$Purchase2] entities share the same JPA entity name: [Purchase], which is not allowed", e.getMessage() );
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -90,7 +90,7 @@ public class JtaAfterCompletionTest extends BaseSessionFactoryFunctionalTest {
|
||||||
}
|
}
|
||||||
catch (HibernateException e) {
|
catch (HibernateException e) {
|
||||||
// This is expected
|
// This is expected
|
||||||
assertEquals( "Transaction was rolled back in a different thread!", e.getMessage() );
|
assertEquals( "Transaction was rolled back in a different thread", e.getMessage() );
|
||||||
}
|
}
|
||||||
|
|
||||||
// verify that the callback was fired.
|
// verify that the callback was fired.
|
||||||
|
|
|
@ -98,7 +98,7 @@ public class JtaTransactionAfterCallbackTest extends BaseEnversJPAFunctionalTest
|
||||||
}
|
}
|
||||||
catch ( PersistenceException e ) {
|
catch ( PersistenceException e ) {
|
||||||
// we expect this
|
// we expect this
|
||||||
assertTrue( e.getMessage().contains( "Transaction was rolled back in a different thread!" ) );
|
assertTrue( e.getMessage().contains( "Transaction was rolled back in a different thread" ) );
|
||||||
}
|
}
|
||||||
|
|
||||||
// test the audit process manager was flushed
|
// test the audit process manager was flushed
|
||||||
|
|