HHH-14642, HHH-13717 Various JPA Criteria related fixes
* Get rid of unnecessary whitespace and optional keywords in generated SQL
* Handle type inference issues with databases that require rendering casted parameters in some situations
* Ensure the SQM model is fully serializable
* Ensure JPA Criteria throws the expected exceptions
* Make sure the JPA Criteria implementations work properly
* Move the jpa.test.callback and jpa.test.criteria packages
* Improve the reuse of SqmPath instances
* Get rid of many raw-types related warnings
* Make Predicate extend Expression and handle SQL rendering/emulation
* Support fetching SqmTuple as array
* Implement treat operator support
parent a216a23ae4
commit 77c1370e45
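For orientation, a hedged JPA Criteria sketch of the kind of usage the treat-operator and predicate-as-expression items above target. It is not part of the diff below; the Payment and CreditCardPayment entities, their attributes, and the open entityManager are assumed purely for illustration.

// Assumes hypothetical entities: Payment, and CreditCardPayment (a Payment subtype with a cardNumber attribute).
CriteriaBuilder cb = entityManager.getCriteriaBuilder();

// Treat operator: downcast the root to a subtype and filter on a subtype attribute.
CriteriaQuery<Payment> query = cb.createQuery( Payment.class );
Root<Payment> root = query.from( Payment.class );
Root<CreditCardPayment> cc = cb.treat( root, CreditCardPayment.class );
query.select( root ).where( cb.like( cc.get( "cardNumber" ), "4%" ) );
List<Payment> creditCardPayments = entityManager.createQuery( query ).getResultList();

// Predicate extends Expression: a boolean-valued predicate can be selected like any other expression.
CriteriaQuery<Object[]> flags = cb.createQuery( Object[].class );
Root<Payment> p = flags.from( Payment.class );
flags.multiselect( p.get( "id" ), cb.gt( p.get( "amount" ), 100 ) );
List<Object[]> idAndIsLarge = entityManager.createQuery( flags ).getResultList();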
@@ -2,7 +2,7 @@
 
 mysql_5_7() {
 docker rm -f mysql || true
-docker run --name mysql -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d mysql:5.7 --character-set-server=utf8mb4 --collation-server=utf8mb4_general_cs
+docker run --name mysql -e MYSQL_USER=hibernate_orm_test -e MYSQL_PASSWORD=hibernate_orm_test -e MYSQL_DATABASE=hibernate_orm_test -e MYSQL_ROOT_PASSWORD=hibernate_orm_test -p3306:3306 -d mysql:5.7 --character-set-server=utf8mb4 --collation-server=utf8mb4_bin
 # Give the container some time to start
 OUTPUT=
 n=0
@@ -27,8 +27,8 @@ import org.hibernate.Session;
 import org.hibernate.dialect.CockroachDialect;
 import org.hibernate.dialect.DerbyDialect;
 import org.hibernate.dialect.H2Dialect;
-import org.hibernate.dialect.MySQL5Dialect;
-import org.hibernate.dialect.Oracle8iDialect;
+import org.hibernate.dialect.MySQLDialect;
+import org.hibernate.dialect.OracleDialect;
 import org.hibernate.dialect.PostgreSQLDialect;
 import org.hibernate.dialect.SQLServerDialect;
 import org.hibernate.jpa.test.BaseEntityManagerFunctionalTestCase;

@@ -1355,7 +1355,8 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
 });
 }
 
-@Test @RequiresDialect(H2Dialect.class)
+@Test
+@RequiresDialect(H2Dialect.class)
 public void test_hql_current_time_function_example() {
 doInJPA( this::entityManagerFactory, entityManager -> {
 //tag::hql-current-time-function-example[]

@@ -1385,8 +1386,8 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
 
 @Test
 @RequiresDialect(H2Dialect.class)
-@RequiresDialect(Oracle8iDialect.class)
-@RequiresDialect(MySQL5Dialect.class)
+@RequiresDialect(OracleDialect.class)
+@RequiresDialect(MySQLDialect.class)
 public void test_hql_bit_length_function_example() {
 doInJPA( this::entityManagerFactory, entityManager -> {
 //tag::hql-bit-length-function-example[]

@@ -1940,7 +1941,7 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
 @Test
 @RequiresDialect(H2Dialect.class)
 @RequiresDialect(PostgreSQLDialect.class)
-@RequiresDialect(MySQL5Dialect.class)
+@RequiresDialect(MySQLDialect.class)
 public void test_hql_relational_comparisons_example_3() {
 
 doInJPA( this::entityManagerFactory, entityManager -> {

@@ -2145,7 +2146,7 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
 @Test
 @RequiresDialect(H2Dialect.class)
 @RequiresDialect(PostgreSQLDialect.class)
-@RequiresDialect(MySQL5Dialect.class)
+@RequiresDialect(MySQLDialect.class)
 public void test_hql_between_predicate_example_2() {
 
 doInJPA( this::entityManagerFactory, entityManager -> {

@@ -2399,9 +2400,6 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
 }
 
 @Test
-@RequiresDialect(H2Dialect.class)
-@RequiresDialect(PostgreSQLDialect.class)
-@RequiresDialect(MySQL5Dialect.class)
 public void test_hql_group_by_example_3() {
 
 doInJPA( this::entityManagerFactory, entityManager -> {

@@ -2421,9 +2419,6 @@ public class HQLTest extends BaseEntityManagerFunctionalTestCase {
 }
 
 @Test
-@RequiresDialect(H2Dialect.class)
-@RequiresDialect(PostgreSQLDialect.class)
-@RequiresDialect(MySQL5Dialect.class)
 public void test_hql_group_by_example_4() {
 
 doInJPA( this::entityManagerFactory, entityManager -> {
@@ -167,6 +167,7 @@ public class CUBRIDDialect extends Dialect {
 CommonFunctionFactory.lastDay( queryEngine );
 CommonFunctionFactory.weekQuarter( queryEngine );
 CommonFunctionFactory.octetLength( queryEngine );
+CommonFunctionFactory.bitLength( queryEngine );
 CommonFunctionFactory.md5( queryEngine );
 CommonFunctionFactory.trunc( queryEngine );
 CommonFunctionFactory.truncate( queryEngine );

@@ -386,11 +387,11 @@ public class CUBRIDDialect extends Dialect {
 public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType) {
 switch (unit) {
 case NANOSECOND:
-return "adddate(?3, interval (?2)/1e6 millisecond)";
+return "adddate(?3,interval (?2)/1e6 millisecond)";
 case NATIVE:
-return "adddate(?3, interval ?2 millisecond)";
+return "adddate(?3,interval ?2 millisecond)";
 default:
-return "adddate(?3, interval ?2 ?1)";
+return "adddate(?3,interval ?2 ?1)";
 }
 }

@@ -146,9 +146,10 @@ public class CacheDialect extends Dialect {
 queryEngine.getSqmFunctionRegistry().registerBinaryTernaryPattern(
 "locate",
 StandardBasicTypes.INTEGER,
-"$find(?2, ?1)",
-"$find(?2, ?1, ?3)"
+"$find(?2,?1)",
+"$find(?2,?1,?3)"
 ).setArgumentListSignature("(pattern, string[, start])");
+CommonFunctionFactory.bitLength_pattern( queryEngine, "($length(?1)*8)" );
 
 useJdbcEscape(queryEngine, "sin");
 useJdbcEscape(queryEngine, "cos");

@@ -181,7 +182,7 @@ public class CacheDialect extends Dialect {
 
 @Override
 public String extractPattern(TemporalUnit unit) {
-return "datepart(?1, ?2)";
+return "datepart(?1,?2)";
 }
 
 @Override

@@ -189,9 +190,9 @@ public class CacheDialect extends Dialect {
 switch (unit) {
 case NANOSECOND:
 case NATIVE:
-return "dateadd(millisecond, (?2)/1e6, ?3)";
+return "dateadd(millisecond,(?2)/1e6,?3)";
 default:
-return "dateadd(?1, ?2, ?3)";
+return "dateadd(?1,?2,?3)";
 }
 }

@@ -200,9 +201,9 @@ public class CacheDialect extends Dialect {
 switch (unit) {
 case NANOSECOND:
 case NATIVE:
-return "datediff(millisecond, ?2, ?3)*1e6";
+return "datediff(millisecond,?2,?3)*1e6";
 default:
-return "datediff(?1, ?2, ?3)";
+return "datediff(?1,?2,?3)";
 }
 }
@@ -248,7 +248,7 @@ public class FirebirdDialect extends Dialect {
 "locate",
 StandardBasicTypes.INTEGER,
 "position(?1 in ?2)",
-"position(?1, ?2, ?3)"
+"position(?1,?2,?3)"
 ).setArgumentListSignature( "(pattern, string[, start])" );
 functionRegistry.namedDescriptorBuilder( "ascii_val" )
 .setExactArgumentCount( 1 )

@@ -593,7 +593,7 @@ public class FirebirdDialect extends Dialect {
 ? "select rdb$generator_name from rdb$generators"
 // Note: Firebird 3 has an 'off by increment' bug (fixed in Firebird 4), see
 // http://tracker.firebirdsql.org/browse/CORE-6084
-: "select rdb$generator_name, rdb$initial_value, rdb$generator_increment from rdb$generators where coalesce(rdb$system_flag, 0) = 0";
+: "select rdb$generator_name,rdb$initial_value,rdb$generator_increment from rdb$generators where coalesce(rdb$system_flag,0)=0";
 }
 
 @Override

@@ -30,6 +30,7 @@ import org.hibernate.sql.ast.tree.expression.QueryLiteral;
 import org.hibernate.sql.ast.tree.expression.SelfRenderingExpression;
 import org.hibernate.sql.ast.tree.expression.SqlTuple;
 import org.hibernate.sql.ast.tree.expression.Summarization;
+import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
 import org.hibernate.sql.ast.tree.predicate.SelfRenderingPredicate;
 import org.hibernate.sql.ast.tree.select.QueryGroup;
 import org.hibernate.sql.ast.tree.select.QueryPart;

@@ -50,6 +51,16 @@ public class FirebirdSqlAstTranslator<T extends JdbcOperation> extends AbstractS
 super( sessionFactory, statement );
 }
 
+@Override
+public void visitBooleanExpressionPredicate(BooleanExpressionPredicate booleanExpressionPredicate) {
+if ( getDialect().getVersion() >= 300 ) {
+booleanExpressionPredicate.getExpression().accept( this );
+}
+else {
+super.visitBooleanExpressionPredicate( booleanExpressionPredicate );
+}
+}
+
 @Override
 protected String getForUpdate() {
 return " with lock";
@@ -168,8 +168,8 @@ public class InformixDialect extends Dialect {
 queryEngine.getSqmFunctionRegistry().registerBinaryTernaryPattern(
 "locate",
 StandardBasicTypes.INTEGER,
-"instr(?2, ?1)",
-"instr(?2, ?1, ?3)"
+"instr(?2,?1)",
+"instr(?2,?1,?3)"
 ).setArgumentListSignature("(pattern, string[, start])");
 
 //coalesce() and nullif() both supported since Informix 12

@@ -307,7 +307,7 @@ public class InformixDialect extends Dialect {
 
 @Override
 public String getQuerySequencesString() {
-return "select systables.tabname as sequence_name, syssequences.* from syssequences join systables on syssequences.tabid = systables.tabid where tabtype = 'Q'";
+return "select systables.tabname as sequence_name,syssequences.* from syssequences join systables on syssequences.tabid=systables.tabid where tabtype='Q'";
 }
 
 @Override

@@ -322,7 +322,7 @@ public class InformixDialect extends Dialect {
 
 @Override
 public String getFromDual() {
-return "from (select 0 from systables where tabid = 1) as dual";
+return "from (select 0 from systables where tabid=1) as dual";
 }
 
 @Override

@@ -145,7 +145,7 @@ public class InformixSqlAstTranslator<T extends JdbcOperation> extends AbstractS
 
 @Override
 protected String getFromDual() {
-return " from (select 0 from systables where tabid = 1) as dual";
+return " from (select 0 from systables where tabid=1) dual";
 }
 
 @Override

@@ -235,15 +235,16 @@ public class IngresDialect extends Dialect {
 CommonFunctionFactory.position( queryEngine );
 CommonFunctionFactory.format_dateFormat( queryEngine );
 CommonFunctionFactory.dateTrunc( queryEngine );
+CommonFunctionFactory.bitLength_pattern( queryEngine, "octet_length(hex(?1))*4" );
 
 queryEngine.getSqmFunctionRegistry().registerBinaryTernaryPattern(
 "locate",
 StandardBasicTypes.INTEGER,
 "position(?1 in ?2)",
-"(position(?1 in substring(?2 from ?3)) + (?3) - 1)"
+"(position(?1 in substring(?2 from ?3))+(?3)-1)"
 ).setArgumentListSignature("(pattern, string[, start])");
 
-queryEngine.getSqmFunctionRegistry().registerPattern( "extract", "date_part('?1', ?2)", StandardBasicTypes.INTEGER );
+queryEngine.getSqmFunctionRegistry().registerPattern( "extract", "date_part('?1',?2)", StandardBasicTypes.INTEGER );
 
 CommonFunctionFactory.bitandorxornot_bitAndOrXorNot(queryEngine);

@@ -290,13 +291,13 @@ public class IngresDialect extends Dialect {
 
 @Override
 public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType) {
-return "timestampadd(?1, ?2, ?3)";
+return "timestampadd(?1,?2,?3)";
 
 }
 
 @Override
 public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
-return "timestampdiff(?1, ?2, ?3)";
+return "timestampdiff(?1,?2,?3)";
 }
 
 @Override

@@ -151,7 +151,7 @@ public class IngresSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
 @Override
 protected String getFromDual() {
 //this is only necessary if the query has a where clause
-return " from (select 0) as dual";
+return " from (select 0) dual";
 }
 
 @Override
@@ -139,7 +139,7 @@ public class MaxDBDialect extends Dialect {
 
 queryEngine.getSqmFunctionRegistry().registerBinaryTernaryPattern(
 "locate",
-StandardBasicTypes.INTEGER, "index(?2, ?1)", "index(?2, ?1, ?3)"
+StandardBasicTypes.INTEGER, "index(?2,?1)", "index(?2,?1,?3)"
 ).setArgumentListSignature("(pattern, string[, start])");
 }

@@ -68,7 +68,7 @@ public class MaxDBSqlAstTranslator<T extends JdbcOperation> extends AbstractSqlA
 visitDecodeCaseSearchedExpression( caseSearchedExpression );
 }
 else {
-visitAnsiCaseSearchedExpression( caseSearchedExpression );
+super.visitCaseSearchedExpression( caseSearchedExpression, inSelect );
 }
 }

@@ -101,6 +101,7 @@ public class MimerSQLDialect extends Dialect {
 
 CommonFunctionFactory.soundex( queryEngine );
 CommonFunctionFactory.octetLength( queryEngine );
+CommonFunctionFactory.bitLength( queryEngine );
 CommonFunctionFactory.truncate( queryEngine );
 CommonFunctionFactory.repeat( queryEngine );
 CommonFunctionFactory.pad_repeat( queryEngine );

@@ -161,7 +162,7 @@ public class MimerSQLDialect extends Dialect {
 
 public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
 StringBuilder pattern = new StringBuilder();
-pattern.append("cast((?3 - ?2) ");
+pattern.append("cast((?3-?2) ");
 switch (unit) {
 case NATIVE:
 case NANOSECOND:

@@ -209,13 +210,13 @@
 switch ( unit ) {
 case NATIVE:
 case NANOSECOND:
-return "(?3 + (?2)/1e9 * interval '1' second)";
+return "(?3+(?2)/1e9*interval '1' second)";
 case QUARTER:
-return "(?3 + (?2) * interval '3' month)";
+return "(?3+(?2)*interval '3' month)";
 case WEEK:
-return "(?3 + (?2) * interval '7' day)";
+return "(?3+(?2)*interval '7' day)";
 default:
-return "(?3 + (?2) * interval '1' ?1)";
+return "(?3+(?2)*interval '1' ?1)";
 }
 }
@@ -223,11 +223,11 @@ public class RDMSOS2200Dialect extends Dialect {
 public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType) {
 switch (unit) {
 case NANOSECOND:
-return "timestampadd('SQL_TSI_FRAC_SECOND', (?2)/1e3, ?3)";
+return "timestampadd('SQL_TSI_FRAC_SECOND',(?2)/1e3,?3)";
 case NATIVE:
-return "timestampadd('SQL_TSI_FRAC_SECOND', ?2, ?3)";
+return "timestampadd('SQL_TSI_FRAC_SECOND',?2,?3)";
 default:
-return "dateadd('?1', ?2, ?3)";
+return "dateadd('?1',?2,?3)";
 }
 }

@@ -235,11 +235,11 @@
 public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
 switch (unit) {
 case NANOSECOND:
-return "timestampdiff('SQL_TSI_FRAC_SECOND', ?2, ?3)*1e3";
+return "timestampdiff('SQL_TSI_FRAC_SECOND',?2,?3)*1e3";
 case NATIVE:
-return "timestampdiff('SQL_TSI_FRAC_SECOND', ?2, ?3)";
+return "timestampdiff('SQL_TSI_FRAC_SECOND',?2,?3)";
 default:
-return "dateadd('?1', ?2, ?3)";
+return "dateadd('?1',?2,?3)";
 }
 }

@@ -325,7 +325,7 @@ public class RDMSOS2200Dialect extends Dialect {
 
 @Override
 public String getFromDual() {
-return "from rdms.rdms_dummy where key_col = 1";
+return "from rdms.rdms_dummy where key_col=1";
 }
 
 @Override

@@ -138,7 +138,7 @@ public class RDMSOS2200SqlAstTranslator<T extends JdbcOperation> extends Abstrac
 
 @Override
 protected String getFromDual() {
-return " from rdms.rdms_dummy where key_col = 1";
+return " from rdms.rdms_dummy where key_col=1";
 }
 
 @Override
@@ -126,27 +126,27 @@ public class SQLiteDialect extends Dialect {
 public String extractPattern(TemporalUnit unit) {
 switch ( unit ) {
 case SECOND:
-return "cast(strftime('%S.%f', ?2) as double)";
+return "cast(strftime('%S.%f',?2) as double)";
 case MINUTE:
-return "strftime('%M', ?2)";
+return "strftime('%M',?2)";
 case HOUR:
-return "strftime('%H', ?2)";
+return "strftime('%H',?2)";
 case DAY:
 case DAY_OF_MONTH:
-return "(strftime('%d', ?2)+1)";
+return "(strftime('%d',?2)+1)";
 case MONTH:
-return "strftime('%m', ?2)";
+return "strftime('%m',?2)";
 case YEAR:
-return "strftime('%Y', ?2)";
+return "strftime('%Y',?2)";
 case DAY_OF_WEEK:
-return "(strftime('%w', ?2)+1)";
+return "(strftime('%w',?2)+1)";
 case DAY_OF_YEAR:
-return "strftime('%j', ?2)";
+return "strftime('%j',?2)";
 case EPOCH:
-return "strftime('%s', ?2)";
+return "strftime('%s',?2)";
 case WEEK:
 // Thanks https://stackoverflow.com/questions/15082584/sqlite-return-wrong-week-number-for-2013
-return "((strftime('%j', date(?2, '-3 days', 'weekday 4'))-1)/7+1)";
+return "((strftime('%j',date(?2,'-3 days','weekday 4'))-1)/7+1)";
 default:
 return super.extractPattern(unit);
 }

@@ -158,13 +158,13 @@ public class SQLiteDialect extends Dialect {
 switch ( unit ) {
 case NANOSECOND:
 case NATIVE:
-return "datetime(?3, '+?2 seconds')";
+return "datetime(?3,'+?2 seconds')";
 case QUARTER: //quarter is not supported in interval literals
-return function + "(?3, '+'||(?2*3)||' months')";
+return function + "(?3,'+'||(?2*3)||' months')";
 case WEEK: //week is not supported in interval literals
-return function + "(?3, '+'||(?2*7)||' days')";
+return function + "(?3,'+'||(?2*7)||' days')";
 default:
-return function + "(?3, '+?2 ?1s')";
+return function + "(?3,'+?2 ?1s')";
 }
 }

@@ -244,20 +244,20 @@ public class SQLiteDialect extends Dialect {
 queryEngine.getSqmFunctionRegistry().registerBinaryTernaryPattern(
 "locate",
 StandardBasicTypes.INTEGER,
-"instr(?2, ?1)",
-"instr(?2, ?1, ?3)"
+"instr(?2,?1)",
+"instr(?2,?1,?3)"
 ).setArgumentListSignature("(pattern, string[, start])");
 queryEngine.getSqmFunctionRegistry().registerBinaryTernaryPattern(
 "lpad",
 StandardBasicTypes.STRING,
-"(substr(replace(hex(zeroblob(?2)), '00', ' '), 1, ?2 - length(?1))||?1)",
-"(substr(replace(hex(zeroblob(?2)), '00', ?3), 1, ?2 - length(?1))||?1)"
+"(substr(replace(hex(zeroblob(?2)),'00',' '),1,?2-length(?1))||?1)",
+"(substr(replace(hex(zeroblob(?2)),'00',?3),1,?2-length(?1))||?1)"
 ).setArgumentListSignature("(string, length[, padding])");
 queryEngine.getSqmFunctionRegistry().registerBinaryTernaryPattern(
 "rpad",
 StandardBasicTypes.STRING,
-"(?1||substr(replace(hex(zeroblob(?2)), '00', ' '), 1, ?2 - length(?1)))",
-"(?1||substr(replace(hex(zeroblob(?2)), '00', ?3), 1, ?2 - length(?1)))"
+"(?1||substr(replace(hex(zeroblob(?2)),'00',' '),1,?2-length(?1)))",
+"(?1||substr(replace(hex(zeroblob(?2)),'00',?3),1,?2-length(?1)))"
 ).setArgumentListSignature("(string, length[, padding])");
 
 queryEngine.getSqmFunctionRegistry().namedDescriptorBuilder("format", "strftime")

@@ -269,13 +269,13 @@ public class SQLiteDialect extends Dialect {
 if (!supportsMathFunctions() ) {
 queryEngine.getSqmFunctionRegistry().patternDescriptorBuilder(
 "floor",
-"(cast(?1 as int) - (?1 < cast(?1 as int)))"
+"(cast(?1 as int)-(?1<cast(?1 as int)))"
 ).setReturnTypeResolver( StandardFunctionReturnTypeResolvers.useArgType( 1 ) )
 .setExactArgumentCount( 1 )
 .register();
 queryEngine.getSqmFunctionRegistry().patternDescriptorBuilder(
 "ceiling",
-"(cast(?1 as int) + (?1 > cast(?1 as int)))"
+"(cast(?1 as int)+(?1>cast(?1 as int)))"
 ).setReturnTypeResolver( StandardFunctionReturnTypeResolvers.useArgType( 1 ) )
 .setExactArgumentCount( 1 )
 .register();

@@ -288,15 +288,15 @@ public class SQLiteDialect extends Dialect {
 case BOTH:
 return character == ' '
 ? "trim(?1)"
-: "trim(?1, '" + character + "')";
+: "trim(?1,'" + character + "')";
 case LEADING:
 return character == ' '
 ? "ltrim(?1)"
-: "ltrim(?1, '" + character + "')";
+: "ltrim(?1,'" + character + "')";
 case TRAILING:
 return character == ' '
 ? "rtrim(?1)"
-: "rtrim(?1, '" + character + "')";
+: "rtrim(?1,'" + character + "')";
 }
 throw new UnsupportedOperationException( "Unsupported specification: " + specification );
 }
@@ -7,14 +7,18 @@
 package org.hibernate.community.dialect;
 
 import java.util.List;
+import java.util.function.Consumer;
 
 import org.hibernate.LockMode;
 import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.query.ComparisonOperator;
+import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
 import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
 import org.hibernate.sql.ast.spi.SqlSelection;
 import org.hibernate.sql.ast.tree.Statement;
 import org.hibernate.sql.ast.tree.cte.CteStatement;
+import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
+import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
 import org.hibernate.sql.ast.tree.expression.Expression;
 import org.hibernate.sql.ast.tree.expression.Literal;
 import org.hibernate.sql.ast.tree.expression.SqlTuple;

@@ -36,6 +40,58 @@ public class SybaseAnywhereSqlAstTranslator<T extends JdbcOperation> extends Abs
 super( sessionFactory, statement );
 }
 
+// Sybase Anywhere does not allow CASE expressions where all result arms contain plain parameters.
+// At least one result arm must provide some type context for inference,
+// so we cast the first result arm if we encounter this condition
+
+@Override
+protected void visitAnsiCaseSearchedExpression(
+CaseSearchedExpression caseSearchedExpression,
+Consumer<Expression> resultRenderer) {
+if ( getParameterRenderingMode() == SqlAstNodeRenderingMode.DEFAULT && areAllResultsParameters( caseSearchedExpression ) ) {
+final List<CaseSearchedExpression.WhenFragment> whenFragments = caseSearchedExpression.getWhenFragments();
+final Expression firstResult = whenFragments.get( 0 ).getResult();
+super.visitAnsiCaseSearchedExpression(
+caseSearchedExpression,
+e -> {
+if ( e == firstResult ) {
+renderCasted( e );
+}
+else {
+resultRenderer.accept( e );
+}
+}
+);
+}
+else {
+super.visitAnsiCaseSearchedExpression( caseSearchedExpression, resultRenderer );
+}
+}
+
+@Override
+protected void visitAnsiCaseSimpleExpression(
+CaseSimpleExpression caseSimpleExpression,
+Consumer<Expression> resultRenderer) {
+if ( getParameterRenderingMode() == SqlAstNodeRenderingMode.DEFAULT && areAllResultsParameters( caseSimpleExpression ) ) {
+final List<CaseSimpleExpression.WhenFragment> whenFragments = caseSimpleExpression.getWhenFragments();
+final Expression firstResult = whenFragments.get( 0 ).getResult();
+super.visitAnsiCaseSimpleExpression(
+caseSimpleExpression,
+e -> {
+if ( e == firstResult ) {
+renderCasted( e );
+}
+else {
+resultRenderer.accept( e );
+}
+}
+);
+}
+else {
+super.visitAnsiCaseSimpleExpression( caseSimpleExpression, resultRenderer );
+}
+}
+
 @Override
 protected boolean renderTableReference(TableReference tableReference, LockMode lockMode) {
 super.renderTableReference( tableReference, lockMode );
@@ -178,7 +178,7 @@ public class TeradataDialect extends Dialect {
 public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
 StringBuilder pattern = new StringBuilder();
 //TODO: TOTALLY UNTESTED CODE!
-pattern.append("cast((?3 - ?2) ");
+pattern.append("cast((?3-?2) ");
 switch (unit) {
 case NANOSECOND:
 case NATIVE:

@@ -214,15 +214,15 @@ public class TeradataDialect extends Dialect {
 //TODO: TOTALLY UNTESTED CODE!
 switch ( unit ) {
 case NANOSECOND:
-return "(?3 + (?2)/1e9 * interval '1' second)";
+return "(?3+(?2)/1e9*interval '1' second)";
 case NATIVE:
-return "(?3 + (?2) * interval '1' second)";
+return "(?3+(?2)*interval '1' second)";
 case QUARTER:
-return "(?3 + (?2) * interval '3' month)";
+return "(?3+(?2)*interval '3' month)";
 case WEEK:
-return "(?3 + (?2) * interval '7' day)";
+return "(?3+(?2)*interval '7' day)";
 default:
-return "(?3 + (?2) * interval '1' ?1)";
+return "(?3+(?2)*interval '1' ?1)";
 }
 }

@@ -238,6 +238,7 @@ public class TeradataDialect extends Dialect {
 CommonFunctionFactory.substring_substr( queryEngine );
 //also natively supports ANSI-style substring()
 CommonFunctionFactory.position( queryEngine );
+CommonFunctionFactory.bitLength_pattern( queryEngine, "octet_length(cast(?1 as char))*4" );
 
 queryEngine.getSqmFunctionRegistry().patternDescriptorBuilder( "mod", "(?1 mod ?2)" )
 .setInvariantType( StandardBasicTypes.STRING )
@@ -145,8 +145,8 @@ public class TimesTenDialect extends Dialect {
 queryEngine.getSqmFunctionRegistry().registerBinaryTernaryPattern(
 "locate",
 StandardBasicTypes.INTEGER,
-"instr(?2, ?1)",
-"instr(?2, ?1, ?3)"
+"instr(?2,?1)",
+"instr(?2,?1,?3)"
 ).setArgumentListSignature("(pattern, string[, start])");
 }

@@ -166,9 +166,9 @@ public class TimesTenDialect extends Dialect {
 switch (unit) {
 case NANOSECOND:
 case NATIVE:
-return "timestampadd(sql_tsi_frac_second, ?2, ?3)";
+return "timestampadd(sql_tsi_frac_second,?2,?3)";
 default:
-return "timestampadd(sql_tsi_?1, ?2, ?3)";
+return "timestampadd(sql_tsi_?1,?2,?3)";
 }
 }

@@ -177,9 +177,9 @@ public class TimesTenDialect extends Dialect {
 switch (unit) {
 case NANOSECOND:
 case NATIVE:
-return "timestampdiff(sql_tsi_frac_second, ?2, ?3)";
+return "timestampdiff(sql_tsi_frac_second,?2,?3)";
 default:
-return "timestampdiff(sql_tsi_?1, ?2, ?3)";
+return "timestampdiff(sql_tsi_?1,?2,?3)";
 }
 }

@@ -30,7 +30,7 @@ public final class RDMSSequenceSupport implements SequenceSupport {
 @Override
 public String getFromDual() {
 // The where clause was added to eliminate this statement from Brute Force Searches.
-return " from rdms.rdms_dummy where key_col = 1";
+return " from rdms.rdms_dummy where key_col=1";
 }
 
 @Override

@@ -46,6 +46,8 @@ public class InformixDialectTestCase extends BaseUnitTestCase {
 
 ssr = new StandardServiceRegistryBuilder().build();
 queryEngine = new QueryEngine(
+null,
+null,
 jpaMetamodel,
 ValueHandlingMode.BIND,
 dialect.getPreferredSqlTypeCodeForBoolean(),
@@ -819,8 +819,8 @@ public abstract class AbstractHANADialect extends Dialect {
 queryEngine.getSqmFunctionRegistry().registerBinaryTernaryPattern(
 "locate",
 StandardBasicTypes.INTEGER,
-"locate(?2, ?1)",
-"locate(?2, ?1, ?3)"
+"locate(?2,?1)",
+"locate(?2,?1,?3)"
 ).setArgumentListSignature("(pattern, string[, start])");
 
 CommonFunctionFactory.ceiling_ceil( queryEngine );

@@ -846,6 +846,7 @@ public abstract class AbstractHANADialect extends Dialect {
 CommonFunctionFactory.format_toVarchar( queryEngine );
 CommonFunctionFactory.currentUtcdatetimetimestamp( queryEngine );
 CommonFunctionFactory.everyAny_sumCaseCase( queryEngine );
+CommonFunctionFactory.bitLength_pattern( queryEngine, "length(to_binary(?1))*8" );
 }
 
 @Override

@@ -1421,7 +1422,7 @@ public abstract class AbstractHANADialect extends Dialect {
 conn = connectionProvider.getConnection();
 try ( Statement statement = conn.createStatement() ) {
 try ( ResultSet rs = statement.executeQuery(
-"SELECT TOP 1 VALUE, MAP(LAYER_NAME, 'DEFAULT', 1, 'SYSTEM', 2, 'DATABASE', 3, 4) AS LAYER FROM SYS.M_INIFILE_CONTENTS WHERE FILE_NAME='indexserver.ini' AND SECTION='session' AND KEY='max_lob_prefetch_size' ORDER BY LAYER DESC" ) ) {
+"SELECT TOP 1 VALUE,MAP(LAYER_NAME,'DEFAULT',1,'SYSTEM',2,'DATABASE',3,4) AS LAYER FROM SYS.M_INIFILE_CONTENTS WHERE FILE_NAME='indexserver.ini' AND SECTION='session' AND KEY='max_lob_prefetch_size' ORDER BY LAYER DESC" ) ) {
 // This only works if the current user has the privilege INIFILE ADMIN
 if ( rs.next() ) {
 maxLobPrefetchSizeDefault = rs.getInt( 1 );

@@ -1645,21 +1646,21 @@ public abstract class AbstractHANADialect extends Dialect {
 case NANOSECOND:
 case NATIVE:
 if ( temporalType == TemporalType.TIME ) {
-return "cast(add_nano100('1970-01-01 '||(?3), ?2) as time)";
+return "cast(add_nano100('1970-01-01 '||(?3),?2) as time)";
 }
 else {
-return "add_nano100(?3, ?2)";
+return "add_nano100(?3,?2)";
 }
 case QUARTER:
-return "add_months(?3, 3*?2)";
+return "add_months(?3,3*?2)";
 case WEEK:
-return "add_days(?3, 7*?2)";
+return "add_days(?3,7*?2)";
 case MINUTE:
-return "add_seconds(?3, 60*?2)";
+return "add_seconds(?3,60*?2)";
 case HOUR:
-return "add_seconds(?3, 3600*?2)";
+return "add_seconds(?3,3600*?2)";
 default:
-return "add_?1s(?3, ?2)";
+return "add_?1s(?3,?2)";
 }
 }

@@ -1672,18 +1673,18 @@ public abstract class AbstractHANADialect extends Dialect {
 // return "nano100_between(cast(?3 as timestamp), cast(?2 as timestamp))";
 // }
 // else {
-return "nano100_between(?2, ?3)";
+return "nano100_between(?2,?3)";
 // }
 case QUARTER:
-return "months_between(?2, ?3)/3";
+return "months_between(?2,?3)/3";
 case WEEK:
-return "days_between(?2, ?3)/7";
+return "days_between(?2,?3)/7";
 case MINUTE:
-return "seconds_between(?2, ?3)/60";
+return "seconds_between(?2,?3)/60";
 case HOUR:
-return "seconds_between(?2, ?3)/3600";
+return "seconds_between(?2,?3)/3600";
 default:
-return "?1s_between(?2, ?3)";
+return "?1s_between(?2,?3)";
 }
 }
@@ -229,7 +229,7 @@ public class CockroachDialect extends Dialect {
 
 @Override
 public String getQuerySequencesString() {
-return "select sequence_name, sequence_schema, sequence_catalog, start_value, minimum_value, maximum_value, increment from information_schema.sequences";
+return "select sequence_name,sequence_schema,sequence_catalog,start_value,minimum_value,maximum_value,increment from information_schema.sequences";
 }
 
 @Override

@@ -305,15 +305,15 @@ public class CockroachDialect extends Dialect {
 public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType) {
 switch ( unit ) {
 case NANOSECOND:
-return "(?3 + (?2)/1e3 * interval '1 microsecond')";
+return "(?3+(?2)/1e3*interval '1 microsecond')";
 case NATIVE:
-return "(?3 + (?2) * interval '1 microsecond')";
+return "(?3+(?2)*interval '1 microsecond')";
 case QUARTER: //quarter is not supported in interval literals
-return "(?3 + (?2) * interval '3 month')";
+return "(?3+(?2)*interval '3 month')";
 case WEEK: //week is not supported in interval literals
-return "(?3 + (?2) * interval '7 day')";
+return "(?3+(?2)*interval '7 day')";
 default:
-return "(?3 + (?2) * interval '1 ?1')";
+return "(?3+(?2)*interval '1 ?1')";
 }
 }

@@ -13,6 +13,7 @@ import org.hibernate.sql.ast.tree.cte.CteStatement;
 import org.hibernate.sql.ast.tree.expression.Expression;
 import org.hibernate.sql.ast.tree.expression.Literal;
 import org.hibernate.sql.ast.tree.expression.Summarization;
+import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
 import org.hibernate.sql.ast.tree.select.QueryGroup;
 import org.hibernate.sql.ast.tree.select.QueryPart;
 import org.hibernate.sql.ast.tree.select.QuerySpec;

@@ -29,6 +30,16 @@ public class CockroachSqlAstTranslator<T extends JdbcOperation> extends Abstract
 super( sessionFactory, statement );
 }
 
+@Override
+protected void renderExpressionAsClauseItem(Expression expression) {
+expression.accept( this );
+}
+
+@Override
+public void visitBooleanExpressionPredicate(BooleanExpressionPredicate booleanExpressionPredicate) {
+booleanExpressionPredicate.getExpression().accept( this );
+}
+
 @Override
 protected String getForShare() {
 return " for share";
@@ -42,8 +42,8 @@ public class DB2400V7R3Dialect extends DB2400Dialect {
 @Override
 public String getQuerySequencesString() {
 return "select distinct sequence_name from qsys2.syssequences " +
-"where ( current_schema = '*LIBL' and sequence_schema in ( select schema_name from qsys2.library_list_info ) ) " +
-"or sequence_schema = current_schema";
+"where current_schema='*LIBL' and sequence_schema in (select schema_name from qsys2.library_list_info) " +
+"or sequence_schema=current_schema";
 }
 
 @Override

@@ -53,8 +53,8 @@ public class DB2400V7R3Dialect extends DB2400Dialect {
 return sql + " fetch first " + limit + " rows only";
 }
 //nest the main query in an outer select
-return "select * from ( select inner2_.*, rownumber() over(order by order of inner2_) as rownumber_ from ( "
-+ sql + " fetch first " + limit + " rows only ) as inner2_ ) as inner1_ where rownumber_ > "
+return "select * from (select inner2_.*,rownumber() over(order by order of inner2_) as rownumber_ from ("
++ sql + " fetch first " + limit + " rows only) as inner2_) as inner1_ where rownumber_>"
 + offset + " order by rownumber_";
 }

@@ -190,6 +190,7 @@ public class DB2Dialect extends Dialect {
 CommonFunctionFactory.addYearsMonthsDaysHoursMinutesSeconds( queryEngine );
 CommonFunctionFactory.yearsMonthsDaysHoursMinutesSecondsBetween( queryEngine );
 CommonFunctionFactory.dateTrunc( queryEngine );
+CommonFunctionFactory.bitLength_pattern( queryEngine, "length(?1)*8" );
 
 queryEngine.getSqmFunctionRegistry().register( "format", new DB2FormatEmulation() );

@@ -417,7 +418,7 @@ public class DB2Dialect extends Dialect {
 default:
 literal = "0";
 }
-return "nullif(" + literal + ", " + literal + ')';
+return "nullif(" + literal + "," + literal + ')';
 }
 
 @Override

@@ -657,7 +658,7 @@ public class DB2Dialect extends Dialect {
 
 return String.format(
 Locale.ENGLISH,
-"case when %s is null then %s else %s end, %s %s",
+"case when %s is null then %s else %s end,%s %s",
 expression,
 nullPrecedence == NullPrecedence.FIRST ? "0" : "1",
 nullPrecedence == NullPrecedence.FIRST ? "1" : "0",
@@ -7,16 +7,20 @@
 package org.hibernate.dialect;
 
 import java.util.List;
+import java.util.function.Consumer;
 
 import org.hibernate.query.FetchClauseType;
 import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.query.ComparisonOperator;
 import org.hibernate.query.sqm.sql.internal.SqmParameterInterpretation;
+import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
 import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
 import org.hibernate.sql.ast.spi.SqlSelection;
 import org.hibernate.sql.ast.tree.MutationStatement;
 import org.hibernate.sql.ast.tree.Statement;
 import org.hibernate.sql.ast.tree.delete.DeleteStatement;
+import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
+import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
 import org.hibernate.sql.ast.tree.expression.ColumnReference;
 import org.hibernate.sql.ast.tree.expression.Expression;
 import org.hibernate.sql.ast.tree.expression.JdbcParameter;

@@ -25,6 +29,7 @@ import org.hibernate.sql.ast.tree.expression.NullnessLiteral;
 import org.hibernate.sql.ast.tree.expression.SqlTuple;
 import org.hibernate.sql.ast.tree.expression.Summarization;
 import org.hibernate.sql.ast.tree.insert.InsertStatement;
+import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
 import org.hibernate.sql.ast.tree.select.QueryGroup;
 import org.hibernate.sql.ast.tree.select.QueryPart;
 import org.hibernate.sql.ast.tree.select.QuerySpec;

@@ -42,6 +47,73 @@ public class DB2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAst
 super( sessionFactory, statement );
 }
 
+@Override
+protected void renderExpressionAsClauseItem(Expression expression) {
+expression.accept( this );
+}
+
+@Override
+public void visitBooleanExpressionPredicate(BooleanExpressionPredicate booleanExpressionPredicate) {
+if ( getDialect().getVersion() >= 1100 ) {
+booleanExpressionPredicate.getExpression().accept( this );
+}
+else {
+super.visitBooleanExpressionPredicate( booleanExpressionPredicate );
+}
+}
+
+// DB2 does not allow CASE expressions where all result arms contain plain parameters.
+// At least one result arm must provide some type context for inference,
+// so we cast the first result arm if we encounter this condition
+
+@Override
+protected void visitAnsiCaseSearchedExpression(
+CaseSearchedExpression caseSearchedExpression,
+Consumer<Expression> resultRenderer) {
+if ( getParameterRenderingMode() == SqlAstNodeRenderingMode.DEFAULT && areAllResultsParameters( caseSearchedExpression ) ) {
+final List<CaseSearchedExpression.WhenFragment> whenFragments = caseSearchedExpression.getWhenFragments();
+final Expression firstResult = whenFragments.get( 0 ).getResult();
+super.visitAnsiCaseSearchedExpression(
+caseSearchedExpression,
+e -> {
+if ( e == firstResult ) {
+renderCasted( e );
+}
+else {
+resultRenderer.accept( e );
+}
+}
+);
+}
+else {
+super.visitAnsiCaseSearchedExpression( caseSearchedExpression, resultRenderer );
+}
+}
+
+@Override
+protected void visitAnsiCaseSimpleExpression(
+CaseSimpleExpression caseSimpleExpression,
+Consumer<Expression> resultRenderer) {
+if ( getParameterRenderingMode() == SqlAstNodeRenderingMode.DEFAULT && areAllResultsParameters( caseSimpleExpression ) ) {
+final List<CaseSimpleExpression.WhenFragment> whenFragments = caseSimpleExpression.getWhenFragments();
+final Expression firstResult = whenFragments.get( 0 ).getResult();
+super.visitAnsiCaseSimpleExpression(
+caseSimpleExpression,
+e -> {
+if ( e == firstResult ) {
+renderCasted( e );
+}
+else {
+resultRenderer.accept( e );
+}
+}
+);
+}
+else {
+super.visitAnsiCaseSimpleExpression( caseSimpleExpression, resultRenderer );
+}
+}
+
 @Override
 protected String getForUpdate() {
 return " for read only with rs use and keep update locks";

@@ -146,7 +218,7 @@ public class DB2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAst
 for ( int i = 0; i < size; i++ ) {
 appendSql( separator );
 appendSql( returningColumns.get( i ).getColumnExpression() );
-separator = ", ";
+separator = ",";
 }
 if ( statement instanceof DeleteStatement ) {
 appendSql( " from old table (" );
@@ -79,8 +79,8 @@ public class DB2iDialect extends DB2Dialect {
 	public String getQuerySequencesString() {
 		if ( getIVersion() >= 730 ) {
 			return "select distinct sequence_name from qsys2.syssequences " +
-					"where ( current_schema = '*LIBL' and sequence_schema in ( select schema_name from qsys2.library_list_info ) ) " +
-					"or sequence_schema = current_schema";
+					"where current_schema='*LIBL' and sequence_schema in (select schema_name from qsys2.library_list_info) " +
+					"or sequence_schema=current_schema";
 		}
 		else {
 			return null;
@@ -11,6 +11,7 @@ import org.hibernate.NotYetImplementedFor6Exception;
 import org.hibernate.boot.TempTableDdlTransactionHandling;
 import org.hibernate.cfg.Environment;
 import org.hibernate.dialect.function.CommonFunctionFactory;
+import org.hibernate.dialect.function.DerbyConcatFunction;
 import org.hibernate.dialect.function.DerbyLpadEmulation;
 import org.hibernate.dialect.function.DerbyRpadEmulation;
 import org.hibernate.dialect.function.CaseLeastGreatestEmulation;
@@ -183,6 +184,9 @@ public class DerbyDialect extends Dialect {
 	public void initializeFunctionRegistry(QueryEngine queryEngine) {
 		super.initializeFunctionRegistry( queryEngine );
 
+		// Derby needs an actual argument type for aggregates like SUM, AVG, MIN, MAX to determine the result type
+		CommonFunctionFactory.aggregates( queryEngine, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
+
 		CommonFunctionFactory.concat_pipeOperator( queryEngine );
 		CommonFunctionFactory.cot( queryEngine );
 		CommonFunctionFactory.chr_char( queryEngine );
@@ -209,6 +213,8 @@ public class DerbyDialect extends Dialect {
 				.setExactArgumentCount( 2 )
 				.register();
 
+		queryEngine.getSqmFunctionRegistry().register( "concat", new DerbyConcatFunction() );
+
 		//no way I can see to pad with anything other than spaces
 		queryEngine.getSqmFunctionRegistry().register( "lpad", new DerbyLpadEmulation() );
 		queryEngine.getSqmFunctionRegistry().register( "rpad", new DerbyRpadEmulation() );
@@ -252,14 +258,14 @@ public class DerbyDialect extends Dialect {
 			case DAY_OF_MONTH:
 				return "day(?2)";
 			case DAY_OF_YEAR:
-				return "({fn timestampdiff(sql_tsi_day, date(char(year(?2),4)||'-01-01'),?2)}+1)";
+				return "({fn timestampdiff(sql_tsi_day,date(char(year(?2),4)||'-01-01'),?2)}+1)";
 			case DAY_OF_WEEK:
 				// Use the approach as outlined here: https://stackoverflow.com/questions/36357013/day-of-week-from-seconds-since-epoch
-				return "(mod(mod({fn timestampdiff(sql_tsi_day, {d '1970-01-01'}, ?2)}+4,7)+7,7)+1)";
+				return "(mod(mod({fn timestampdiff(sql_tsi_day,{d '1970-01-01'},?2)}+4,7)+7,7)+1)";
 			case WEEK:
 				// Use the approach as outlined here: https://www.sqlservercentral.com/articles/a-simple-formula-to-calculate-the-iso-week-number
 				// In SQL Server terms this is (DATEPART(dy,DATEADD(dd,DATEDIFF(dd,'17530101',@SomeDate)/7*7,'17530104'))+6)/7
-				return "(({fn timestampdiff(sql_tsi_day, date(char(year(?2),4)||'-01-01'),{fn timestampadd(sql_tsi_day, {fn timestampdiff(sql_tsi_day, {d '1753-01-01'}, ?2)}/7*7, {d '1753-01-04'})})}+7)/7)";
+				return "(({fn timestampdiff(sql_tsi_day,date(char(year(?2),4)||'-01-01'),{fn timestampadd(sql_tsi_day,{fn timestampdiff(sql_tsi_day,{d '1753-01-01'},?2)}/7*7,{d '1753-01-04'})})}+7)/7)";
 			case QUARTER:
 				return "((month(?2)+2)/3)";
 			default:
@@ -320,9 +326,9 @@ public class DerbyDialect extends Dialect {
 		switch (unit) {
 			case NANOSECOND:
 			case NATIVE:
-				return "{fn timestampadd(sql_tsi_frac_second, mod(bigint(?2),1000000000), {fn timestampadd(sql_tsi_second, bigint((?2)/1000000000), ?3)})}";
+				return "{fn timestampadd(sql_tsi_frac_second,mod(bigint(?2),1000000000),{fn timestampadd(sql_tsi_second,bigint((?2)/1000000000),?3)})}";
 			default:
-				return "{fn timestampadd(sql_tsi_?1, bigint(?2), ?3)}";
+				return "{fn timestampadd(sql_tsi_?1,bigint(?2),?3)}";
 		}
 	}
 
@@ -331,9 +337,9 @@ public class DerbyDialect extends Dialect {
 		switch (unit) {
 			case NANOSECOND:
 			case NATIVE:
-				return "{fn timestampdiff(sql_tsi_frac_second, ?2, ?3)}";
+				return "{fn timestampdiff(sql_tsi_frac_second,?2,?3)}";
 			default:
-				return "{fn timestampdiff(sql_tsi_?1, ?2, ?3)}";
+				return "{fn timestampdiff(sql_tsi_?1,?2,?3)}";
 		}
 	}
 
@@ -368,7 +374,7 @@ public class DerbyDialect extends Dialect {
 	public String getQuerySequencesString() {
 		return getVersion() < 1060
 				? null
-				: "select sys.sysschemas.schemaname as sequence_schema, sys.syssequences.* from sys.syssequences left join sys.sysschemas on sys.syssequences.schemaid = sys.sysschemas.schemaid";
+				: "select sys.sysschemas.schemaname as sequence_schema,sys.syssequences.* from sys.syssequences left join sys.sysschemas on sys.syssequences.schemaid=sys.sysschemas.schemaid";
 	}
 
 	@Override
@@ -7,24 +7,24 @@
 package org.hibernate.dialect;
 
 import java.util.List;
+import java.util.function.Consumer;
 
 import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.query.ComparisonOperator;
-import org.hibernate.query.sqm.sql.internal.SqmParameterInterpretation;
-import org.hibernate.sql.ast.SqlAstWalker;
+import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
 import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
 import org.hibernate.sql.ast.spi.SqlSelection;
 import org.hibernate.sql.ast.tree.Statement;
 import org.hibernate.sql.ast.tree.cte.CteContainer;
 import org.hibernate.sql.ast.tree.cte.CteStatement;
 import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
+import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
 import org.hibernate.sql.ast.tree.expression.Expression;
-import org.hibernate.sql.ast.tree.expression.JdbcParameter;
 import org.hibernate.sql.ast.tree.expression.Literal;
-import org.hibernate.sql.ast.tree.expression.NullnessLiteral;
-import org.hibernate.sql.ast.tree.expression.QueryLiteral;
 import org.hibernate.sql.ast.tree.expression.SqlTuple;
 import org.hibernate.sql.ast.tree.expression.Summarization;
+import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
+import org.hibernate.sql.ast.tree.predicate.InListPredicate;
 import org.hibernate.sql.ast.tree.select.QueryPart;
 import org.hibernate.sql.exec.spi.JdbcOperation;
 
@@ -39,6 +39,68 @@ public class DerbySqlAstTranslator<T extends JdbcOperation> extends AbstractSqlA
 		super( sessionFactory, statement );
 	}
 
+	@Override
+	protected void renderExpressionAsClauseItem(Expression expression) {
+		expression.accept( this );
+	}
+
+	@Override
+	public void visitBooleanExpressionPredicate(BooleanExpressionPredicate booleanExpressionPredicate) {
+		booleanExpressionPredicate.getExpression().accept( this );
+	}
+
+	// Derby does not allow CASE expressions where all result arms contain plain parameters.
+	// At least one result arm must provide some type context for inference,
+	// so we cast the first result arm if we encounter this condition
+
	[… visitAnsiCaseSearchedExpression and visitAnsiCaseSimpleExpression overrides, identical to the DB2SqlAstTranslator versions shown above …]
+
 	@Override
 	protected String getForUpdate() {
 		return " for update";
@@ -139,6 +201,28 @@ public class DerbySqlAstTranslator<T extends JdbcOperation> extends AbstractSqlA
 		}
 	}
 
+	@Override
+	public void visitInListPredicate(InListPredicate inListPredicate) {
+		final List<Expression> listExpressions = inListPredicate.getListExpressions();
+		if ( listExpressions.isEmpty() ) {
+			appendSql( "1=0" );
+			return;
+		}
+		final Expression testExpression = inListPredicate.getTestExpression();
+		if ( isParameter( testExpression ) ) {
+			renderCasted( testExpression );
+			if ( inListPredicate.isNegated() ) {
+				appendSql( " not" );
+			}
+			appendSql( " in(" );
+			renderCommaSeparated( listExpressions );
+			appendSql( CLOSE_PARENTHESIS );
+		}
+		else {
+			super.visitInListPredicate( inListPredicate );
+		}
+	}
+
 	@Override
 	protected boolean supportsRowValueConstructorSyntax() {
 		return false;
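Not part of the patch, just an orientation sketch (the Person entity and the EntityManager are assumptions): the new override targets IN predicates whose tested expression is itself a parameter, which Derby now receives as "cast(? as integer) in (?,?,?)", while an empty value list collapses to the always-false "1=0".

	// Hypothetical illustration only -- assumes javax.persistence.criteria imports,
	// an open EntityManager and a mapped Person entity.
	CriteriaBuilder cb = entityManager.getCriteriaBuilder();
	CriteriaQuery<Person> query = cb.createQuery( Person.class );
	Root<Person> person = query.from( Person.class );
	ParameterExpression<Integer> status = cb.parameter( Integer.class, "status" );
	query.select( person ).where( status.in( 1, 2, 3 ) );
	List<Person> matching = entityManager.createQuery( query )
			.setParameter( "status", 2 )
			.getResultList();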
@@ -156,7 +240,7 @@ public class DerbySqlAstTranslator<T extends JdbcOperation> extends AbstractSqlA
 
 	@Override
 	protected String getFromDual() {
-		return " from (values 0) as dual";
+		return " from (values 0) dual";
 	}
 
 	@Override
@@ -70,6 +70,7 @@ import org.hibernate.query.sqm.mutation.spi.SqmMultiTableMutationStrategy;
 import org.hibernate.query.sqm.sql.SqmTranslatorFactory;
 import org.hibernate.service.ServiceRegistry;
 import org.hibernate.sql.*;
+import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
 import org.hibernate.sql.ast.SqlAstTranslatorFactory;
 import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
 import org.hibernate.sql.ast.spi.StandardSqlAstTranslatorFactory;
@@ -475,7 +476,7 @@ public abstract class Dialect implements ConversionContext {
 
 		//aggregate functions, supported on every database
 
-		CommonFunctionFactory.aggregates( queryEngine );
+		CommonFunctionFactory.aggregates( queryEngine, SqlAstNodeRenderingMode.DEFAULT );
 
 		//the ANSI SQL-defined aggregate functions any() and every() are only
 		//supported on one database, but can be emulated using sum() and case,
@@ -41,6 +41,7 @@ import org.hibernate.query.sqm.mutation.internal.idtable.AfterUseAction;
 import org.hibernate.query.sqm.mutation.internal.idtable.IdTable;
 import org.hibernate.query.sqm.mutation.internal.idtable.LocalTemporaryTableStrategy;
 import org.hibernate.query.sqm.mutation.spi.SqmMultiTableMutationStrategy;
+import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
 import org.hibernate.sql.ast.SqlAstTranslator;
 import org.hibernate.sql.ast.SqlAstTranslatorFactory;
 import org.hibernate.sql.ast.spi.StandardSqlAstTranslatorFactory;
@@ -155,6 +156,9 @@ public class H2Dialect extends Dialect {
 	public void initializeFunctionRegistry(QueryEngine queryEngine) {
 		super.initializeFunctionRegistry( queryEngine );
 
+		// H2 needs an actual argument type for aggregates like SUM, AVG, MIN, MAX to determine the result type
+		CommonFunctionFactory.aggregates( queryEngine, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
+
 		CommonFunctionFactory.pi( queryEngine );
 		CommonFunctionFactory.cot( queryEngine );
 		CommonFunctionFactory.radians( queryEngine );
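A loose illustration of the inference problem the comment mentions (not from this commit; the query shape, the Person entity and the EntityManager are all assumptions): an aggregate whose argument is a bare parameter gives the database no argument type, so with NO_PLAIN_PARAMETER rendering H2 and Derby receive "sum(cast(? as integer))" rather than "sum(?)".

	// Hypothetical illustration only -- assumes an open EntityManager and a mapped Person entity.
	Long total = entityManager
			.createQuery( "select sum(:bonus) from Person p", Long.class )
			.setParameter( "bonus", 100 )
			.getSingleResult();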
@@ -246,13 +250,13 @@ public class H2Dialect extends Dialect {
 
 	@Override
 	public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType) {
-		return "dateadd(?1, ?2, ?3)";
+		return "dateadd(?1,?2,?3)";
 	}
 
 	@Override
 	public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
-		return "datediff(?1, ?2, ?3)";
+		return "datediff(?1,?2,?3)";
 	}
 
 	@Override
@@ -18,6 +18,7 @@ import org.hibernate.sql.ast.tree.expression.Expression;
 import org.hibernate.sql.ast.tree.expression.Literal;
 import org.hibernate.sql.ast.tree.expression.SqlTuple;
 import org.hibernate.sql.ast.tree.expression.Summarization;
+import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
 import org.hibernate.sql.ast.tree.select.QueryPart;
 import org.hibernate.sql.exec.spi.JdbcOperation;
 
@@ -32,6 +33,16 @@ public class H2SqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAstT
 		super( sessionFactory, statement );
 	}
 
+	@Override
+	protected void renderExpressionAsClauseItem(Expression expression) {
+		expression.accept( this );
+	}
+
+	@Override
+	public void visitBooleanExpressionPredicate(BooleanExpressionPredicate booleanExpressionPredicate) {
+		booleanExpressionPredicate.getExpression().accept( this );
+	}
+
 	@Override
 	public void visitOffsetFetchClause(QueryPart queryPart) {
 		if ( isRowsOnlyFetchClauseType( queryPart ) ) {
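For orientation (not part of the patch; the entity and its boolean attribute are assumptions): visitBooleanExpressionPredicate covers predicates that consist of nothing but a boolean-valued expression, which H2 can evaluate directly, so the override simply renders the expression as-is.

	// Hypothetical illustration only -- a boolean attribute used directly as the WHERE predicate.
	List<Person> employed = entityManager
			.createQuery( "select p from Person p where p.employed", Person.class )
			.getResultList();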
@@ -284,13 +284,13 @@ public class HSQLDialect extends Dialect {
 		switch (unit) {
 			case NANOSECOND:
 			case NATIVE:
-				pattern.append("timestampadd(sql_tsi_frac_second, ?2, "); //nanos
+				pattern.append("timestampadd(sql_tsi_frac_second,?2,"); //nanos
 				break;
 			case WEEK:
-				pattern.append("dateadd('day', ?2*7, ");
+				pattern.append("dateadd('day',?2*7,");
 				break;
 			default:
-				pattern.append("dateadd('?1', ?2, ");
+				pattern.append("dateadd('?1',?2,");
 		}
 		if (castTo) {
 			pattern.append("cast(?3 as timestamp)");
@@ -317,14 +317,14 @@ public class HSQLDialect extends Dialect {
 			default:
 				pattern.append("datediff('?1'");
 		}
-		pattern.append(", ");
+		pattern.append(',');
 		if (castFrom) {
 			pattern.append("cast(?2 as timestamp)");
 		}
 		else {
 			pattern.append("?2");
 		}
-		pattern.append(", ");
+		pattern.append(',');
 		if (castTo) {
 			pattern.append("cast(?3 as timestamp)");
 		}
@@ -7,6 +7,7 @@
 package org.hibernate.dialect;
 
 import java.util.List;
+import java.util.function.Consumer;
 
 import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.query.ComparisonOperator;
@@ -15,10 +16,13 @@ import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
 import org.hibernate.sql.ast.spi.SqlSelection;
 import org.hibernate.sql.ast.tree.Statement;
 import org.hibernate.sql.ast.tree.cte.CteStatement;
+import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
+import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
 import org.hibernate.sql.ast.tree.expression.Expression;
 import org.hibernate.sql.ast.tree.expression.Literal;
 import org.hibernate.sql.ast.tree.expression.SqlTuple;
 import org.hibernate.sql.ast.tree.expression.Summarization;
+import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
 import org.hibernate.sql.ast.tree.select.QueryPart;
 import org.hibernate.sql.ast.tree.select.QuerySpec;
 import org.hibernate.sql.exec.spi.JdbcOperation;
@@ -34,6 +38,68 @@ public class HSQLSqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
 		super( sessionFactory, statement );
 	}
 
+	@Override
+	protected void renderExpressionAsClauseItem(Expression expression) {
+		expression.accept( this );
+	}
+
+	@Override
+	public void visitBooleanExpressionPredicate(BooleanExpressionPredicate booleanExpressionPredicate) {
+		booleanExpressionPredicate.getExpression().accept( this );
+	}
+
+	// HSQL does not allow CASE expressions where all result arms contain plain parameters.
+	// At least one result arm must provide some type context for inference,
+	// so we cast the first result arm if we encounter this condition
+
	[… visitAnsiCaseSearchedExpression and visitAnsiCaseSimpleExpression overrides, identical to the DB2SqlAstTranslator versions shown above …]
+
 	@Override
 	public boolean supportsFilterClause() {
 		return true;
@@ -98,9 +164,7 @@ public class HSQLSqlAstTranslator<T extends JdbcOperation> extends AbstractSqlAs
 			case NOT_DISTINCT_FROM:
 				// HSQL does not like parameters in the distinct from predicate
 				render( lhs, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
-				appendSql( " " );
 				appendSql( operator.sqlText() );
-				appendSql( " " );
 				render( rhs, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
 				break;
 			default:
@@ -131,7 +131,7 @@ public class MariaDBDialect extends MySQLDialect {
 	@Override
 	public String getQuerySequencesString() {
 		return getSequenceSupport().supportsSequences()
-				? "select table_name from information_schema.TABLES where table_schema = database() and table_type = 'SEQUENCE'"
+				? "select table_name from information_schema.TABLES where table_schema=database() and table_type='SEQUENCE'"
 				: super.getQuerySequencesString(); //fancy way to write "null"
 	}
 
@@ -14,6 +14,7 @@ import org.hibernate.sql.ast.tree.cte.CteStatement;
 import org.hibernate.sql.ast.tree.expression.Expression;
 import org.hibernate.sql.ast.tree.expression.Literal;
 import org.hibernate.sql.ast.tree.expression.Summarization;
+import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
 import org.hibernate.sql.ast.tree.select.QueryGroup;
 import org.hibernate.sql.ast.tree.select.QueryPart;
 import org.hibernate.sql.ast.tree.select.QuerySpec;
@@ -30,6 +31,16 @@ public class MariaDBSqlAstTranslator<T extends JdbcOperation> extends AbstractSq
 		super( sessionFactory, statement );
 	}
 
+	@Override
+	protected void renderExpressionAsClauseItem(Expression expression) {
+		expression.accept( this );
+	}
+
+	@Override
+	public void visitBooleanExpressionPredicate(BooleanExpressionPredicate booleanExpressionPredicate) {
+		booleanExpressionPredicate.getExpression().accept( this );
+	}
+
 	@Override
 	protected String getForShare() {
 		return " lock in share mode";
@@ -465,11 +465,11 @@ public class MySQLDialect extends Dialect {
 	public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType) {
 		switch (unit) {
 			case NANOSECOND:
-				return "timestampadd(microsecond, (?2)/1e3, ?3)";
+				return "timestampadd(microsecond,(?2)/1e3,?3)";
 			case NATIVE:
-				return "timestampadd(microsecond, ?2, ?3)";
+				return "timestampadd(microsecond,?2,?3)";
 			default:
-				return "timestampadd(?1, ?2, ?3)";
+				return "timestampadd(?1,?2,?3)";
 		}
 	}
 
@@ -477,11 +477,11 @@ public class MySQLDialect extends Dialect {
 	public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
 		switch (unit) {
 			case NANOSECOND:
-				return "timestampdiff(microsecond, ?2, ?3)*1e3";
+				return "timestampdiff(microsecond,?2,?3)*1e3";
 			case NATIVE:
-				return "timestampdiff(microsecond, ?2, ?3)";
+				return "timestampdiff(microsecond,?2,?3)";
 			default:
-				return "timestampdiff(?1, ?2, ?3)";
+				return "timestampdiff(?1,?2,?3)";
 		}
 	}
 
@@ -773,7 +773,7 @@ public class MySQLDialect extends Dialect {
 			else {
 				orderByElement.append( "1 else 0" );
 			}
-			orderByElement.append( " end, " );
+			orderByElement.append( " end," );
 		}
 		// Nulls precedence has already been handled so passing NONE value.
 		orderByElement.append( super.renderOrderByElement( expression, collation, order, NullPrecedence.NONE ) );
@@ -14,6 +14,7 @@ import org.hibernate.sql.ast.tree.cte.CteStatement;
 import org.hibernate.sql.ast.tree.expression.Expression;
 import org.hibernate.sql.ast.tree.expression.Literal;
 import org.hibernate.sql.ast.tree.expression.Summarization;
+import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
 import org.hibernate.sql.ast.tree.select.QueryGroup;
 import org.hibernate.sql.ast.tree.select.QueryPart;
 import org.hibernate.sql.ast.tree.select.QuerySpec;
@@ -30,6 +31,16 @@ public class MySQLSqlAstTranslator<T extends JdbcOperation> extends AbstractSqlA
 		super( sessionFactory, statement );
 	}
 
+	@Override
+	protected void renderExpressionAsClauseItem(Expression expression) {
+		expression.accept( this );
+	}
+
+	@Override
+	public void visitBooleanExpressionPredicate(BooleanExpressionPredicate booleanExpressionPredicate) {
+		booleanExpressionPredicate.getExpression().accept( this );
+	}
+
 	@Override
 	protected String getForShare() {
 		return getDialect().getVersion() >= 800 ? " for share" : " lock in share mode";
@@ -172,6 +172,7 @@ public class OracleDialect extends Dialect {
 		CommonFunctionFactory.covarPopSamp( queryEngine );
 		CommonFunctionFactory.corr( queryEngine );
 		CommonFunctionFactory.regrLinearRegressionAggregates( queryEngine );
+		CommonFunctionFactory.bitLength_pattern( queryEngine, "vsize(?1)*8" );
 
 		if ( getVersion() < 900 ) {
 			queryEngine.getSqmFunctionRegistry().register( "coalesce", new NvlCoalesceEmulation() );
@@ -184,8 +185,8 @@ public class OracleDialect extends Dialect {
 		queryEngine.getSqmFunctionRegistry().registerBinaryTernaryPattern(
 				"locate",
 				StandardBasicTypes.INTEGER,
-				"instr(?2, ?1)",
-				"instr(?2, ?1, ?3)"
+				"instr(?2,?1)",
+				"instr(?2,?1,?3)"
 		).setArgumentListSignature("(pattern, string[, start])");
 	}
 
@@ -367,7 +368,7 @@ public class OracleDialect extends Dialect {
 	@Override
 	public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType) {
 		StringBuilder pattern = new StringBuilder();
-		pattern.append("(?3 + ");
+		pattern.append("(?3+");
 		switch ( unit ) {
 			case YEAR:
 			case QUARTER:
@@ -1005,7 +1006,7 @@ public class OracleDialect extends Dialect {
 
 	@Override
 	public String getCurrentSchemaCommand() {
-		return "SELECT SYS_CONTEXT('USERENV', 'CURRENT_SCHEMA') FROM DUAL";
+		return "SELECT SYS_CONTEXT('USERENV','CURRENT_SCHEMA') FROM DUAL";
 	}
 
 	@Override
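Not part of the patch, only a usage sketch (the Person entity, its name attribute and the EntityManager are assumptions): the locate() function whose Oracle pattern is compacted above.

	// Hypothetical illustration only -- locate() in a Criteria query; on Oracle it goes
	// through the instr(?2,?1) pattern registered above.
	CriteriaBuilder cb = entityManager.getCriteriaBuilder();
	CriteriaQuery<Integer> query = cb.createQuery( Integer.class );
	Root<Person> person = query.from( Person.class );
	query.select( cb.locate( person.<String>get( "name" ), "Jo" ) );
	List<Integer> positions = entityManager.createQuery( query ).getResultList();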
@@ -163,7 +163,7 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
 				appendSql( ')' );
 			}
 		}
-		appendSql( " where rownum <= " );
+		appendSql( " where rownum<=" );
 		final Stack<Clause> clauseStack = getClauseStack();
 		clauseStack.push( Clause.WHERE );
 		try {
@@ -307,7 +307,7 @@ public class OracleSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
 			visitDecodeCaseSearchedExpression( caseSearchedExpression );
 		}
 		else {
-			visitAnsiCaseSearchedExpression( caseSearchedExpression );
+			super.visitCaseSearchedExpression( caseSearchedExpression, inSelect );
 		}
 	}
 
@@ -175,15 +175,15 @@ public class PostgreSQLDialect extends Dialect {
 	public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType) {
 		switch ( unit ) {
 			case NANOSECOND:
-				return "(?3 + (?2)/1e3 * interval '1 microsecond')";
+				return "(?3+(?2)/1e3*interval '1 microsecond')";
 			case NATIVE:
-				return "(?3 + (?2) * interval '1 second')";
+				return "(?3+(?2)*interval '1 second')";
 			case QUARTER: //quarter is not supported in interval literals
-				return "(?3 + (?2) * interval '3 month')";
+				return "(?3+(?2)*interval '3 month')";
 			case WEEK: //week is not supported in interval literals
-				return "(?3 + (?2) * interval '7 day')";
+				return "(?3+(?2)*interval '7 day')";
 			default:
-				return "(?3 + (?2) * interval '1 ?1')";
+				return "(?3+(?2)*interval '1 ?1')";
 		}
 	}
 
@@ -324,7 +324,7 @@ public class PostgreSQLDialect extends Dialect {
 				"locate",
 				StandardBasicTypes.INTEGER,
 				"position(?1 in ?2)",
-				"(position(?1 in substring(?2 from ?3)) + (?3) - 1)"
+				"(position(?1 in substring(?2 from ?3))+(?3)-1)"
 		).setArgumentListSignature("(pattern, string[, start])");
 
 		if ( getVersion() >= 940 ) {
@@ -15,6 +15,7 @@ import org.hibernate.sql.ast.tree.cte.CteStatement;
 import org.hibernate.sql.ast.tree.expression.Expression;
 import org.hibernate.sql.ast.tree.expression.Literal;
 import org.hibernate.sql.ast.tree.expression.Summarization;
+import org.hibernate.sql.ast.tree.predicate.BooleanExpressionPredicate;
 import org.hibernate.sql.ast.tree.select.QueryGroup;
 import org.hibernate.sql.ast.tree.select.QueryPart;
 import org.hibernate.sql.ast.tree.select.QuerySpec;
@@ -31,6 +32,16 @@ public class PostgreSQLSqlAstTranslator<T extends JdbcOperation> extends Abstrac
 		super( sessionFactory, statement );
 	}
 
+	@Override
+	protected void renderExpressionAsClauseItem(Expression expression) {
+		expression.accept( this );
+	}
+
+	@Override
+	public void visitBooleanExpressionPredicate(BooleanExpressionPredicate booleanExpressionPredicate) {
+		booleanExpressionPredicate.getExpression().accept( this );
+	}
+
 	@Override
 	protected void renderMaterializationHint(CteMaterialization materialization) {
 		if ( getDialect().getVersion() >= 1200 ) {
@@ -148,6 +148,7 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
 
 		CommonFunctionFactory.truncate_round( queryEngine );
 		CommonFunctionFactory.everyAny_sumIif( queryEngine );
+		CommonFunctionFactory.bitLength_pattern( queryEngine, "datalength(?1) * 8" );
 
 		if ( getVersion() >= 10 ) {
 			CommonFunctionFactory.locate_charindex( queryEngine );
@@ -237,7 +238,7 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
 
 	@Override
 	public String currentTime() {
-		return "convert(time, getdate())";
+		return "convert(time,getdate())";
 	}
 
 	@Override
@@ -298,24 +299,24 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
 			lockMode = lockOptions.getLockMode();
 		}
 
-		final String writeLockStr = lockOptions.getTimeOut() == LockOptions.SKIP_LOCKED ? "updlock" : "updlock, holdlock";
+		final String writeLockStr = lockOptions.getTimeOut() == LockOptions.SKIP_LOCKED ? "updlock" : "updlock,holdlock";
 		final String readLockStr = lockOptions.getTimeOut() == LockOptions.SKIP_LOCKED ? "updlock" : "holdlock";
 
-		final String noWaitStr = lockOptions.getTimeOut() == LockOptions.NO_WAIT ? ", nowait" : "";
-		final String skipLockStr = lockOptions.getTimeOut() == LockOptions.SKIP_LOCKED ? ", readpast" : "";
+		final String noWaitStr = lockOptions.getTimeOut() == LockOptions.NO_WAIT ? ",nowait" : "";
+		final String skipLockStr = lockOptions.getTimeOut() == LockOptions.SKIP_LOCKED ? ",readpast" : "";
 
 		switch ( lockMode ) {
 			//noinspection deprecation
 			case UPGRADE:
 			case PESSIMISTIC_WRITE:
 			case WRITE:
-				return tableName + " with (" + writeLockStr + ", rowlock" + noWaitStr + skipLockStr + ")";
+				return tableName + " with (" + writeLockStr + ",rowlock" + noWaitStr + skipLockStr + ")";
 			case PESSIMISTIC_READ:
-				return tableName + " with (" + readLockStr + ", rowlock" + noWaitStr + skipLockStr + ")";
+				return tableName + " with (" + readLockStr + ",rowlock" + noWaitStr + skipLockStr + ")";
 			case UPGRADE_SKIPLOCKED:
-				return tableName + " with (updlock, rowlock, readpast" + noWaitStr + ")";
+				return tableName + " with (updlock,rowlock,readpast" + noWaitStr + ")";
 			case UPGRADE_NOWAIT:
-				return tableName + " with (updlock, holdlock, rowlock, nowait)";
+				return tableName + " with (updlock,holdlock,rowlock,nowait)";
 			default:
 				return tableName;
 		}
@@ -327,11 +328,11 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
 			case UPGRADE_NOWAIT:
 			case PESSIMISTIC_WRITE:
 			case WRITE:
-				return tableName + " with (updlock, rowlock)";
+				return tableName + " with (updlock,rowlock)";
 			case PESSIMISTIC_READ:
-				return tableName + " with (holdlock, rowlock)";
+				return tableName + " with (holdlock,rowlock)";
 			case UPGRADE_SKIPLOCKED:
-				return tableName + " with (updlock, rowlock, readpast)";
+				return tableName + " with (updlock,rowlock,readpast)";
 			default:
 				return tableName;
 		}
@@ -501,7 +502,7 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
 			else {
 				orderByElement.append( "1 else 0" );
 			}
-			orderByElement.append( " end, " );
+			orderByElement.append( " end," );
 		}
 
 		// Nulls precedence has already been handled so passing NONE value.
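For context (not part of the patch; the entity and identifier are assumptions): these hint strings are what a JPA pessimistic lock request turns into on SQL Server, and only the whitespace inside the hint list changes here.

	// Hypothetical illustration only -- a pessimistic write lock; the generated SQL carries
	// a table hint such as "with (updlock,holdlock,rowlock)".
	Person person = entityManager.find( Person.class, 1L, LockModeType.PESSIMISTIC_WRITE );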
@@ -593,12 +594,12 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
 				//Java Durations are usually the only thing
 				//we find expressed in nanosecond precision,
 				//and they can easily be very large
-				return "dateadd(nanosecond, ?2%1000000000, dateadd(second, ?2/1000000000, ?3))";
+				return "dateadd(nanosecond,?2%1000000000,dateadd(second,?2/1000000000,?3))";
 			case NATIVE:
 				//microsecond is the "native" precision
-				return "dateadd(microsecond, ?2%1000000, dateadd(second, ?2/1000000, ?3))";
+				return "dateadd(microsecond,?2%1000000,dateadd(second,?2/1000000,?3))";
 			default:
-				return "dateadd(?1, ?2, ?3)";
+				return "dateadd(?1,?2,?3)";
 		}
 	}
 
@@ -607,14 +608,14 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
 		switch (unit) {
 			case NATIVE:
 				//use microsecond as the "native" precision
-				return "datediff_big(microsecond, ?2, ?3)";
+				return "datediff_big(microsecond,?2,?3)";
 			default:
 				//datediff() returns an int, and can easily
 				//overflow when dealing with "physical"
 				//durations, so use datediff_big()
 				return unit.normalized() == NANOSECOND
-						? "datediff_big(?1, ?2, ?3)"
-						: "datediff(?1, ?2, ?3)";
+						? "datediff_big(?1,?2,?3)"
+						: "datediff(?1,?2,?3)";
 		}
 	}
 
@@ -66,7 +66,7 @@ public class SQLServerSqlAstTranslator<T extends JdbcOperation> extends Abstract
 		if ( rendersTableReferenceAlias( currentClause ) ) {
 			final String identificationVariable = tableReference.getIdentificationVariable();
 			if ( identificationVariable != null ) {
-				appendSql( getDialect().getTableAliasSeparator() );
+				appendSql( ' ' );
 				appendSql( identificationVariable );
 			}
 		}
@@ -82,27 +82,27 @@ public class SQLServerSqlAstTranslator<T extends JdbcOperation> extends Abstract
 	private void renderLockHint(LockMode lockMode) {
 		if ( getDialect().getVersion() >= 9 ) {
 			final int effectiveLockTimeout = getEffectiveLockTimeout( lockMode );
-			final String writeLockStr = effectiveLockTimeout == LockOptions.SKIP_LOCKED ? "updlock" : "updlock, holdlock";
+			final String writeLockStr = effectiveLockTimeout == LockOptions.SKIP_LOCKED ? "updlock" : "updlock,holdlock";
 			final String readLockStr = effectiveLockTimeout == LockOptions.SKIP_LOCKED ? "updlock" : "holdlock";
 
-			final String noWaitStr = effectiveLockTimeout == LockOptions.NO_WAIT ? ", nowait" : "";
-			final String skipLockStr = effectiveLockTimeout == LockOptions.SKIP_LOCKED ? ", readpast" : "";
+			final String noWaitStr = effectiveLockTimeout == LockOptions.NO_WAIT ? ",nowait" : "";
+			final String skipLockStr = effectiveLockTimeout == LockOptions.SKIP_LOCKED ? ",readpast" : "";
 
 			switch ( lockMode ) {
 				//noinspection deprecation
 				case UPGRADE:
 				case PESSIMISTIC_WRITE:
 				case WRITE:
-					appendSql( " with (" + writeLockStr + ", rowlock" + noWaitStr + skipLockStr + ")" );
+					appendSql( " with (" + writeLockStr + ",rowlock" + noWaitStr + skipLockStr + ")" );
 					break;
 				case PESSIMISTIC_READ:
-					appendSql( " with (" + readLockStr + ", rowlock" + noWaitStr + skipLockStr + ")" );
+					appendSql( " with (" + readLockStr + ",rowlock" + noWaitStr + skipLockStr + ")" );
 					break;
 				case UPGRADE_SKIPLOCKED:
-					appendSql( " with (updlock, rowlock, readpast" + noWaitStr + ")" );
+					appendSql( " with (updlock,rowlock,readpast" + noWaitStr + ")" );
 					break;
 				case UPGRADE_NOWAIT:
-					appendSql( " with (updlock, holdlock, rowlock, nowait)" );
+					appendSql( " with (updlock,holdlock,rowlock,nowait)" );
 					break;
 			}
 		}
@@ -113,13 +113,13 @@ public class SQLServerSqlAstTranslator<T extends JdbcOperation> extends Abstract
 				case UPGRADE_NOWAIT:
 				case PESSIMISTIC_WRITE:
 				case WRITE:
-					appendSql( " with (updlock, rowlock)" );
+					appendSql( " with (updlock,rowlock)" );
 					break;
 				case PESSIMISTIC_READ:
-					appendSql(" with (holdlock, rowlock)" );
+					appendSql(" with (holdlock,rowlock)" );
 					break;
 				case UPGRADE_SKIPLOCKED:
-					appendSql( " with (updlock, rowlock, readpast)" );
+					appendSql( " with (updlock,rowlock,readpast)" );
 					break;
 			}
 		}
@@ -445,7 +445,7 @@ public class SpannerDialect extends Dialect {
 				case MONTH:
 					throw new SemanticException("illegal unit for timestamp_add(): " + unit);
 				default:
-					return "timestamp_add(?3, interval ?2 ?1)";
+					return "timestamp_add(?3,interval ?2 ?1)";
 			}
 		}
 		else {
@@ -457,7 +457,7 @@ public class SpannerDialect extends Dialect {
 				case NATIVE:
 					throw new SemanticException("illegal unit for date_add(): " + unit);
 				default:
-					return "date_add(?3, interval ?2 ?1)";
+					return "date_add(?3,interval ?2 ?1)";
 			}
 		}
 	}
@@ -471,7 +471,7 @@ public class SpannerDialect extends Dialect {
 				case MONTH:
 					throw new SemanticException("illegal unit for timestamp_diff(): " + unit);
 				default:
-					return "timestamp_diff(?3, ?2, ?1)";
+					return "timestamp_diff(?3,?2,?1)";
 			}
 		}
 		else {
@@ -483,7 +483,7 @@ public class SpannerDialect extends Dialect {
 				case NATIVE:
 					throw new SemanticException("illegal unit for date_diff(): " + unit);
 				default:
-					return "date_diff(?3, ?2, ?1)";
+					return "date_diff(?3,?2,?1)";
 			}
 		}
 	}
@@ -219,13 +219,13 @@ public class SybaseASEDialect extends SybaseDialect {
 				// If the driver or database do not support bigdatetime and bigtime types,
 				// we try to operate on milliseconds instead
 				if ( getVersion() < 1550 || jtdsDriver ) {
-					return "dateadd(millisecond, ?2/1000000, ?3)";
+					return "dateadd(millisecond,?2/1000000,?3)";
 				}
 				else {
-					return "dateadd(mcs, ?2/1000, ?3)";
+					return "dateadd(mcs,?2/1000,?3)";
 				}
 			default:
-				return "dateadd(?1, ?2, ?3)";
+				return "dateadd(?1,?2,?3)";
 		}
 	}
 
@@ -235,9 +235,9 @@ public class SybaseASEDialect extends SybaseDialect {
 		switch ( unit ) {
 			case NANOSECOND:
 			case NATIVE:
-				return "(datediff(mcs, ?2, ?3)*1000)";
+				return "(datediff(mcs,?2,?3)*1000)";
 			default:
-				return "datediff(?1, ?2, ?3)";
+				return "datediff(?1,?2,?3)";
 		}
 	}
 
@@ -7,16 +7,20 @@
 package org.hibernate.dialect;
 
 import java.util.List;
+import java.util.function.Consumer;
 
 import org.hibernate.LockMode;
 import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.query.ComparisonOperator;
 import org.hibernate.sql.ast.SqlAstJoinType;
+import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
 import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
 import org.hibernate.sql.ast.spi.SqlSelection;
 import org.hibernate.sql.ast.tree.MutationStatement;
 import org.hibernate.sql.ast.tree.Statement;
 import org.hibernate.sql.ast.tree.cte.CteStatement;
+import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
+import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
 import org.hibernate.sql.ast.tree.expression.ColumnReference;
 import org.hibernate.sql.ast.tree.expression.Expression;
 import org.hibernate.sql.ast.tree.expression.JdbcParameter;
@@ -44,6 +48,58 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
 		super( sessionFactory, statement );
 	}
 
+	// Sybase ASE does not allow CASE expressions where all result arms contain plain parameters.
+	// At least one result arm must provide some type context for inference,
+	// so we cast the first result arm if we encounter this condition
+
	[… visitAnsiCaseSearchedExpression and visitAnsiCaseSimpleExpression overrides, identical to the DB2SqlAstTranslator versions shown above …]
+
 	@Override
 	protected boolean renderTableReference(TableReference tableReference, LockMode lockMode) {
 		super.renderTableReference( tableReference, lockMode );
@@ -168,14 +224,13 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
 		boolean rhsNotNullPredicate =
 				lhs instanceof NullnessLiteral
 						|| lhs instanceof Literal
-						|| lhs instanceof JdbcParameter;
+						|| isParameter( lhs );
 		boolean lhsNotNullPredicate =
 				rhs instanceof NullnessLiteral
 						|| rhs instanceof Literal
-						|| rhs instanceof JdbcParameter;
+						|| isParameter( rhs );
 		if ( rhsNotNullPredicate || lhsNotNullPredicate ) {
 			lhs.accept( this );
-			appendSql( " " );
 			switch ( operator ) {
 				case DISTINCT_FROM:
 					appendSql( "<>" );
@@ -194,7 +249,6 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract
 					appendSql( operator.sqlText() );
 					break;
 			}
-			appendSql( " " );
 			rhs.accept( this );
 			if ( lhsNotNullPredicate ) {
 				appendSql( " and " );
@@ -308,7 +362,7 @@ public class SybaseASESqlAstTranslator<T extends JdbcOperation> extends Abstract

 	@Override
 	protected String getFromDual() {
-		return " from (select 1) as dual(c1)";
+		return " from (select 1) dual(c1)";
 	}

 	private boolean supportsTopClause() {
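The casting workaround above can be illustrated outside of Hibernate. The following is a minimal, hypothetical JDBC sketch (connection URL, table and column names are placeholders, not taken from the commit): a CASE whose result arms are all plain `?` parameters gives the server no type to infer, while casting one arm, which is what the translator now emits, anchors the type for the whole expression.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class CaseParameterCastSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection details; only the SQL shape matters here.
        try ( Connection con = DriverManager.getConnection( "jdbc:sybase:Tds:localhost:5000/test", "sa", "" ) ) {
            // Without the cast the statement would be
            //   case when flag = 1 then ? else ? end
            // which Sybase ASE cannot type. Casting the first arm fixes inference:
            try ( PreparedStatement ps = con.prepareStatement(
                    "select case when flag = 1 then cast(? as varchar(255)) else ? end from some_table" ) ) {
                ps.setString( 1, "yes" );
                ps.setString( 2, "no" );
                try ( ResultSet rs = ps.executeQuery() ) {
                    while ( rs.next() ) {
                        System.out.println( rs.getString( 1 ) );
                    }
                }
            }
        }
    }
}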
@@ -215,6 +215,7 @@ public class SybaseDialect extends AbstractTransactSQLDialect {

 		CommonFunctionFactory.replace_strReplace( queryEngine );
 		CommonFunctionFactory.everyAny_sumCaseCase( queryEngine );
+		CommonFunctionFactory.bitLength_pattern( queryEngine, "datalength(?1) * 8" );
 	}

 	@Override
@@ -238,11 +239,11 @@ public class SybaseDialect extends AbstractTransactSQLDialect {
 		if ( to == CastType.STRING ) {
 			switch ( from ) {
 				case DATE:
-					return "str_replace(convert(varchar, ?1, 102), '.', '-')";
+					return "str_replace(convert(varchar,?1,102),'.','-')";
 				case TIME:
-					return "convert(varchar, ?1, 108)";
+					return "convert(varchar,?1,108)";
 				case TIMESTAMP:
-					return "str_replace(convert(varchar, ?1, 23), 'T', ' ')";
+					return "str_replace(convert(varchar,?1,23),'T',' ')";
 			}
 		}
 		return super.castPattern( from, to );
@@ -259,7 +260,7 @@ public class SybaseDialect extends AbstractTransactSQLDialect {
 	@Override
 	public String extractPattern(TemporalUnit unit) {
 		//TODO!!
-		return "datepart(?1, ?2)";
+		return "datepart(?1,?2)";
 	}

 	@Override
@@ -270,13 +271,13 @@ public class SybaseDialect extends AbstractTransactSQLDialect {
 	@Override
 	public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType) {
 		//TODO!!
-		return "dateadd(?1, ?2, ?3)";
+		return "dateadd(?1,?2,?3)";
 	}

 	@Override
 	public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
 		//TODO!!
-		return "datediff(?1, ?2, ?3)";
+		return "datediff(?1,?2,?3)";
 	}

 	@Override
@@ -7,14 +7,18 @@
 package org.hibernate.dialect;

 import java.util.List;
+import java.util.function.Consumer;

 import org.hibernate.LockMode;
 import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.query.ComparisonOperator;
+import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
 import org.hibernate.sql.ast.spi.AbstractSqlAstTranslator;
 import org.hibernate.sql.ast.spi.SqlSelection;
 import org.hibernate.sql.ast.tree.Statement;
 import org.hibernate.sql.ast.tree.cte.CteStatement;
+import org.hibernate.sql.ast.tree.expression.CaseSearchedExpression;
+import org.hibernate.sql.ast.tree.expression.CaseSimpleExpression;
 import org.hibernate.sql.ast.tree.expression.Expression;
 import org.hibernate.sql.ast.tree.expression.Literal;
 import org.hibernate.sql.ast.tree.expression.SqlTuple;
@@ -35,6 +39,58 @@ public class SybaseSqlAstTranslator<T extends JdbcOperation> extends AbstractSql
 		super( sessionFactory, statement );
 	}

+	// Sybase does not allow CASE expressions where all result arms contain plain parameters.
+	// At least one result arm must provide some type context for inference,
+	// so we cast the first result arm if we encounter this condition
+
+	@Override
+	protected void visitAnsiCaseSearchedExpression(
+			CaseSearchedExpression caseSearchedExpression,
+			Consumer<Expression> resultRenderer) {
+		if ( getParameterRenderingMode() == SqlAstNodeRenderingMode.DEFAULT && areAllResultsParameters( caseSearchedExpression ) ) {
+			final List<CaseSearchedExpression.WhenFragment> whenFragments = caseSearchedExpression.getWhenFragments();
+			final Expression firstResult = whenFragments.get( 0 ).getResult();
+			super.visitAnsiCaseSearchedExpression(
+					caseSearchedExpression,
+					e -> {
+						if ( e == firstResult ) {
+							renderCasted( e );
+						}
+						else {
+							resultRenderer.accept( e );
+						}
+					}
+			);
+		}
+		else {
+			super.visitAnsiCaseSearchedExpression( caseSearchedExpression, resultRenderer );
+		}
+	}
+
+	@Override
+	protected void visitAnsiCaseSimpleExpression(
+			CaseSimpleExpression caseSimpleExpression,
+			Consumer<Expression> resultRenderer) {
+		if ( getParameterRenderingMode() == SqlAstNodeRenderingMode.DEFAULT && areAllResultsParameters( caseSimpleExpression ) ) {
+			final List<CaseSimpleExpression.WhenFragment> whenFragments = caseSimpleExpression.getWhenFragments();
+			final Expression firstResult = whenFragments.get( 0 ).getResult();
+			super.visitAnsiCaseSimpleExpression(
+					caseSimpleExpression,
+					e -> {
+						if ( e == firstResult ) {
+							renderCasted( e );
+						}
+						else {
+							resultRenderer.accept( e );
+						}
+					}
+			);
+		}
+		else {
+			super.visitAnsiCaseSimpleExpression( caseSimpleExpression, resultRenderer );
+		}
+	}
+
 	@Override
 	protected boolean renderTableReference(TableReference tableReference, LockMode lockMode) {
 		super.renderTableReference( tableReference, lockMode );
File diff suppressed because it is too large
@@ -0,0 +1,39 @@
+/*
+ * Hibernate, Relational Persistence for Idiomatic Java
+ *
+ * License: GNU Lesser General Public License (LGPL), version 2.1 or later
+ * See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
+ */
+package org.hibernate.dialect.function;
+
+import java.util.List;
+
+import org.hibernate.query.sqm.function.AbstractSqmSelfRenderingFunctionDescriptor;
+import org.hibernate.query.sqm.produce.function.StandardArgumentsValidators;
+import org.hibernate.query.sqm.produce.function.StandardFunctionReturnTypeResolvers;
+import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
+import org.hibernate.sql.ast.SqlAstTranslator;
+import org.hibernate.sql.ast.spi.SqlAppender;
+import org.hibernate.sql.ast.tree.SqlAstNode;
+import org.hibernate.type.StandardBasicTypes;
+
+public class DerbyConcatFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
+	public DerbyConcatFunction() {
+		super(
+				"concat",
+				StandardArgumentsValidators.min( 1 ),
+				StandardFunctionReturnTypeResolvers.invariant( StandardBasicTypes.STRING )
+		);
+	}
+
+	@Override
+	public void render(SqlAppender sqlAppender, List<SqlAstNode> sqlAstArguments, SqlAstTranslator<?> walker) {
+		sqlAppender.appendSql( '(' );
+		walker.render( sqlAstArguments.get( 0 ), SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
+		for ( int i = 1; i < sqlAstArguments.size(); i++ ) {
+			sqlAppender.appendSql( "||" );
+			walker.render( sqlAstArguments.get( i ), SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
+		}
+		sqlAppender.appendSql( ')' );
+	}
+}
@@ -34,7 +34,7 @@ public class FieldFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
 		sqlAppender.appendSql( "field(" );
 		sqlAstArguments.get( 0 ).accept( walker );
 		for ( int i = 1; i < sqlAstArguments.size(); i++ ) {
-			sqlAppender.appendSql( ", " );
+			sqlAppender.appendSql( ',' );

 			final SqlAstNode argument = sqlAstArguments.get( i );
 			final SqlTuple sqlTuple = SqlTupleContainer.getSqlTuple( argument );
@@ -42,7 +42,7 @@ public class FieldFunction extends AbstractSqmSelfRenderingFunctionDescriptor {
 				final List<? extends Expression> expressions = sqlTuple.getExpressions();
 				for ( int j = 0; j < expressions.size(); j++ ) {
 					if ( j != 0 ) {
-						sqlAppender.appendSql( ", " );
+						sqlAppender.appendSql( ',' );
 					}
 					expressions.get( j ).accept( walker );
 				}
@@ -46,12 +46,12 @@ public class QuantifiedLeastGreatestEmulation
 			sqlAppender.appendSql( " when " );
 			arguments.get( i ).accept( walker );
 			sqlAppender.appendSql( operator );
-			sqlAppender.appendSql( " all(" );
+			sqlAppender.appendSql( "all(" );
 			String separator = "";
 			for ( int j = i + 1; j < numberOfArguments; j++ ) {
 				sqlAppender.appendSql( separator );
 				arguments.get( j ).accept( walker );
-				separator = ", ";
+				separator = ",";
 			}
 			sqlAppender.appendSql( ") then " );
 			arguments.get( i ).accept( walker );
@@ -63,7 +63,7 @@ public class TransactSQLStrFunction extends CastStrEmulation implements Function
 		sqlAppender.appendSql( "str(" );
 		arguments.get( 0 ).accept( walker );
 		for ( int i = 1; i < arguments.size(); i++ ) {
-			sqlAppender.appendSql( ", " );
+			sqlAppender.appendSql( ',' );
 			arguments.get( i ).accept( walker );
 		}
 		sqlAppender.appendSql( ')' );
@@ -22,9 +22,9 @@ public class LegacyDB2LimitHandler extends AbstractLimitHandler {
 	public String processSql(String sql, RowSelection selection) {
 		if ( hasFirstRow( selection ) ) {
 			//nest the main query in an outer select
-			return "select * from ( select row_.*, rownumber() over(order by order of row_) as rownumber_ from ( "
+			return "select * from (select row_.*,rownumber() over(order by order of row_) as rownumber_ from ("
 					+ sql + fetchFirstRows( selection )
-					+ " ) as row_ ) as query_ where rownumber_ > "
+					+ ") as row_) as query_ where rownumber_>"
 					+ selection.getFirstRow()
 					+ " order by rownumber_";
 		}
@@ -43,9 +43,9 @@ public class LegacyDB2LimitHandler extends AbstractLimitHandler {
 	public String processSql(String sql, Limit limit) {
 		if ( hasFirstRow( limit ) ) {
 			//nest the main query in an outer select
-			return "select * from ( select row_.*, rownumber() over(order by order of row_) as rownumber_ from ( "
+			return "select * from (select row_.*,rownumber() over(order by order of row_) as rownumber_ from ("
 					+ sql + fetchFirstRows( limit )
-					+ " ) as row_ ) as query_ where rownumber_ > "
+					+ ") as row_) as query_ where rownumber_>"
 					+ limit.getFirstRow()
 					+ " order by rownumber_";
 		}
@@ -38,16 +38,16 @@ public class LegacyOracleLimitHandler extends AbstractLimitHandler {

 		final StringBuilder pagingSelect = new StringBuilder( sql.length() + 100 );
 		if ( hasOffset ) {
-			pagingSelect.append( "select * from (select row_.*, rownum rownum_ from (" ).append( sql );
+			pagingSelect.append( "select * from (select row_.*,rownum rownum_ from (" ).append( sql );
 			if ( version < 900 ) {
-				pagingSelect.append( ") row_) where rownum_ <= ? and rownum_ > ?" );
+				pagingSelect.append( ") row_) where rownum_<=? and rownum_>?" );
 			}
 			else {
-				pagingSelect.append( ") row_ where rownum <= ?) where rownum_ > ?" );
+				pagingSelect.append( ") row_ where rownum<=?) where rownum_>?" );
 			}
 		}
 		else {
-			pagingSelect.append( "select * from (" ).append( sql ).append( ") where rownum <= ?" );
+			pagingSelect.append( "select * from (" ).append( sql ).append( ") where rownum<=?" );
 		}

 		if ( forUpdateClause != null ) {
@@ -73,16 +73,16 @@ public class LegacyOracleLimitHandler extends AbstractLimitHandler {

 		final StringBuilder pagingSelect = new StringBuilder( sql.length() + 100 );
 		if ( hasOffset ) {
-			pagingSelect.append( "select * from (select row_.*, rownum rownum_ from (" ).append( sql );
+			pagingSelect.append( "select * from (select row_.*,rownum rownum_ from (" ).append( sql );
 			if ( version < 900 ) {
-				pagingSelect.append( ") row_) where rownum_ <= ? and rownum_ > ?" );
+				pagingSelect.append( ") row_) where rownum_<=? and rownum_>?" );
 			}
 			else {
-				pagingSelect.append( ") row_ where rownum <= ?) where rownum_ > ?" );
+				pagingSelect.append( ") row_ where rownum<=?) where rownum_>?" );
 			}
 		}
 		else {
-			pagingSelect.append( "select * from (" ).append( sql ).append( ") where rownum <= ?" );
+			pagingSelect.append( "select * from (" ).append( sql ).append( ") where rownum<=?" );
 		}

 		if ( forUpdateClause != null ) {
@@ -25,7 +25,7 @@ public class LimitLimitHandler extends AbstractSimpleLimitHandler {

 	@Override
 	protected String limitClause(boolean hasFirstRow) {
-		return hasFirstRow ? " limit ?, ?" : " limit ?";
+		return hasFirstRow ? " limit ?,?" : " limit ?";
 	}

 	private static final Pattern FOR_UPDATE_PATTERN =
@@ -146,15 +146,15 @@ public class Oracle12LimitHandler extends AbstractLimitHandler {

 		if ( hasFirstRow ) {
 			pagingSelect = new StringBuilder( sql.length() + forUpdateClauseLength + 98 );
-			pagingSelect.append( "select * from ( select row_.*, rownum rownum_ from ( " );
+			pagingSelect.append( "select * from (select row_.*,rownum rownum_ from (" );
 			pagingSelect.append( sql );
-			pagingSelect.append( " ) row_ where rownum <= ?) where rownum_ > ?" );
+			pagingSelect.append( ") row_ where rownum<=?) where rownum_>?" );
 		}
 		else {
 			pagingSelect = new StringBuilder( sql.length() + forUpdateClauseLength + 37 );
-			pagingSelect.append( "select * from ( " );
+			pagingSelect.append( "select * from (" );
 			pagingSelect.append( sql );
-			pagingSelect.append( " ) where rownum <= ?" );
+			pagingSelect.append( ") where rownum<=?" );
 		}

 		if ( isForUpdate ) {
@@ -108,9 +108,9 @@ public class SQLServer2005LimitHandler extends AbstractLimitHandler {

 			String aliases = selectAliases( sql, afterSelectOffset, fromOffset, result ); //warning: changes result by side-effect
 			result.insert( selectOffset, ( hasCommonTables ? "," : "with" )
-					+ " query_ as (select row_.*, row_number() over (order by current_timestamp) as rownumber_ from (" )
+					+ " query_ as (select row_.*,row_number() over (order by current_timestamp) as rownumber_ from (" )
 					.append( ") row_) select " ).append( aliases )
-					.append( " from query_ where rownumber_ >= ? and rownumber_ < ?" );
+					.append( " from query_ where rownumber_>=? and rownumber_<?" );
 		}

 		return result.toString();
@@ -186,9 +186,9 @@ public class SQLServer2005LimitHandler extends AbstractLimitHandler {

 			String aliases = selectAliases( sql, afterSelectOffset, fromOffset, result ); //warning: changes result by side-effect
 			result.insert( selectOffset, ( hasCommonTables ? "," : "with" )
-					+ " query_ as (select row_.*, row_number() over (order by current_timestamp) as rownumber_ from (" )
+					+ " query_ as (select row_.*,row_number() over (order by current_timestamp) as rownumber_ from (" )
 					.append( ") row_) select " ).append( aliases )
-					.append( " from query_ where rownumber_ >= ? and rownumber_ < ?" );
+					.append( " from query_ where rownumber_>=? and rownumber_<?" );
 		}

 		return result.toString();
@@ -272,7 +272,7 @@ public class SQLServer2005LimitHandler extends AbstractLimitHandler {
 		}
 		while ( offset < fromOffset );

-		return String.join( ", ", aliases );
+		return String.join( ",", aliases );
 	}

 	private int getAliasIndex(String sql) {
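The limit-handler changes above only trim optional whitespace, but the resulting pagination wrapper is easier to see on a concrete query. The following standalone sketch mirrors the string concatenation visible in the Oracle hunks (offset plus limit, Oracle 9i or later); the sample HQL-like query is just a placeholder.

public class OracleLegacyPagingSketch {
    // Illustrative only: reproduces the rownum wrapper emitted after this change.
    static String wrapWithRownum(String sql) {
        StringBuilder pagingSelect = new StringBuilder( sql.length() + 100 );
        pagingSelect.append( "select * from (select row_.*,rownum rownum_ from (" ).append( sql );
        pagingSelect.append( ") row_ where rownum<=?) where rownum_>?" );
        return pagingSelect.toString();
    }

    public static void main(String[] args) {
        // Prints:
        // select * from (select row_.*,rownum rownum_ from (select p.name from Person p) row_ where rownum<=?) where rownum_>?
        System.out.println( wrapWithRownum( "select p.name from Person p" ) );
    }
}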
@@ -19,6 +19,6 @@ public final class DerbySequenceSupport extends DB2SequenceSupport {

 	@Override
 	public String getSelectSequencePreviousValString(String sequenceName) throws MappingException {
-		return "SYSCS_UTIL.SYSCS_PEEK_AT_SEQUENCE('HIBERNATE_ORM_TEST', '" + sequenceName.toUpperCase() + "')";
+		return "SYSCS_UTIL.SYSCS_PEEK_AT_SEQUENCE('HIBERNATE_ORM_TEST','" + sequenceName.toUpperCase() + "')";
 	}
 }
@@ -19,6 +19,6 @@ public final class SQLServerSequenceSupport extends ANSISequenceSupport {

 	@Override
 	public String getSequencePreviousValString(String sequenceName) throws MappingException {
-		return "select convert(varchar(200), current_value) from sys.sequences where name = '" + sequenceName + "'";
+		return "select convert(varchar(200),current_value) from sys.sequences where name='" + sequenceName + "'";
 	}
 }
@@ -27,6 +27,19 @@ public interface CollectionPart extends ModelPart, Fetchable {
 			return name;
 		}

+		public static Nature fromNameExact(String name) {
+			switch ( name ) {
+				case "{element}":
+					return ELEMENT;
+				case "{index}":
+					return INDEX;
+				case "{collection-id}":
+					return ID;
+			}
+
+			return null;
+		}
+
 		public static Nature fromName(String name) {
 			// NOTE : the `$x$` form comes form order-by handling
 			// todo (6.0) : ^^ convert these to use the `{x}` form instead?
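A short usage sketch of the `fromNameExact` lookup added above, based only on the enum constants shown in this hunk: the exact `{x}` role names resolve to a `Nature`, and anything else (for example a regular attribute name) falls through to `null` rather than being parsed loosely as `fromName` does.

import org.hibernate.metamodel.mapping.CollectionPart;

public class NatureLookupSketch {
    public static void main(String[] args) {
        System.out.println( CollectionPart.Nature.fromNameExact( "{element}" ) );      // ELEMENT
        System.out.println( CollectionPart.Nature.fromNameExact( "{index}" ) );        // INDEX
        System.out.println( CollectionPart.Nature.fromNameExact( "someAttribute" ) );  // null
    }
}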
@@ -52,10 +52,28 @@ public interface ForeignKeyDescriptor extends VirtualModelPart {

 	ModelPart getTargetPart();

+	default ModelPart getPart(Nature nature) {
+		if ( nature == Nature.KEY ) {
+			return getKeyPart();
+		}
+		else {
+			return getTargetPart();
+		}
+	}
+
 	Side getKeySide();

 	Side getTargetSide();

+	default Side getSide(Nature nature) {
+		if ( nature == Nature.KEY ) {
+			return getKeySide();
+		}
+		else {
+			return getTargetSide();
+		}
+	}
+
 	/**
 	 * Create a DomainResult for the referring-side of the fk
 	 */
@@ -241,7 +241,7 @@ public class ToOneAttributeMapping
 		}

 		this.navigableRole = navigableRole;
-		final CollectionPart.Nature nature = CollectionPart.Nature.fromName(
+		final CollectionPart.Nature nature = CollectionPart.Nature.fromNameExact(
 				getNavigableRole().getParent().getLocalName()
 		);
 		if ( nature == null ) {
@@ -6,6 +6,7 @@
 */
 package org.hibernate.metamodel.model.domain;

+import java.io.ObjectStreamException;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -649,6 +650,28 @@ public abstract class AbstractManagedType<J>
 		return DomainModelHelper.resolveSubType( this, subType, jpaMetamodel() );
 	}

+	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+	// Serialization
+
+	protected Object writeReplace() throws ObjectStreamException {
+		return new SerialForm( jpaMetamodel(), getJavaType() );
+	}
+
+	private static class SerialForm implements Serializable {
+		private final JpaMetamodel jpaMetamodel;
+		private final Class<?> typeClass;
+
+		public SerialForm(JpaMetamodel jpaMetamodel, Class<?> typeClass) {
+			this.jpaMetamodel = jpaMetamodel;
+			this.typeClass = typeClass;
+		}
+
+		private Object readResolve() {
+			return jpaMetamodel.managedType( typeClass );
+		}
+	}
+
 	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 	// Creation
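The serialization changes in this commit (here and in the attribute, entity-type and JpaMetamodel hunks below) all follow the same serialization-proxy shape: on write, substitute a tiny serial form holding a lookup key; on read, resolve back to the live metamodel object instead of materialising a copy. A minimal, self-contained sketch of that pattern with plain JDK serialization follows; the class and field names are illustrative, not Hibernate's.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamException;
import java.io.Serializable;

// A registry-backed type that should not be serialized by value.
class DomainTypeExample implements Serializable {
    static final DomainTypeExample REGISTRY_INSTANCE = new DomainTypeExample( "Person" );
    final String name;

    DomainTypeExample(String name) {
        this.name = name;
    }

    // On write, a small serial form replaces the full object graph.
    protected Object writeReplace() throws ObjectStreamException {
        return new SerialForm( name );
    }

    private static class SerialForm implements Serializable {
        private final String name;

        SerialForm(String name) {
            this.name = name;
        }

        // On read, resolve back to the canonical instance (a registry lookup in the real code).
        private Object readResolve() {
            return DomainTypeExample.REGISTRY_INSTANCE;
        }
    }
}

public class SerializationProxySketch {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try ( ObjectOutputStream out = new ObjectOutputStream( bytes ) ) {
            out.writeObject( DomainTypeExample.REGISTRY_INSTANCE );
        }
        try ( ObjectInputStream in = new ObjectInputStream( new ByteArrayInputStream( bytes.toByteArray() ) ) ) {
            Object back = in.readObject();
            // The canonical instance comes back; no duplicated metamodel state.
            System.out.println( back == DomainTypeExample.REGISTRY_INSTANCE );
        }
    }
}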
@@ -17,7 +17,7 @@ import org.hibernate.query.sqm.SqmPathSource;
 * @author Steve Ebersole
 */
 public interface MapPersistentAttribute<D,K,V> extends MapAttribute<D, K, V>, PluralPersistentAttribute<D,Map<K,V>,V> {
-	SqmPathSource getKeyPathSource();
+	SqmPathSource<K> getKeyPathSource();

 	@Override
 	SimpleDomainType<K> getKeyType();
@@ -9,11 +9,13 @@ package org.hibernate.metamodel.model.domain.internal;
 import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
+import java.io.ObjectStreamException;
 import java.io.Serializable;
 import java.lang.reflect.Member;
 import java.lang.reflect.Method;
 import javax.persistence.metamodel.Attribute;

+import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.internal.util.ReflectHelper;
 import org.hibernate.metamodel.AttributeClassification;
 import org.hibernate.metamodel.internal.MetadataContext;
@@ -106,49 +108,25 @@ public abstract class AbstractAttribute<D,J,B> implements PersistentAttribute<D,
 		return declaringType.getTypeName() + '#' + name + '(' + attributeClassification + ')';
 	}

-	/**
-	 * Used by JDK serialization...
-	 *
-	 * @param ois The input stream from which we are being read...
-	 * @throws java.io.IOException Indicates a general IO stream exception
-	 * @throws ClassNotFoundException Indicates a class resolution issue
-	 */
-	@SuppressWarnings("unchecked")
-	protected void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
-		ois.defaultReadObject();
-		final String memberDeclaringClassName = ( String ) ois.readObject();
-		final String memberName = ( String ) ois.readObject();
-		final String memberType = ( String ) ois.readObject();
-
-		final Class memberDeclaringClass = Class.forName(
-				memberDeclaringClassName,
-				false,
-				declaringType.getJavaType().getClassLoader()
-		);
-		try {
-			this.member = "method".equals( memberType )
-					? memberDeclaringClass.getMethod( memberName, ReflectHelper.NO_PARAM_SIGNATURE )
-					: memberDeclaringClass.getField( memberName );
-		}
-		catch ( Exception e ) {
-			throw new IllegalStateException(
-					"Unable to locate member [" + memberDeclaringClassName + "#"
-							+ memberName + "]"
-			);
-		}
-	}
-
-	/**
-	 * Used by JDK serialization...
-	 *
-	 * @param oos The output stream to which we are being written...
-	 * @throws IOException Indicates a general IO stream exception
-	 */
-	protected void writeObject(ObjectOutputStream oos) throws IOException {
-		oos.defaultWriteObject();
-		oos.writeObject( getJavaMember().getDeclaringClass().getName() );
-		oos.writeObject( getJavaMember().getName() );
-		// should only ever be a field or the getter-method...
-		oos.writeObject( Method.class.isInstance( getJavaMember() ) ? "method" : "field" );
-	}
+	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+	// Serialization
+
+	protected Object writeReplace() throws ObjectStreamException {
+		return new SerialForm( declaringType, name );
+	}
+
+	private static class SerialForm implements Serializable {
+		private final ManagedDomainType<?> declaringType;
+		private final String name;
+
+		public SerialForm(ManagedDomainType<?> declaringType, String name) {
+			this.declaringType = declaringType;
+			this.name = name;
+		}
+
+		private Object readResolve() {
+			return declaringType.findAttribute( name );
+		}
+	}
 }
@@ -76,6 +76,9 @@ public abstract class AbstractPluralAttribute<D, C, E>

 	@Override
 	public SqmPathSource<?> findSubPathSource(String name) {
+		if ( CollectionPart.Nature.ELEMENT.getName().equals( name ) ) {
+			return elementPathSource;
+		}
 		return elementPathSource.findSubPathSource( name );
 	}

@@ -10,7 +10,6 @@ import org.hibernate.metamodel.model.domain.AllowableFunctionReturnType;
 import org.hibernate.metamodel.model.domain.AllowableParameterType;
 import org.hibernate.metamodel.model.domain.BasicDomainType;
 import org.hibernate.query.NavigablePath;
-import org.hibernate.query.sqm.IllegalPathUsageException;
 import org.hibernate.query.sqm.SqmPathSource;
 import org.hibernate.query.sqm.tree.domain.SqmBasicValuedSimplePath;
 import org.hibernate.query.sqm.tree.domain.SqmPath;
@@ -37,7 +36,7 @@ public class BasicSqmPathSource<J>

 	@Override
 	public SqmPathSource<?> findSubPathSource(String name) {
-		throw new IllegalPathUsageException( "Basic paths cannot be dereferenced" );
+		throw new IllegalStateException( "Basic paths cannot be dereferenced" );
 	}

 	@Override
@@ -9,11 +9,12 @@ package org.hibernate.metamodel.model.domain.internal;
 import org.hibernate.metamodel.mapping.EntityDiscriminatorMapping;
 import org.hibernate.metamodel.mapping.EntityMappingType;
 import org.hibernate.metamodel.mapping.EntityValuedModelPart;
+import org.hibernate.metamodel.mapping.ModelPartContainer;
+import org.hibernate.metamodel.mapping.PluralAttributeMapping;
 import org.hibernate.metamodel.model.domain.EntityDomainType;
 import org.hibernate.query.PathException;
 import org.hibernate.query.hql.spi.SemanticPathPart;
 import org.hibernate.query.hql.spi.SqmCreationState;
-import org.hibernate.query.sqm.IllegalPathUsageException;
 import org.hibernate.query.sqm.NodeBuilder;
 import org.hibernate.query.sqm.SemanticQueryWalker;
 import org.hibernate.query.sqm.SqmPathSource;
@@ -65,7 +66,14 @@ public class DiscriminatorSqmPath extends AbstractSqmPath implements SelfInterpr
 		assert entityDescriptor.hasSubclasses();

 		final TableGroup tableGroup = sqlAstCreationState.getFromClauseAccess().getTableGroup( getLhs().getNavigablePath() );
-		final EntityMappingType entityMapping = ( (EntityValuedModelPart) tableGroup.getModelPart() ).getEntityMappingType();
+		final ModelPartContainer modelPart = tableGroup.getModelPart();
+		final EntityMappingType entityMapping;
+		if ( modelPart instanceof EntityValuedModelPart ) {
+			entityMapping = ( (EntityValuedModelPart) modelPart ).getEntityMappingType();
+		}
+		else {
+			entityMapping = (EntityMappingType) ( (PluralAttributeMapping) modelPart ).getElementDescriptor().getPartMappingType();
+		}

 		return new DiscriminatorPathInterpretation( getNavigablePath(), entityMapping, tableGroup, sqlAstCreationState );
 	}
@@ -79,7 +87,7 @@ public class DiscriminatorSqmPath extends AbstractSqmPath implements SelfInterpr

 	@Override
 	public SemanticPathPart resolvePathPart(String name, boolean isTerminal, SqmCreationState creationState) {
-		throw new IllegalPathUsageException( "Discriminator cannot be de-referenced" );
+		throw new IllegalStateException( "Discriminator cannot be de-referenced" );
 	}

 	@Override
@@ -10,7 +10,6 @@ import org.hibernate.metamodel.mapping.EntityDiscriminatorMapping;
 import org.hibernate.metamodel.mapping.EntityMappingType;
 import org.hibernate.metamodel.model.domain.DomainType;
 import org.hibernate.metamodel.model.domain.EntityDomainType;
-import org.hibernate.query.sqm.IllegalPathUsageException;
 import org.hibernate.query.sqm.SqmPathSource;
 import org.hibernate.query.sqm.tree.domain.SqmPath;

@@ -38,7 +37,7 @@ public class DiscriminatorSqmPathSource<D> extends AbstractSqmPathSource<D> {
 	}

 	@Override
-	public SqmPathSource<?> findSubPathSource(String name) throws IllegalPathUsageException {
-		throw new IllegalPathUsageException( "Entity discriminator cannot be de-referenced" );
+	public SqmPathSource<?> findSubPathSource(String name) {
+		throw new IllegalStateException( "Entity discriminator cannot be de-referenced" );
 	}
 }
@@ -6,6 +6,7 @@
 */
 package org.hibernate.metamodel.model.domain.internal;

+import java.io.ObjectStreamException;
 import java.io.Serializable;
 import javax.persistence.metamodel.EntityType;

@@ -13,6 +14,7 @@ import org.hibernate.graph.internal.SubGraphImpl;
 import org.hibernate.graph.spi.SubGraphImplementor;
 import org.hibernate.mapping.PersistentClass;
 import org.hibernate.metamodel.mapping.EntityDiscriminatorMapping;
+import org.hibernate.metamodel.mapping.EntityIdentifierMapping;
 import org.hibernate.metamodel.model.domain.AbstractIdentifiableType;
 import org.hibernate.metamodel.model.domain.DomainType;
 import org.hibernate.metamodel.model.domain.EntityDomainType;
@@ -123,7 +125,7 @@ public class EntityTypeImpl<J>
 			return attribute;
 		}

-		if ( "id".equalsIgnoreCase( name ) ) {
+		if ( "id".equalsIgnoreCase( name ) || EntityIdentifierMapping.ROLE_LOCAL_NAME.equals( name ) ) {
 			//noinspection unchecked
 			final SingularPersistentAttribute<J, ?> idAttribute = findIdAttribute();
 			//noinspection RedundantIfStatement
@@ -188,4 +190,26 @@ public class EntityTypeImpl<J>
 				"EntityType cannot be used to create an SqmPath - that would be an SqmFrom which are created directly"
 		);
 	}

+	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+	// Serialization
+
+	protected Object writeReplace() throws ObjectStreamException {
+		return new SerialForm( jpaMetamodel(), getHibernateEntityName() );
+	}
+
+	private static class SerialForm implements Serializable {
+		private final JpaMetamodel jpaMetamodel;
+		private final String hibernateEntityName;
+
+		public SerialForm(JpaMetamodel jpaMetamodel, String hibernateEntityName) {
+			this.jpaMetamodel = jpaMetamodel;
+			this.hibernateEntityName = hibernateEntityName;
+		}
+
+		private Object readResolve() {
+			return jpaMetamodel.entity( hibernateEntityName );
+		}
+	}
 }
@@ -6,6 +6,8 @@
 */
 package org.hibernate.metamodel.model.domain.internal;

+import java.io.ObjectStreamException;
+import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashSet;
@@ -24,10 +26,12 @@ import javax.persistence.metamodel.EmbeddableType;
 import javax.persistence.metamodel.EntityType;
 import javax.persistence.metamodel.ManagedType;

+import org.hibernate.SessionFactory;
 import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
 import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
 import org.hibernate.boot.spi.MetadataImplementor;
 import org.hibernate.cfg.annotations.NamedEntityGraphDefinition;
+import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.graph.internal.RootGraphImpl;
 import org.hibernate.graph.spi.AttributeNodeImplementor;
 import org.hibernate.graph.spi.GraphImplementor;
@@ -58,7 +62,7 @@ import org.hibernate.type.spi.TypeConfiguration;
 /**
 * @author Steve Ebersole
 */
-public class JpaMetamodelImpl implements JpaMetamodel {
+public class JpaMetamodelImpl implements JpaMetamodel, Serializable {
 	private static final EntityManagerMessageLogger log = HEMLogging.messageLogger( JpaMetamodel.class );
 	private static final ImportInfo<?> INVALID_IMPORT = new ImportInfo<>( null, null );

@@ -655,4 +659,24 @@ public class JpaMetamodelImpl implements JpaMetamodel {
 		context.registerMappedSuperclassType( mappedSuperclass, mappedSuperclassType );
 		return mappedSuperclassType;
 	}

+	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+	// Serialization
+
+	private Object writeReplace() throws ObjectStreamException {
+		return new SerialForm( typeConfiguration.getSessionFactory() );
+	}
+
+	private static class SerialForm implements Serializable {
+		private final SessionFactoryImplementor sessionFactory;
+
+		public SerialForm(SessionFactoryImplementor sessionFactory) {
+			this.sessionFactory = sessionFactory;
+		}
+
+		private Object readResolve() {
+			return sessionFactory.getJpaMetamodel();
+		}
+	}
 }
@@ -9,6 +9,7 @@ package org.hibernate.metamodel.model.domain.internal;
 import java.util.List;

 import org.hibernate.metamodel.internal.MetadataContext;
+import org.hibernate.metamodel.mapping.CollectionPart;
 import org.hibernate.metamodel.model.domain.ListPersistentAttribute;
 import org.hibernate.query.sqm.SqmPathSource;
 import org.hibernate.query.hql.spi.SqmCreationState;
@@ -44,6 +45,20 @@ class ListAttributeImpl<X, E> extends AbstractPluralAttribute<X, List<E>, E> imp
 		return indexPathSource;
 	}

+	@Override
+	public SqmPathSource<?> findSubPathSource(String name) {
+		final CollectionPart.Nature nature = CollectionPart.Nature.fromNameExact( name );
+		if ( nature != null ) {
+			switch ( nature ) {
+				case INDEX:
+					return indexPathSource;
+				case ELEMENT:
+					return getElementPathSource();
+			}
+		}
+		return getElementPathSource().findSubPathSource( name );
+	}
+
 	@Override
 	public SqmAttributeJoin createSqmJoin(
 			SqmFrom lhs,
@@ -55,6 +55,20 @@ class MapAttributeImpl<X, K, V> extends AbstractPluralAttribute<X, Map<K, V>, V>
 		return getKeyPathSource();
 	}

+	@Override
+	public SqmPathSource<?> findSubPathSource(String name) {
+		final CollectionPart.Nature nature = CollectionPart.Nature.fromNameExact( name );
+		if ( nature != null ) {
+			switch ( nature ) {
+				case INDEX:
+					return keyPathSource;
+				case ELEMENT:
+					return getElementPathSource();
+			}
+		}
+		return getElementPathSource().findSubPathSource( name );
+	}
+
 	@Override
 	public SimpleDomainType<K> getKeyType() {
 		return (SimpleDomainType<K>) keyPathSource.getSqmPathType();
@@ -7,7 +7,6 @@
 package org.hibernate.metamodel.model.domain.internal;

 import org.hibernate.metamodel.model.domain.ManagedDomainType;
-import org.hibernate.query.sqm.IllegalPathUsageException;
 import org.hibernate.query.sqm.SqmPathSource;
 import org.hibernate.query.sqm.tree.domain.NonAggregatedCompositeSimplePath;
 import org.hibernate.query.sqm.tree.domain.SqmPath;
@@ -31,7 +30,7 @@ public class NonAggregatedCompositeSqmPathSource extends AbstractSqmPathSource i
 	}

 	@Override
-	public SqmPathSource<?> findSubPathSource(String name) throws IllegalPathUsageException {
+	public SqmPathSource<?> findSubPathSource(String name) {
 		return (SqmPathSource<?>) getSqmPathType().findAttribute( name );
 	}

@@ -221,7 +221,7 @@ public class SingularAttributeImpl<D,J>
 	}

 	@Override
-	public SqmPath<J> createSqmPath(SqmPath lhs) {
+	public SqmPath<J> createSqmPath(SqmPath<?> lhs) {
 		return sqmPathSource.createSqmPath( lhs );
 	}

@@ -1289,30 +1289,6 @@ public abstract class AbstractEntityPersister
 				tableGroup,
 				creationState
 		);
-		if ( discriminatorMapping != null ) {
-			discriminatorMapping.applySqlSelections(
-					navigablePath.append( discriminatorMapping.getPartName() ),
-					tableGroup,
-					creationState
-			);
-		}
-		if ( versionMapping != null ) {
-			versionMapping.applySqlSelections(
-					navigablePath.append( versionMapping.getPartName() ),
-					tableGroup,
-					creationState
-			);
-		}
-		for ( int i = 0; i < attributeMappings.size(); i++ ) {
-			final AttributeMapping attributeMapping = attributeMappings.get( i );
-			if ( attributeMapping instanceof SingularAttributeMapping ) {
-				attributeMapping.applySqlSelections(
-						navigablePath.append( attributeMapping.getPartName() ),
-						tableGroup,
-						creationState
-				);
-			}
-		}
 	}

 	@Override
@@ -1327,33 +1303,6 @@ public abstract class AbstractEntityPersister
 				creationState,
 				selectionConsumer
 		);
-		if ( discriminatorMapping != null ) {
-			discriminatorMapping.applySqlSelections(
-					navigablePath.append( discriminatorMapping.getPartName() ),
-					tableGroup,
-					creationState,
-					selectionConsumer
-			);
-		}
-		if ( versionMapping != null ) {
-			versionMapping.applySqlSelections(
-					navigablePath.append( versionMapping.getPartName() ),
-					tableGroup,
-					creationState,
-					selectionConsumer
-			);
-		}
-		for ( int i = 0; i < attributeMappings.size(); i++ ) {
-			final AttributeMapping attributeMapping = attributeMappings.get( i );
-			if ( attributeMapping instanceof SingularAttributeMapping ) {
-				attributeMapping.applySqlSelections(
-						navigablePath.append( attributeMapping.getPartName() ),
-						tableGroup,
-						creationState,
-						selectionConsumer
-				);
-			}
-		}
 	}

 	@Override
@@ -87,7 +87,7 @@ public enum ComparisonOperator {

 		@Override
 		public String sqlText() {
-			return "is not distinct from";
+			return " is not distinct from ";
 		}
 	},

@@ -114,7 +114,7 @@ public enum ComparisonOperator {

 		@Override
 		public String sqlText() {
-			return "is distinct from";
+			return " is distinct from ";
 		}
 	},

@@ -6,6 +6,8 @@
 */
 package org.hibernate.query;

+import java.io.Serializable;
+
 import org.hibernate.internal.util.StringHelper;

 /**
@@ -14,7 +16,7 @@ import org.hibernate.internal.util.StringHelper;
 *
 * @author Steve Ebersole
 */
-public class NavigablePath implements DotIdentifierSequence {
+public class NavigablePath implements DotIdentifierSequence, Serializable {
 	public static final String IDENTIFIER_MAPPER_PROPERTY = "_identifierMapper";

 	private final NavigablePath parent;
@@ -11,6 +11,7 @@ import java.util.function.Consumer;
 import javax.persistence.Parameter;

 import org.hibernate.Incubating;
+import org.hibernate.metamodel.model.domain.AllowableParameterType;


 /**
@@ -57,6 +58,10 @@ public interface ParameterMetadata {
 	 */
 	QueryParameter<?> resolve(Parameter param);

+	default <T> AllowableParameterType<T> getInferredParameterType(QueryParameter<T> parameter) {
+		return null;
+	}
+
 	/**
 	 * Is this parameter reference registered in this collection?
 	 */
@@ -145,6 +145,9 @@ public interface HibernateCriteriaBuilder extends CriteriaBuilder {
 JpaCompoundSelection<Object[]> array(Selection<?>[] selections);
 JpaCompoundSelection<Object[]> array(List<? extends JpaSelection<?>> selections);

+<Y> JpaCompoundSelection<Y> array(Class<Y> resultClass, Selection<?>[] selections);
+<Y> JpaCompoundSelection<Y> array(Class<Y> resultClass, List<? extends JpaSelection<?>> selections);
+

 // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 // Expressions
@@ -272,6 +275,8 @@ public interface HibernateCriteriaBuilder extends CriteriaBuilder {
 @Override
 <T> JpaParameterExpression<T> parameter(Class<T> paramClass, String name);

+<T> JpaParameterExpression<T> parameter(Class<T> paramClass, T value);
+
 @Override
 JpaExpression<String> concat(Expression<String> x, Expression<String> y);

@@ -423,13 +428,12 @@ public interface HibernateCriteriaBuilder extends CriteriaBuilder {
 * @apiNote This is different from the purely JPA form
 * {@link CriteriaBuilder#tuple} which is intended only for use as
 * the selection in a root query.
-*
-* @param tupleType The Java type
+*@param tupleType The Java type
 * @param expressions The individual expressions making up the tuple
 */
 <R> JpaCompoundSelection<R> tuple(
 Class<R> tupleType,
-List<JpaExpression<?>> expressions);
+List<? extends JpaExpression<?>> expressions);

 /**
 * Create a tuple, as in a composite value, usable in any
@@ -459,7 +463,7 @@ public interface HibernateCriteriaBuilder extends CriteriaBuilder {
 */
 <R> JpaCompoundSelection<R> tuple(
 DomainType<R> tupleType,
-List<JpaExpression<?>> expressions);
+List<? extends JpaExpression<?>> expressions);


 // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -671,7 +675,7 @@ public interface HibernateCriteriaBuilder extends CriteriaBuilder {
 @SuppressWarnings("unchecked")
 <T> JpaInPredicate<T> in(Expression<? extends T> expression, T... values);

-<T> JpaInPredicate<T> in(Expression<? extends T> expression, List<T> values);
+<T> JpaInPredicate<T> in(Expression<? extends T> expression, Collection<T> values);

 @Override
 JpaPredicate exists(Subquery<?> subquery);
@@ -724,7 +728,7 @@ public interface HibernateCriteriaBuilder extends CriteriaBuilder {
 *
 * @return size expression
 */
-<M extends Map<?,?>> JpaExpression<Integer> mapSize(M map);
+<M extends Map<?, ?>> JpaExpression<Integer> mapSize(M map);


 // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

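A hedged usage sketch of the signatures added or widened above, the typed array(Class, ...) selection and the Collection-based in(...). The Person entity, its attributes and the method name are assumptions for illustration; with Hibernate as the JPA provider the CriteriaBuilder is assumed to be castable to HibernateCriteriaBuilder:

    import java.util.Set;

    import javax.persistence.Entity;
    import javax.persistence.EntityManager;
    import javax.persistence.Id;
    import javax.persistence.criteria.CriteriaQuery;
    import javax.persistence.criteria.Root;
    import javax.persistence.criteria.Selection;

    import org.hibernate.query.criteria.HibernateCriteriaBuilder;
    import org.hibernate.query.criteria.JpaCompoundSelection;

    public class CriteriaArraySketch {

        // Hypothetical entity, only here to make the sketch self-contained.
        @Entity
        public static class Person {
            @Id
            Long id;
            String name;
            String nickName;
        }

        public static void selectNamePairs(EntityManager em) {
            final HibernateCriteriaBuilder cb = (HibernateCriteriaBuilder) em.getCriteriaBuilder();

            final CriteriaQuery<Object[]> query = cb.createQuery( Object[].class );
            final Root<Person> person = query.from( Person.class );

            // New overload: array( Class, Selection[] ) with an explicit result class
            final JpaCompoundSelection<Object[]> selection = cb.array(
                    Object[].class,
                    new Selection<?>[] { person.get( "name" ), person.get( "nickName" ) }
            );

            query.select( selection )
                    // in(...) now accepts any Collection, not only List
                    .where( cb.in( person.<String>get( "name" ), Set.of( "John", "Jane" ) ) );

            em.createQuery( query ).getResultList();
        }
    }
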
@@ -6,7 +6,13 @@
 */
 package org.hibernate.query.criteria;

+import java.util.Collection;
+import java.util.Map;
+import javax.persistence.criteria.Expression;
 import javax.persistence.criteria.Path;
+import javax.persistence.metamodel.MapAttribute;
+import javax.persistence.metamodel.PluralAttribute;
+import javax.persistence.metamodel.SingularAttribute;

 import org.hibernate.metamodel.model.domain.EntityDomainType;
 import org.hibernate.query.NavigablePath;
@@ -46,4 +52,19 @@ public interface JpaPath<T> extends JpaExpression<T>, Path<T> {
 default JpaPath<?> getParentPath() {
 return getLhs();
 }
+
+@Override
+<Y> JpaPath<Y> get(SingularAttribute<? super T, Y> attribute);
+
+@Override
+<E, C extends Collection<E>> JpaExpression<C> get(PluralAttribute<T, C, E> collection);
+
+@Override
+<K, V, M extends Map<K, V>> JpaExpression<M> get(MapAttribute<T, K, V> map);
+
+@Override
+JpaExpression<Class<? extends T>> type();
+
+@Override
+<Y> JpaPath<Y> get(String attributeName);
 }

@@ -289,6 +289,7 @@ public class QuerySplitter {
 final SqmRoot<?> copy = new SqmRoot<>(
 pathSource,
 sqmRoot.getExplicitAlias(),
+sqmRoot.isAllowJoins(),
 sqmRoot.nodeBuilder()
 );
 return (SqmRoot<?>) getProcessingStateStack().getCurrent().getPathRegistry().resolvePath(
@@ -632,7 +633,7 @@ public class QuerySplitter {


 @Override
-public SqmPositionalParameter visitPositionalParameterExpression(SqmPositionalParameter expression) {
+public SqmPositionalParameter visitPositionalParameterExpression(SqmPositionalParameter<?> expression) {
 return new SqmPositionalParameter(
 expression.getPosition(),
 expression.allowMultiValuedBinding(),
@@ -641,7 +642,7 @@ public class QuerySplitter {
 }

 @Override
-public SqmNamedParameter visitNamedParameterExpression(SqmNamedParameter expression) {
+public SqmNamedParameter visitNamedParameterExpression(SqmNamedParameter<?> expression) {
 return new SqmNamedParameter(
 expression.getName(),
 expression.allowMultiValuedBinding(),
@@ -650,12 +651,12 @@ public class QuerySplitter {
 }

 @Override
-public SqmLiteralEntityType visitEntityTypeLiteralExpression(SqmLiteralEntityType expression) {
+public SqmLiteralEntityType visitEntityTypeLiteralExpression(SqmLiteralEntityType<?> expression) {
 return new SqmLiteralEntityType( expression.getNodeType(), expression.nodeBuilder() );
 }

 @Override
-public SqmUnaryOperation visitUnaryOperationExpression(SqmUnaryOperation expression) {
+public SqmUnaryOperation visitUnaryOperationExpression(SqmUnaryOperation<?> expression) {
 return new SqmUnaryOperation(
 expression.getOperation(),
 (SqmExpression) expression.getOperand().accept( this )
@@ -672,7 +673,7 @@ public class QuerySplitter {
 }

 @Override
-public SqmBinaryArithmetic visitBinaryArithmeticExpression(SqmBinaryArithmetic expression) {
+public SqmBinaryArithmetic visitBinaryArithmeticExpression(SqmBinaryArithmetic<?> expression) {
 return new SqmBinaryArithmetic(
 expression.getOperator(), (SqmExpression) expression.getLeftHandOperand().accept( this ),
 (SqmExpression) expression.getRightHandOperand().accept( this ),
@@ -682,7 +683,7 @@ public class QuerySplitter {
 }

 @Override
-public SqmSubQuery visitSubQueryExpression(SqmSubQuery expression) {
+public SqmSubQuery visitSubQueryExpression(SqmSubQuery<?> expression) {
 // its not supported for a SubQuery to define a dynamic instantiation, so
 // any "selectable node" will only ever be an SqmExpression
 return new SqmSubQuery(

@@ -43,6 +43,7 @@ import org.hibernate.grammars.hql.HqlParserBaseVisitor;
 import org.hibernate.internal.util.collections.Stack;
 import org.hibernate.internal.util.collections.StandardStack;
 import org.hibernate.metamodel.CollectionClassification;
+import org.hibernate.metamodel.mapping.CollectionPart;
 import org.hibernate.metamodel.model.domain.AllowableFunctionReturnType;
 import org.hibernate.metamodel.model.domain.BasicDomainType;
 import org.hibernate.metamodel.model.domain.DomainType;
@@ -54,6 +55,7 @@ import org.hibernate.metamodel.model.domain.SingularPersistentAttribute;
 import org.hibernate.query.BinaryArithmeticOperator;
 import org.hibernate.query.ComparisonOperator;
 import org.hibernate.query.FetchClauseType;
+import org.hibernate.query.NavigablePath;
 import org.hibernate.query.NullPrecedence;
 import org.hibernate.query.PathException;
 import org.hibernate.query.SemanticException;
@@ -403,6 +405,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 return new SqmRoot<>(
 (EntityDomainType<R>) visitEntityName( entityNameContext ),
 identificationVariable,
+false,
 creationContext.getNodeBuilder()
 );
 }
@@ -852,7 +855,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem

 fromClause.visitRoots(
 sqmRoot -> selectClause.addSelection(
-new SqmSelection<>( sqmRoot, sqmRoot.getExplicitAlias(), creationContext.getNodeBuilder() )
+new SqmSelection<>( sqmRoot, sqmRoot.getAlias(), creationContext.getNodeBuilder() )
 )
 );
 return selectClause;
@@ -952,8 +955,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 );
 }

-final PluralPersistentAttribute<?, ?, ?> pluralAttribute = (PluralPersistentAttribute<?, ?, ?>) sqmPath.getReferencedPathSource();
-final SqmPath<?> elementPath = pluralAttribute.getElementPathSource().createSqmPath( sqmPath );
+final SqmPath<?> elementPath = (SqmPath<?>) sqmPath.resolvePathPart( CollectionPart.Nature.ELEMENT.getName(), true, this );
 processingStateStack.getCurrent().getPathRegistry().register( elementPath );
 return elementPath;
 }
@@ -1534,7 +1536,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 }
 }

-final SqmRoot<?> sqmRoot = new SqmRoot<>( entityDescriptor, alias, creationContext.getNodeBuilder() );
+final SqmRoot<?> sqmRoot = new SqmRoot<>( entityDescriptor, alias, true, creationContext.getNodeBuilder() );

 pathRegistry.register( sqmRoot );

@@ -1688,7 +1690,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 }
 else {
 if ( getCreationOptions().useStrictJpaCompliance() ) {
-if ( join.getExplicitAlias() != null ){
+if ( join.getExplicitAlias() != null ) {
 //noinspection rawtypes
 if ( ( (SqmAttributeJoin) join ).isFetched() ) {
 throw new StrictJpaComplianceViolation(
@@ -2158,13 +2160,12 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem

 @Override
 public SqmPath<?> visitEntityIdReference(HqlParser.EntityIdReferenceContext ctx) {
-final SqmPath<?> sqmPath = consumeDomainPath( (HqlParser.PathContext) ctx.getChild( 2 ) );
+final SqmPath<Object> sqmPath = consumeDomainPath( (HqlParser.PathContext) ctx.getChild( 2 ) );
 final DomainType<?> sqmPathType = sqmPath.getReferencedPathSource().getSqmPathType();

 if ( sqmPathType instanceof IdentifiableDomainType<?> ) {
-//noinspection unchecked
-final SqmPath<?> idPath = ( (IdentifiableDomainType<?>) sqmPathType ).getIdentifierDescriptor()
-.createSqmPath( sqmPath );
+final SqmPathSource<?> identifierDescriptor = ( (IdentifiableDomainType<?>) sqmPathType ).getIdentifierDescriptor();
+final SqmPath<?> idPath = sqmPath.get( identifierDescriptor.getPathName() );

 if ( ctx.getChildCount() != 5 ) {
 return idPath;
@@ -2184,12 +2185,13 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem

 @Override
 public SqmPath<?> visitEntityVersionReference(HqlParser.EntityVersionReferenceContext ctx) {
-final SqmPath<?> sqmPath = consumeDomainPath( (HqlParser.PathContext) ctx.getChild( 2 ) );
+final SqmPath<Object> sqmPath = consumeDomainPath( (HqlParser.PathContext) ctx.getChild( 2 ) );
 final DomainType<?> sqmPathType = sqmPath.getReferencedPathSource().getSqmPathType();

 if ( sqmPathType instanceof IdentifiableDomainType<?> ) {
-final IdentifiableDomainType<?> identifiableType = (IdentifiableDomainType<?>) sqmPathType;
-final SingularPersistentAttribute<?, ?> versionAttribute = identifiableType.findVersionAttribute();
+@SuppressWarnings("unchecked")
+final IdentifiableDomainType<Object> identifiableType = (IdentifiableDomainType<Object>) sqmPathType;
+final SingularPersistentAttribute<Object, ?> versionAttribute = identifiableType.findVersionAttribute();
 if ( versionAttribute == null ) {
 throw new SemanticException(
 "`" + sqmPath.getNavigablePath().getFullPath() + "` resolved to an identifiable-type (`" +
@@ -2197,7 +2199,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 );
 }

-return versionAttribute.createSqmPath( sqmPath );
+return sqmPath.get( versionAttribute );
 }

 throw new SemanticException( "Path does not reference an identifiable-type : " + sqmPath.getNavigablePath().getFullPath() );
@@ -3614,19 +3616,20 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 selectClause.setSelection( literal );
 }
 else {
-final SqmPathSource<?> pathSource;
+final String partName;
 switch ( collectionReferenceCtx.getSymbol().getType() ) {
 case HqlParser.ELEMENTS:
-pathSource = attribute.getElementPathSource();
+partName = CollectionPart.Nature.ELEMENT.getName();
 break;
 case HqlParser.INDICES:
-pathSource = attribute.getIndexPathSource();
+partName = CollectionPart.Nature.INDEX.getName();
 break;
 default:
 throw new ParsingException( "Unexpected collection reference : " + collectionReferenceCtx.getText() );
 }
-subQuery.applyInferableType( pathSource.getSqmPathType() );
-selectClause.setSelection( pathSource.createSqmPath( collectionJoin ) );
+final SqmPath<?> path = collectionJoin.resolvePathPart( partName, true, this );
+subQuery.applyInferableType( path.getNodeType() );
+selectClause.setSelection( path );
 }
 final SqmQuerySpec<?> querySpec = subQuery.getQuerySpec();
 querySpec.setFromClause( fromClause );
@@ -3822,13 +3825,11 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem

 final SqmPathSource<?> pluralAttribute = sqmFrom.getReferencedPathSource();

-if ( !( pluralAttribute instanceof PluralPersistentAttribute ) ) {
+if ( !( pluralAttribute instanceof PluralPersistentAttribute<?, ?, ?> ) ) {
 throw new ParsingException( "Could not resolve identification variable [" + alias + "] as plural-attribute" );
 }

-return ( (PluralPersistentAttribute<?, ?, ?>) pluralAttribute ).getIndexPathSource().createSqmPath(
-sqmFrom
-);
+return sqmFrom.resolvePathPart( CollectionPart.Nature.INDEX.getName(), true, this );
 }

 @SuppressWarnings("BooleanMethodIsAlwaysInverted")
@@ -4020,6 +4021,17 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 final DotIdentifierConsumer consumer = dotIdentifierConsumerStack.pop();
 final SqmExpression<?> indexExpression = (SqmExpression<?>) ctx.getChild( 1 ).accept( this );
 final SqmAttributeJoin<?, ?> attributeJoin = (SqmAttributeJoin<?, ?>) consumer.getConsumedPart();
+final NavigablePath navigablePath = attributeJoin.getNavigablePath().getParent().append(
+attributeJoin.getNavigablePath().getLocalName(),
+indexExpression.toHqlString()
+);
+// Reuse an existing indexed path join if possible
+for ( SqmJoin<?, ?> sqmJoin : attributeJoin.getSqmJoins() ) {
+if ( sqmJoin.getNavigablePath().getLocalName().equals( navigablePath.getLocalName() ) ) {
+return sqmJoin;
+}
+}
+
 final SqmExpression<?> index;
 if ( attributeJoin instanceof SqmListJoin<?, ?> ) {
 index = ( (SqmListJoin<?, ?>) attributeJoin ).index();
@@ -4032,6 +4044,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 }
 attributeJoin.setJoinPredicate( creationContext.getNodeBuilder().equal( index, indexExpression ) );
 final SqmIndexedCollectionAccessPath<?> path = new SqmIndexedCollectionAccessPath<>(
+navigablePath,
 attributeJoin,
 indexExpression
 );
@@ -4118,7 +4131,7 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 final SqmPath<?> pluralAttributePath = consumeDomainPath( (HqlParser.PathContext) ctx.getChild( 2 ) );
 final SqmPathSource<?> referencedPathSource = pluralAttributePath.getReferencedPathSource();

-if ( !(referencedPathSource instanceof PluralPersistentAttribute ) ) {
+if ( !(referencedPathSource instanceof PluralPersistentAttribute<?, ?, ?> ) ) {
 throw new PathException(
 "Illegal attempt to treat non-plural path as a plural path : " + pluralAttributePath.getNavigablePath()
 );
@@ -4130,11 +4143,13 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 if ( attribute.getCollectionClassification() != CollectionClassification.MAP ) {
 throw new StrictJpaComplianceViolation( StrictJpaComplianceViolation.Type.VALUE_FUNCTION_ON_NON_MAP );
 }
+final TerminalNode firstNode = (TerminalNode) ctx.getChild( 0 );
+if ( firstNode.getSymbol().getType() == HqlParser.ELEMENTS ) {
+throw new StrictJpaComplianceViolation( StrictJpaComplianceViolation.Type.HQL_COLLECTION_FUNCTION );
+}
 }

-SqmPath<?> result = attribute.getElementPathSource().createSqmPath(
-pluralAttributePath
-);
+SqmPath<?> result = (SqmPath<?>) pluralAttributePath.resolvePathPart( CollectionPart.Nature.ELEMENT.getName(), true, this );

 if ( ctx.getChildCount() == 5 ) {
 result = consumeDomainPath( (HqlParser.DotIdentifierSequenceContext) ctx.getChild( 4 ).getChild( 1 ) );
@@ -4146,26 +4161,19 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem

 @Override
 public SqmPath<?> visitCollectionIndexNavigablePath(HqlParser.CollectionIndexNavigablePathContext ctx) {
+if ( getCreationOptions().useStrictJpaCompliance() ) {
+throw new StrictJpaComplianceViolation( StrictJpaComplianceViolation.Type.HQL_COLLECTION_FUNCTION );
+}
 final SqmPath<?> pluralAttributePath = consumeDomainPath( (HqlParser.PathContext) ctx.getChild( 2 ) );
 final SqmPathSource<?> referencedPathSource = pluralAttributePath.getReferencedPathSource();

-if ( !(referencedPathSource instanceof PluralPersistentAttribute ) ) {
+if ( !(referencedPathSource instanceof PluralPersistentAttribute<?, ?, ?> ) ) {
 throw new PathException(
 "Illegal attempt to treat non-plural path as a plural path : " + pluralAttributePath.getNavigablePath()
 );
 }

-final PluralPersistentAttribute<?, ?, ?> attribute = (PluralPersistentAttribute<?, ?, ?>) referencedPathSource;
-
-if ( getCreationOptions().useStrictJpaCompliance() ) {
-if ( attribute.getCollectionClassification() != CollectionClassification.MAP ) {
-throw new StrictJpaComplianceViolation( StrictJpaComplianceViolation.Type.VALUE_FUNCTION_ON_NON_MAP );
-}
-}
-
-return attribute.getIndexPathSource().createSqmPath(
-pluralAttributePath
-);
+return (SqmPath<?>) pluralAttributePath.resolvePathPart( CollectionPart.Nature.INDEX.getName(), true, this );
 }

 @Override
@@ -4176,16 +4184,12 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 SqmPath<?> result;
 if ( sqmPath instanceof SqmMapJoin ) {
 final SqmMapJoin<?, ?, ?> sqmMapJoin = (SqmMapJoin<?, ?, ?>) sqmPath;
-result = sqmMapJoin.getReferencedPathSource().getIndexPathSource().createSqmPath( sqmMapJoin );
+result = sqmMapJoin.key();
 }
 else {
 assert sqmPath instanceof SqmPluralValuedSimplePath;
 final SqmPluralValuedSimplePath<?> mapPath = (SqmPluralValuedSimplePath<?>) sqmPath;
-final SqmPath<?> keyPath = mapPath.getReferencedPathSource()
-.getIndexPathSource()
-.createSqmPath( mapPath );
-mapPath.registerReusablePath( keyPath );
-result = keyPath;
+result = mapPath.resolvePathPart( CollectionPart.Nature.INDEX.getName(), true, this );
 }

 if ( ctx.getChildCount() == 5 ) {
@@ -4195,10 +4199,11 @@ public class SemanticQueryBuilder<R> extends HqlParserBaseVisitor<Object> implem
 return result;
 }

-private SqmPath<?> consumeDomainPath(HqlParser.PathContext parserPath) {
+private <X> SqmPath<X> consumeDomainPath(HqlParser.PathContext parserPath) {
 final SemanticPathPart consumedPart = (SemanticPathPart) parserPath.accept( this );
 if ( consumedPart instanceof SqmPath ) {
-return (SqmPath<?>) consumedPart;
+//noinspection unchecked
+return (SqmPath<X>) consumedPart;
 }

 throw new SemanticException( "Expecting domain-model path, but found : " + consumedPart );

@@ -37,12 +37,12 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 private final SqmCreationProcessingState associatedProcessingState;
 private final JpaCompliance jpaCompliance;

-private final Map<NavigablePath, SqmPath> sqmPathByPath = new HashMap<>();
-private final Map<NavigablePath, SqmFrom> sqmFromByPath = new HashMap<>();
+private final Map<NavigablePath, SqmPath<?>> sqmPathByPath = new HashMap<>();
+private final Map<NavigablePath, SqmFrom<?, ?>> sqmFromByPath = new HashMap<>();

-private final Map<String, SqmFrom> sqmFromByAlias = new HashMap<>();
+private final Map<String, SqmFrom<?, ?>> sqmFromByAlias = new HashMap<>();

-private final List<SqmAliasedNode> simpleSelectionNodes = new ArrayList<>();
+private final List<SqmAliasedNode<?>> simpleSelectionNodes = new ArrayList<>();

 public SqmPathRegistryImpl(SqmCreationProcessingState associatedProcessingState) {
 this.associatedProcessingState = associatedProcessingState;
@@ -50,7 +50,7 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 }

 @Override
-public void register(SqmPath sqmPath) {
+public void register(SqmPath<?> sqmPath) {
 SqmTreeCreationLogger.LOGGER.tracef( "SqmProcessingIndex#register(SqmPath) : %s", sqmPath.getNavigablePath().getFullPath() );

 // Generally we:
@@ -61,8 +61,8 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 // Regarding part #1 (add to the path-by-path map), it is ok for a SqmFrom to replace a
 // non-SqmFrom. This should equate to, e.g., an implicit join.

-if ( sqmPath instanceof SqmFrom ) {
-final SqmFrom sqmFrom = (SqmFrom) sqmPath;
+if ( sqmPath instanceof SqmFrom<?, ?> ) {
+final SqmFrom<?, ?> sqmFrom = (SqmFrom<?, ?>) sqmPath;

 final String alias = sqmPath.getExplicitAlias();
 if ( alias != null ) {
@@ -70,7 +70,7 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 ? alias.toLowerCase( Locale.getDefault() )
 : alias;

-final SqmFrom previousFrom = sqmFromByAlias.put( aliasToUse, sqmFrom );
+final SqmFrom<?, ?> previousFrom = sqmFromByAlias.put( aliasToUse, sqmFrom );

 if ( previousFrom != null ) {
 throw new AliasCollisionException(
@@ -85,7 +85,7 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 }
 }

-final SqmFrom previousFromByPath = sqmFromByPath.put( sqmPath.getNavigablePath(), sqmFrom );
+final SqmFrom<?, ?> previousFromByPath = sqmFromByPath.put( sqmPath.getNavigablePath(), sqmFrom );

 if ( previousFromByPath != null ) {
 // this should never happen
@@ -101,7 +101,7 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 }
 }

-final SqmPath previousPath = sqmPathByPath.put( sqmPath.getNavigablePath(), sqmPath );
+final SqmPath<?> previousPath = sqmPathByPath.put( sqmPath.getNavigablePath(), sqmPath );

 if ( previousPath instanceof SqmFrom ) {
 // this should never happen
@@ -118,14 +118,15 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 }

 @Override
-public SqmPath findPath(NavigablePath path) {
-final SqmPath found = sqmPathByPath.get( path );
+public <X> SqmPath<X> findPath(NavigablePath path) {
+final SqmPath<?> found = sqmPathByPath.get( path );
 if ( found != null ) {
-return found;
+//noinspection unchecked
+return (SqmPath<X>) found;
 }

 if ( associatedProcessingState.getParentProcessingState() != null ) {
-final SqmFrom containingQueryFrom = associatedProcessingState.getParentProcessingState()
+final SqmFrom<?, X> containingQueryFrom = associatedProcessingState.getParentProcessingState()
 .getPathRegistry()
 .findFromByPath( path );
 if ( containingQueryFrom != null ) {
@@ -138,14 +139,15 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 }

 @Override
-public SqmFrom findFromByPath(NavigablePath navigablePath) {
-final SqmFrom found = sqmFromByPath.get( navigablePath );
+public <X extends SqmFrom<?, ?>> X findFromByPath(NavigablePath navigablePath) {
+final SqmFrom<?, ?> found = sqmFromByPath.get( navigablePath );
 if ( found != null ) {
-return found;
+//noinspection unchecked
+return (X) found;
 }

 if ( associatedProcessingState.getParentProcessingState() != null ) {
-final SqmFrom containingQueryFrom = associatedProcessingState.getParentProcessingState()
+final X containingQueryFrom = associatedProcessingState.getParentProcessingState()
 .getPathRegistry()
 .findFromByPath( navigablePath );
 if ( containingQueryFrom != null ) {
@@ -158,15 +160,16 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 }

 @Override
-public SqmFrom findFromByAlias(String alias) {
+public <X extends SqmFrom<?, ?>> X findFromByAlias(String alias) {
 final String localAlias = jpaCompliance.isJpaQueryComplianceEnabled()
 ? alias.toLowerCase( Locale.getDefault() )
 : alias;

-final SqmFrom registered = sqmFromByAlias.get( localAlias );
+final SqmFrom<?, ?> registered = sqmFromByAlias.get( localAlias );

 if ( registered != null ) {
-return registered;
+//noinspection unchecked
+return (X) registered;
 }

 if ( associatedProcessingState.getParentProcessingState() != null ) {
@@ -177,14 +180,14 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 }

 @Override
-public SqmFrom findFromExposing(String navigableName) {
+public <X extends SqmFrom<?, ?>> X findFromExposing(String navigableName) {
 // todo (6.0) : atm this checks every from-element every time, the idea being to make sure there
 // is only one such element obviously that scales poorly across larger from-clauses. Another
 // (configurable?) option would be to simply pick the first one as a perf optimization

-SqmFrom found = null;
-for ( Map.Entry<NavigablePath, SqmFrom> entry : sqmFromByPath.entrySet() ) {
-final SqmFrom fromElement = entry.getValue();
+SqmFrom<?, ?> found = null;
+for ( Map.Entry<NavigablePath, SqmFrom<?, ?>> entry : sqmFromByPath.entrySet() ) {
+final SqmFrom<?, ?> fromElement = entry.getValue();
 if ( definesAttribute( fromElement.getReferencedPathSource(), navigableName ) ) {
 if ( found != null ) {
 throw new IllegalStateException( "Multiple from-elements expose unqualified attribute : " + navigableName );
@@ -208,24 +211,26 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 navigableName
 );

-return found;
+//noinspection unchecked
+return (X) found;
 }

 @Override
-public SqmPath resolvePath(NavigablePath navigablePath, Function<NavigablePath, SqmPath> creator) {
+public <X> SqmPath<X> resolvePath(NavigablePath navigablePath, Function<NavigablePath, SqmPath<X>> creator) {
 SqmTreeCreationLogger.LOGGER.tracef( "SqmProcessingIndex#resolvePath(NavigablePath) : %s", navigablePath );

-final SqmPath existing = sqmPathByPath.get( navigablePath );
+final SqmPath<?> existing = sqmPathByPath.get( navigablePath );
 if ( existing != null ) {
-return existing;
+//noinspection unchecked
+return (SqmPath<X>) existing;
 }

-final SqmPath sqmPath = creator.apply( navigablePath );
+final SqmPath<X> sqmPath = creator.apply( navigablePath );
 register( sqmPath );
 return sqmPath;
 }

-private boolean definesAttribute(SqmPathSource containerType, String name) {
+private boolean definesAttribute(SqmPathSource<?> containerType, String name) {
 return containerType.findSubPathSource( name ) != null;
 }

@@ -282,7 +287,7 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 simpleSelectionNodes.add( node );
 }

-private void checkResultVariable(SqmAliasedNode selection) {
+private void checkResultVariable(SqmAliasedNode<?> selection) {
 final String alias = selection.getAlias();
 if ( alias == null ) {
 return;
@@ -300,7 +305,7 @@ public class SqmPathRegistryImpl implements SqmPathRegistry {
 );
 }

-final SqmFrom registeredFromElement = sqmFromByAlias.get( alias );
+final SqmFrom<?, ?> registeredFromElement = sqmFromByAlias.get( alias );
 if ( registeredFromElement != null ) {
 if ( !registeredFromElement.equals( selection.getSelectableNode() ) ) {
 throw new AliasCollisionException(

@@ -29,8 +29,8 @@ public interface SemanticPathPart {
 boolean isTerminal,
 SqmCreationState creationState);

-SqmPath resolveIndexedAccess(
-SqmExpression selector,
+SqmPath<?> resolveIndexedAccess(
+SqmExpression<?> selector,
 boolean isTerminal,
 SqmCreationState creationState);
 }

@@ -30,7 +30,7 @@ public interface SqmPathRegistry {
 /**
 * Register an SqmPath
 */
-void register(SqmPath sqmPath);
+void register(SqmPath<?> sqmPath);

 /**
 * Find a SqmFrom by its identification variable (alias). Will search any
@@ -38,14 +38,14 @@ public interface SqmPathRegistry {
 *
 * @return matching SqmFrom or {@code null}
 */
-SqmFrom findFromByAlias(String identificationVariable);
+<X extends SqmFrom<?, ?>> X findFromByAlias(String identificationVariable);

 /**
 * Find a SqmFrom by its NavigablePath. Will search any parent contexts as well
 *
 * @return matching SqmFrom or {@code null}
 */
-SqmFrom findFromByPath(NavigablePath navigablePath);
+<X extends SqmFrom<?, ?>> X findFromByPath(NavigablePath navigablePath);

 /**
 * Find a SqmFrom which exposes a Navigable by the given name. Will search any
@@ -53,7 +53,7 @@ public interface SqmPathRegistry {
 *
 * @return matching SqmFrom or {@code null}
 */
-SqmFrom findFromExposing(String navigableName);
+<X extends SqmFrom<?, ?>> X findFromExposing(String navigableName);

 /**
 * Find an SqmPath by its NavigablePath. Will return a SqmFrom if the NavigablePath
@@ -61,7 +61,7 @@ public interface SqmPathRegistry {
 *
 * @return matching SqmPath or {@code null}
 */
-SqmPath findPath(NavigablePath path);
+<X> SqmPath<X> findPath(NavigablePath path);

 /**
 * Similar to {@link #findPath}, but accepting a producer to be used
@@ -69,7 +69,7 @@ public interface SqmPathRegistry {
 *
 * @return The existing or just-created SqmPath
 */
-SqmPath resolvePath(NavigablePath path, Function<NavigablePath, SqmPath> creator);
+<X> SqmPath<X> resolvePath(NavigablePath path, Function<NavigablePath, SqmPath<X>> creator);


 // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@@ -9,6 +9,7 @@ package org.hibernate.query.internal;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
+import java.util.IdentityHashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
@@ -22,9 +23,11 @@ import org.hibernate.internal.util.StringHelper;
 import org.hibernate.internal.util.collections.CollectionHelper;
 import org.hibernate.internal.util.collections.IdentitySet;
 import org.hibernate.internal.util.compare.ComparableComparator;
+import org.hibernate.metamodel.model.domain.AllowableParameterType;
 import org.hibernate.query.QueryParameter;
 import org.hibernate.query.spi.ParameterMetadataImplementor;
 import org.hibernate.query.spi.QueryParameterImplementor;
+import org.hibernate.query.sqm.tree.expression.SqmParameter;

 /**
 * Encapsulates metadata about parameters encountered within a query.
@@ -37,19 +40,19 @@ public class ParameterMetadataImpl implements ParameterMetadataImplementor {
 */
 public static final ParameterMetadataImpl EMPTY = new ParameterMetadataImpl();

-private final Set<QueryParameterImplementor<?>> queryParameters;
+private final Map<QueryParameterImplementor<?>, List<SqmParameter>> queryParameters;

 private final Set<String> names;
 private final Set<Integer> labels;


 private ParameterMetadataImpl() {
-this.queryParameters = Collections.emptySet();
+this.queryParameters = Collections.emptyMap();
 this.names = Collections.emptySet();
 this.labels = Collections.emptySet();
 }

-public ParameterMetadataImpl(Set<QueryParameterImplementor<?>> queryParameters) {
+public ParameterMetadataImpl(Map<QueryParameterImplementor<?>, List<SqmParameter>> queryParameters) {
 this.queryParameters = queryParameters;

 // if we have any ordinal parameters, make sure the numbers
@@ -58,7 +61,7 @@ public class ParameterMetadataImpl implements ParameterMetadataImplementor {
 Set<String> names = null;
 Set<Integer> labels = null;

-for ( QueryParameterImplementor<?> queryParameter : queryParameters ) {
+for ( QueryParameterImplementor<?> queryParameter : queryParameters.keySet() ) {
 if ( queryParameter.getPosition() != null ) {
 if ( labels == null ) {
 labels = new HashSet<>();
@@ -90,14 +93,16 @@ public class ParameterMetadataImpl implements ParameterMetadataImplementor {
 if ( CollectionHelper.isEmpty( positionalQueryParameters )
 && CollectionHelper.isEmpty( namedQueryParameters ) ) {
 // no parameters
-this.queryParameters = Collections.emptySet();
+this.queryParameters = Collections.emptyMap();
 this.names = Collections.emptySet();
 this.labels = Collections.emptySet();
 }
 else {
-this.queryParameters = new IdentitySet<>();
+this.queryParameters = new IdentityHashMap<>();
 if ( positionalQueryParameters != null ) {
-this.queryParameters.addAll( positionalQueryParameters.values() );
+for ( QueryParameterImplementor<?> value : positionalQueryParameters.values() ) {
+this.queryParameters.put( value, Collections.emptyList() );
+}
 this.labels = positionalQueryParameters.keySet();
 verifyOrdinalParamLabels( labels );
 }
@@ -105,7 +110,9 @@ public class ParameterMetadataImpl implements ParameterMetadataImplementor {
 labels = null;
 }
 if ( namedQueryParameters != null ) {
-this.queryParameters.addAll( namedQueryParameters.values() );
+for ( QueryParameterImplementor<?> value : namedQueryParameters.values() ) {
+this.queryParameters.put( value, Collections.emptyList() );
+}
 this.names = namedQueryParameters.keySet();
 }
 else {
@@ -160,25 +167,34 @@ public class ParameterMetadataImpl implements ParameterMetadataImplementor {
 return queryParameters.size();
 }

+@Override
+public <T> AllowableParameterType<T> getInferredParameterType(QueryParameter<T> parameter) {
+final List<SqmParameter> sqmParameters = queryParameters.get( parameter );
+if ( sqmParameters == null || sqmParameters.isEmpty() ) {
+return null;
+}
+return sqmParameters.get( 0 ).getNodeType();
+}
+
 @Override
 public boolean containsReference(QueryParameter<?> parameter) {
 //noinspection SuspiciousMethodCalls
-return queryParameters.contains( parameter );
+return queryParameters.containsKey( parameter );
 }

 @Override
 public void visitParameters(Consumer<QueryParameterImplementor<?>> consumer) {
-queryParameters.forEach( consumer );
+queryParameters.keySet().forEach( consumer );
 }

 @Override
 public Set<QueryParameterImplementor<?>> getRegistrations() {
-return Collections.unmodifiableSet( queryParameters );
+return Collections.unmodifiableSet( queryParameters.keySet() );
 }

 @Override
 public boolean hasAnyMatching(Predicate<QueryParameterImplementor<?>> filter) {
-for ( QueryParameterImplementor<?> queryParameter : queryParameters ) {
+for ( QueryParameterImplementor<?> queryParameter : queryParameters.keySet() ) {
 if ( filter.test( queryParameter ) ) {
 return true;
 }
@@ -212,13 +228,20 @@ public class ParameterMetadataImpl implements ParameterMetadataImplementor {

 @Override
 public QueryParameterImplementor<?> getQueryParameter(String name) {
-for ( QueryParameterImplementor<?> queryParameter : queryParameters ) {
+for ( QueryParameterImplementor<?> queryParameter : queryParameters.keySet() ) {
 if ( name.equals( queryParameter.getName() ) ) {
 return queryParameter;
 }
 }

-return null;
+throw new IllegalArgumentException(
+String.format(
+Locale.ROOT,
+"Could not locate named parameter [%s], expecting one of [%s]",
+name,
+String.join( ", ", names )
+)
+);
 }


@@ -237,7 +260,7 @@ public class ParameterMetadataImpl implements ParameterMetadataImplementor {

 @Override
 public QueryParameterImplementor<?> getQueryParameter(int positionLabel) {
-for ( QueryParameterImplementor<?> queryParameter : queryParameters ) {
+for ( QueryParameterImplementor<?> queryParameter : queryParameters.keySet() ) {
 if ( queryParameter.getPosition() != null && queryParameter.getPosition() == positionLabel ) {
 return queryParameter;
 }

@@ -20,6 +20,7 @@ import org.hibernate.query.spi.QueryParameterBinding;
 import org.hibernate.query.spi.QueryParameterBindingTypeResolver;
 import org.hibernate.query.spi.QueryParameterBindingValidator;
 import org.hibernate.type.descriptor.WrapperOptions;
+import org.hibernate.type.descriptor.java.CoercionException;
 import org.hibernate.type.descriptor.java.JavaTypeDescriptor;
 import org.hibernate.type.spi.TypeConfiguration;
 

@@ -128,19 +129,19 @@ public class QueryParameterBindingImpl<T> implements QueryParameterBinding<T>, J
 			return false;
 		}
 
-		if ( value instanceof Collection ) {
-			setBindValues( (Collection) value );
-			return true;
-		}
-
-		if ( value.getClass().isArray() ) {
-			setBindValues( (Collection) Arrays.asList( (Object[]) value ) );
+		if ( value instanceof Collection && !isRegisteredAsBasicType( value.getClass() ) ) {
+			//noinspection unchecked
+			setBindValues( (Collection<T>) value );
 			return true;
 		}
 
 		return false;
 	}
 
+	private boolean isRegisteredAsBasicType(Class<?> valueClass) {
+		return getTypeConfiguration().getBasicTypeForJavaType( valueClass ) != null;
+	}
+
 	private void bindValue(T value) {
 		this.isBound = true;
 		this.bindValue = value;

@@ -186,8 +187,21 @@ public class QueryParameterBindingImpl<T> implements QueryParameterBinding<T>, J
 		}
 
 		if ( bindType != null ) {
+			try {
 				value = bindType.getExpressableJavaTypeDescriptor().coerce( value, this );
 			}
+			catch ( CoercionException ex ) {
+				throw new IllegalArgumentException(
+						String.format(
+								"Parameter value [%s] did not match expected type [%s (%s)]",
+								value,
+								bindType.getTypeName(),
+								temporalTypePrecision == null ? "n/a" : temporalTypePrecision.name()
+						),
+						ex
+				);
+			}
 		}
 		else if ( queryParameter.getHibernateType() != null ) {
 			value = queryParameter.getHibernateType().getExpressableJavaTypeDescriptor().coerce( value, this );
 		}

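A minimal sketch of the behaviour the coercion hunk above targets, assuming a hypothetical Person entity with a Long id: a bind value that cannot be coerced to the parameter's type surfaces as the IllegalArgumentException that JPA requires, with the CoercionException carried as its cause instead of leaking out directly.

import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Id;
import javax.persistence.TypedQuery;

public class ParameterCoercionExample {

    // hypothetical entity, used only for illustration
    @Entity(name = "Person")
    public static class Person {
        @Id
        Long id;
        String name;
    }

    public static void run(EntityManager em) {
        TypedQuery<Person> query = em.createQuery(
                "select p from Person p where p.id = :id", Person.class );
        try {
            // "abc" cannot be coerced to the Long-typed id parameter
            query.setParameter( "id", "abc" );
        }
        catch (IllegalArgumentException expected) {
            // expected at bind time (or at the latest when the query executes);
            // with the change above the CoercionException becomes its cause
            System.out.println( "rejected: " + expected.getMessage() );
        }
    }
}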
@@ -22,11 +22,13 @@ import org.hibernate.cache.spi.QueryKey;
 import org.hibernate.engine.spi.SessionFactoryImplementor;
 import org.hibernate.engine.spi.SharedSessionContractImplementor;
 import org.hibernate.metamodel.mapping.MappingModelExpressable;
+import org.hibernate.metamodel.model.domain.AllowableParameterType;
 import org.hibernate.query.QueryParameter;
 import org.hibernate.query.spi.ParameterMetadataImplementor;
 import org.hibernate.query.spi.QueryParameterBinding;
 import org.hibernate.query.spi.QueryParameterBindings;
 import org.hibernate.query.spi.QueryParameterImplementor;
+import org.hibernate.query.sqm.tree.expression.SqmParameter;
 import org.hibernate.type.descriptor.java.JavaTypeDescriptor;
 import org.hibernate.type.descriptor.java.JavaTypedExpressable;
 import org.hibernate.type.spi.TypeConfiguration;

@@ -106,7 +108,12 @@ public class QueryParameterBindingsImpl implements QueryParameterBindings {
 			);
 		}
 
-		final QueryParameterBinding<T> binding = new QueryParameterBindingImpl<>( queryParameter, sessionFactory, null, queryParametersValidationEnabled );
+		final QueryParameterBinding<T> binding = new QueryParameterBindingImpl<>(
+				queryParameter,
+				sessionFactory,
+				parameterMetadata.getInferredParameterType( queryParameter ),
+				queryParametersValidationEnabled
+		);
 		parameterBindingMap.put( queryParameter, binding );
 
 		return binding;

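Passing parameterMetadata.getInferredParameterType( queryParameter ) instead of null matters most for parameters whose Java type alone says little about the bind type. A sketch of such a case, assuming a hypothetical Person entity and using only the standard criteria API:

import java.util.List;
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Id;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.ParameterExpression;
import javax.persistence.criteria.Root;

public class InferredParameterTypeExample {

    // hypothetical entity, used only for illustration
    @Entity(name = "Person")
    public static class Person {
        @Id
        Long id;
        String name;
    }

    public static List<Person> byName(EntityManager em, Object value) {
        CriteriaBuilder cb = em.getCriteriaBuilder();
        CriteriaQuery<Person> cq = cb.createQuery( Person.class );
        Root<Person> root = cq.from( Person.class );

        // an Object-typed parameter carries no useful bind type of its own,
        // so the type has to be inferred from the Person#name path it is compared with
        ParameterExpression<Object> param = cb.parameter( Object.class );
        cq.select( root ).where( cb.equal( root.get( "name" ), param ) );

        return em.createQuery( cq )
                .setParameter( param, value )
                .getResultList();
    }
}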
@@ -73,6 +73,8 @@ public class QueryEngine {
 		final SqmTranslatorFactory sqmTranslatorFactory = resolveSqmTranslatorFactory( queryEngineOptions, dialect );
 
 		return new QueryEngine(
+				sessionFactory.getUuid(),
+				sessionFactory.getName(),
 				() -> sessionFactory.getRuntimeMetamodels().getJpaMetamodel(),
 				sessionFactory.getSessionFactoryOptions().getCriteriaValueHandlingMode(),
 				sessionFactory.getSessionFactoryOptions().getPreferredSqlTypeCodeForBoolean(),

@@ -99,6 +101,8 @@ public class QueryEngine {
 	private final int preferredSqlTypeCodeForBoolean;
 
 	public QueryEngine(
+			String uuid,
+			String name,
 			Supplier<JpaMetamodel> jpaMetamodelAccess,
 			ValueHandlingMode criteriaValueHandlingMode,
 			int preferredSqlTypeCodeForBoolean,

@@ -118,6 +122,8 @@ public class QueryEngine {
 		this.hqlTranslator = hqlTranslator;
 
 		this.criteriaBuilder = new SqmCriteriaNodeBuilder(
+				uuid,
+				name,
 				this,
 				jpaMetamodelAccess,
 				serviceRegistry,

@@ -152,6 +158,8 @@ public class QueryEngine {
 	 * Simplified constructor mainly meant for Quarkus use
 	 */
 	public QueryEngine(
+			String uuid,
+			String name,
 			JpaMetamodel jpaMetamodel,
 			ValueHandlingMode criteriaValueHandlingMode,
 			int preferredSqlTypeCodeForBoolean,

@@ -170,6 +178,8 @@ public class QueryEngine {
 		dialect.initializeFunctionRegistry( this );
 
 		this.criteriaBuilder = new SqmCriteriaNodeBuilder(
+				uuid,
+				name,
 				this,
 				() -> jpaMetamodel,
 				serviceRegistry,

@@ -1,25 +0,0 @@
-/*
- * Hibernate, Relational Persistence for Idiomatic Java
- *
- * License: GNU Lesser General Public License (LGPL), version 2.1 or later
- * See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html
- */
-package org.hibernate.query.sqm;
-
-import org.hibernate.query.SemanticException;
-
-/**
- * Indicates an attempt to use an SqmPath in an unsupported manner - e.g., an
- * attempt to de-reference a basic value
- *
- * @author Steve Ebersole
- */
-public class IllegalPathUsageException extends SemanticException {
-	public IllegalPathUsageException(String message) {
-		super( message );
-	}
-
-	public IllegalPathUsageException(String message, Exception cause) {
-		super( message, cause );
-	}
-}

@@ -409,7 +409,7 @@ public interface NodeBuilder extends HibernateCriteriaBuilder {
 	@Override
 	<R> SqmTuple<R> tuple(
 			Class<R> tupleType,
-			List<JpaExpression<?>> expressions);
+			List<? extends JpaExpression<?>> expressions);
 
 	@Override
 	<R> SqmTuple<R> tuple(

@@ -419,7 +419,7 @@ public interface NodeBuilder extends HibernateCriteriaBuilder {
 	@Override
 	<R> SqmTuple<R> tuple(
 			DomainType<R> tupleType,
-			List<JpaExpression<?>> expressions);
+			List<? extends JpaExpression<?>> expressions);
 
 	@Override
 	SqmPredicate and(Expression<Boolean> x, Expression<Boolean> y);

@@ -603,7 +603,7 @@ public interface NodeBuilder extends HibernateCriteriaBuilder {
 	<T> SqmInPredicate<T> in(Expression<? extends T> expression, T... values);
 
 	@Override
-	<T> SqmInPredicate<T> in(Expression<? extends T> expression, List<T> values);
+	<T> SqmInPredicate<T> in(Expression<? extends T> expression, Collection<T> values);
 
 	<T> SqmInPredicate<T> in(Expression<? extends T> expression, SqmSubQuery<T> subQuery);
 

@@ -620,7 +620,7 @@ public interface NodeBuilder extends HibernateCriteriaBuilder {
 	<M extends Map<?,?>> SqmExpression<Integer> mapSize(JpaExpression<M> mapExpression);
 
 	@Override
-	SqmExpression<Integer> mapSize(Map map);
+	<M extends Map<?, ?>> SqmExpression<Integer> mapSize(M map);
 
 	@Override
 	SqmSortSpecification sort(

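The switch from List to Collection in the in(...) overload above lines up with the collection-valued IN of the standard criteria API. A minimal usage sketch via javax.persistence.criteria.Expression#in(Collection), with a hypothetical Person entity:

import java.util.List;
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Id;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;

public class InCollectionExample {

    // hypothetical entity, used only for illustration
    @Entity(name = "Person")
    public static class Person {
        @Id
        Long id;
        String name;
    }

    public static List<Person> byNames(EntityManager em, List<String> names) {
        CriteriaBuilder cb = em.getCriteriaBuilder();
        CriteriaQuery<Person> cq = cb.createQuery( Person.class );
        Root<Person> root = cq.from( Person.class );

        // Expression#in(Collection) is the collection-valued form the widened
        // overload above can now accept without copying into a List first
        cq.select( root ).where( root.get( "name" ).in( names ) );

        return em.createQuery( cq ).getResultList();
    }
}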
@@ -141,15 +141,15 @@ public interface SemanticQueryWalker<T> {
 
 	T visitAnyValuedValuedPath(SqmAnyValuedSimplePath<?> path);
 
-	T visitNonAggregatedCompositeValuedPath(NonAggregatedCompositeSimplePath path);
+	T visitNonAggregatedCompositeValuedPath(NonAggregatedCompositeSimplePath<?> path);
 
 	T visitEntityValuedPath(SqmEntityValuedSimplePath<?> path);
 
 	T visitPluralValuedPath(SqmPluralValuedSimplePath<?> path);
 
-	T visitSelfInterpretingSqmPath(SelfInterpretingSqmPath sqmPath);
+	T visitSelfInterpretingSqmPath(SelfInterpretingSqmPath<?> sqmPath);
 
-	T visitIndexedPluralAccessPath(SqmIndexedCollectionAccessPath path);
+	T visitIndexedPluralAccessPath(SqmIndexedCollectionAccessPath<?> path);
 
 	T visitMaxElementPath(SqmMaxElementPath<?> path);
 

@@ -161,7 +161,7 @@ public interface SemanticQueryWalker<T> {
 
 	T visitTreatedPath(SqmTreatedPath<?, ?> sqmTreatedPath);
 
-	T visitCorrelation(SqmCorrelation correlation);
+	T visitCorrelation(SqmCorrelation<?, ?> correlation);
 
 
 	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@@ -193,9 +193,9 @@ public interface SemanticQueryWalker<T> {
 
 	T visitLiteral(SqmLiteral<?> literal);
 
-	T visitEnumLiteral(SqmEnumLiteral sqmEnumLiteral);
+	T visitEnumLiteral(SqmEnumLiteral<?> sqmEnumLiteral);
 
-	T visitFieldLiteral(SqmFieldLiteral sqmFieldLiteral);
+	T visitFieldLiteral(SqmFieldLiteral<?> sqmFieldLiteral);
 
 	T visitTuple(SqmTuple<?> sqmTuple);
 

@@ -229,27 +229,27 @@ public interface SemanticQueryWalker<T> {
 
 	T visitUnaryOperationExpression(SqmUnaryOperation<?> expression);
 
-	T visitFunction(SqmFunction tSqmFunction);
+	T visitFunction(SqmFunction<?> tSqmFunction);
 
-	T visitExtractUnit(SqmExtractUnit extractUnit);
+	T visitExtractUnit(SqmExtractUnit<?> extractUnit);
 
 	T visitFormat(SqmFormat sqmFormat);
 
-	T visitCastTarget(SqmCastTarget sqmCastTarget);
+	T visitCastTarget(SqmCastTarget<?> sqmCastTarget);
 
 	T visitTrimSpecification(SqmTrimSpecification trimSpecification);
 
-	T visitDistinct(SqmDistinct distinct);
+	T visitDistinct(SqmDistinct<?> distinct);
 
 	T visitStar(SqmStar sqmStar);
 
-	T visitCoalesce(SqmCoalesce sqmCoalesce);
+	T visitCoalesce(SqmCoalesce<?> sqmCoalesce);
 
-	T visitToDuration(SqmToDuration toDuration);
+	T visitToDuration(SqmToDuration<?> toDuration);
 
 	T visitByUnit(SqmByUnit sqmByUnit);
 
-	T visitDurationUnit(SqmDurationUnit durationUnit);
+	T visitDurationUnit(SqmDurationUnit<?> durationUnit);
 
 	// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 	// predicates

@@ -306,7 +306,7 @@ public interface SemanticQueryWalker<T> {
 
 	T visitPluralAttributeSizeFunction(SqmCollectionSize function);
 
-	T visitMapEntryFunction(SqmMapEntryReference function);
+	T visitMapEntryFunction(SqmMapEntryReference<?, ?> function);
 
 	T visitFullyQualifiedClass(Class<?> namedClass);
 }

@@ -11,6 +11,7 @@ import javax.persistence.metamodel.Bindable;
 
 import org.hibernate.metamodel.model.domain.DomainType;
 import org.hibernate.query.hql.spi.SqmCreationState;
+import org.hibernate.query.sqm.tree.SqmExpressableAccessor;
 import org.hibernate.query.sqm.tree.domain.SqmPath;
 
 /**

@@ -22,7 +23,7 @@ import org.hibernate.query.sqm.tree.domain.SqmPath;
  *
  * @author Steve Ebersole
  */
-public interface SqmPathSource<J> extends SqmExpressable<J>, Bindable<J> {
+public interface SqmPathSource<J> extends SqmExpressable<J>, Bindable<J>, SqmExpressableAccessor<J> {
 	/**
 	 * The name of this thing. Mainly used in logging and when creating a
 	 * {@link org.hibernate.query.NavigablePath}

@@ -38,15 +39,20 @@ public interface SqmPathSource<J> extends SqmExpressable<J>, Bindable<J> {
 	/**
 	 * Find a SqmPathSource by name relative to this source.
 	 *
-	 * @throws IllegalPathUsageException to indicate that this source cannot be de-referenced
+	 * @throws IllegalStateException to indicate that this source cannot be de-referenced
 	 */
-	SqmPathSource<?> findSubPathSource(String name) throws IllegalPathUsageException;
+	SqmPathSource<?> findSubPathSource(String name);
 
 	/**
 	 * Create an SQM path for this source relative to the given left-hand side
 	 */
 	SqmPath<J> createSqmPath(SqmPath<?> lhs);
 
+	@Override
+	default SqmExpressable<J> getExpressable() {
+		return (SqmExpressable<J>) getSqmPathType();
+	}
+
 	default <X extends DomainType> X sqmAs(Class<X> targetType) {
 		if ( targetType.isInstance( this ) ) {
 			//noinspection unchecked

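The @throws change above lines up with the JPA contract for Path#get, which specifies IllegalStateException when a path that resolves to a basic value is de-referenced further. A minimal sketch with a hypothetical Person entity:

import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Id;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;

public class BasicPathDereferenceExample {

    // hypothetical entity, used only for illustration
    @Entity(name = "Person")
    public static class Person {
        @Id
        Long id;
        String name;
    }

    public static void run(EntityManager em) {
        CriteriaBuilder cb = em.getCriteriaBuilder();
        CriteriaQuery<Person> cq = cb.createQuery( Person.class );
        Root<Person> root = cq.from( Person.class );

        try {
            // "name" is a basic (String) attribute and cannot be navigated further;
            // the JPA contract for Path#get calls for IllegalStateException here
            root.get( "name" ).get( "length" );
        }
        catch (IllegalStateException expected) {
            System.out.println( "cannot de-reference a basic path: " + expected.getMessage() );
        }
    }
}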
@@ -113,7 +113,7 @@ public class NamedSqmFunctionDescriptor
 		boolean firstPass = true;
 		for ( SqlAstNode arg : sqlAstArguments ) {
 			if ( !firstPass ) {
-				sqlAppender.appendSql( ", " );
+				sqlAppender.appendSql( "," );
 			}
 			if ( caseWrapper && !( arg instanceof Distinct ) ) {
 				sqlAppender.appendSql( "case when " );

@@ -14,6 +14,7 @@ import org.hibernate.query.sqm.produce.function.FunctionReturnTypeResolver;
 import org.hibernate.query.sqm.sql.BaseSqmToSqlAstConverter;
 import org.hibernate.query.sqm.sql.SqmToSqlAstConverter;
 import org.hibernate.query.sqm.tree.SqmTypedNode;
+import org.hibernate.query.sqm.tree.expression.SqmAggregateFunction;
 import org.hibernate.query.sqm.tree.expression.SqmDistinct;
 import org.hibernate.query.sqm.tree.predicate.SqmPredicate;
 import org.hibernate.query.sqm.tree.select.SqmSelectableNode;

@@ -22,7 +23,8 @@ import org.hibernate.sql.ast.tree.predicate.Predicate;
 /**
  * @author Christian Beikov
  */
-public class SelfRenderingSqmAggregateFunction<T> extends SelfRenderingSqmFunction<T> {
+public class SelfRenderingSqmAggregateFunction<T> extends SelfRenderingSqmFunction<T>
+		implements SqmAggregateFunction<T> {
 
 	private final SqmPredicate filter;
 

@@ -55,6 +57,11 @@ public class SelfRenderingSqmAggregateFunction<T> extends SelfRenderingSqmFuncti
 		);
 	}
 
+	@Override
+	public SqmPredicate getFilter() {
+		return filter;
+	}
+
 	@Override
 	public void appendHqlString(StringBuilder sb) {
 		final List<? extends SqmTypedNode<?>> arguments = getArguments();

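The filter predicate exposed through getFilter() above carries an aggregate's filter condition. A hedged sketch of the kind of query that produces such a predicate, assuming the HQL filter (where ...) syntax and a hypothetical Account entity; treat it as an illustration of the idea rather than a verified query:

import java.math.BigDecimal;
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Id;

public class FilteredAggregateExample {

    // hypothetical entity, used only for illustration
    @Entity(name = "Account")
    public static class Account {
        @Id
        Long id;
        boolean active;
        BigDecimal balance;
    }

    public static BigDecimal sumOfActiveBalances(EntityManager em) {
        // assumes the HQL aggregate FILTER clause; the filter condition presumably
        // surfaces as the SqmPredicate returned by getFilter() on the function node
        return em.createQuery(
                "select sum(a.balance) filter (where a.active = true) from Account a",
                BigDecimal.class
        ).getSingleResult();
    }
}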
@@ -12,6 +12,7 @@ import java.util.List;
 import java.util.Map;
 import javax.persistence.Tuple;
 import javax.persistence.TupleElement;
+import javax.persistence.criteria.CompoundSelection;
 
 import org.hibernate.ScrollMode;
 import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment;

@@ -22,6 +23,7 @@ import org.hibernate.internal.EmptyScrollableResults;
 import org.hibernate.internal.util.streams.StingArrayCollector;
 import org.hibernate.metamodel.mapping.MappingModelExpressable;
 import org.hibernate.query.IllegalQueryOperationException;
+import org.hibernate.query.criteria.JpaSelection;
 import org.hibernate.query.spi.QueryEngine;
 import org.hibernate.query.spi.QueryOptions;
 import org.hibernate.query.spi.QueryParameterImplementor;

@@ -32,6 +34,7 @@ import org.hibernate.query.sqm.sql.SqmTranslation;
 import org.hibernate.query.sqm.sql.SqmTranslator;
 import org.hibernate.query.sqm.sql.SqmTranslatorFactory;
 import org.hibernate.query.sqm.tree.expression.SqmParameter;
+import org.hibernate.query.sqm.tree.select.SqmJpaCompoundSelection;
 import org.hibernate.query.sqm.tree.select.SqmSelectStatement;
 import org.hibernate.query.sqm.tree.select.SqmSelection;
 import org.hibernate.sql.ast.SqlAstTranslator;

@@ -140,11 +143,23 @@ public class ConcreteSqmSelectQueryPlan<R> implements SelectQueryPlan<R> {
 		if ( Tuple.class.isAssignableFrom( resultType ) ) {
 			// resultType is Tuple..
 			if ( queryOptions.getTupleTransformer() == null ) {
-				final Map<TupleElement<?>, Integer> tupleElementMap = new IdentityHashMap<>( selections.size() );
+				final Map<TupleElement<?>, Integer> tupleElementMap;
+				if ( selections.size() == 1 && selections.get( 0 ).getSelectableNode() instanceof CompoundSelection<?> ) {
+					final List<? extends JpaSelection<?>> selectionItems = selections.get( 0 )
+							.getSelectableNode()
+							.getSelectionItems();
+					tupleElementMap = new IdentityHashMap<>( selectionItems.size() );
+					for ( int i = 0; i < selectionItems.size(); i++ ) {
+						tupleElementMap.put( selectionItems.get( i ), i );
+					}
+				}
+				else {
+					tupleElementMap = new IdentityHashMap<>( selections.size() );
 				for ( int i = 0; i < selections.size(); i++ ) {
 					final SqmSelection<?> selection = selections.get( i );
 					tupleElementMap.put( selection.getSelectableNode(), i );
 				}
+				}
 				return (RowTransformer<R>) new RowTransformerJpaTupleImpl( new TupleMetadata( tupleElementMap ) );
 			}

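The CompoundSelection branch above is what a JPA Tuple query built with CriteriaBuilder#tuple(...) flows through: the single compound selection is unpacked so each item becomes an addressable TupleElement. A minimal sketch with a hypothetical Person entity:

import java.util.List;
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Id;
import javax.persistence.Tuple;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Root;

public class TupleSelectionExample {

    // hypothetical entity, used only for illustration
    @Entity(name = "Person")
    public static class Person {
        @Id
        Long id;
        String name;
    }

    public static void run(EntityManager em) {
        CriteriaBuilder cb = em.getCriteriaBuilder();
        CriteriaQuery<Tuple> cq = cb.createTupleQuery();
        Root<Person> root = cq.from( Person.class );

        Path<Long> id = root.get( "id" );
        Path<String> name = root.get( "name" );

        // cb.tuple(...) yields a single CompoundSelection, which the branch above
        // unpacks into the individual tuple elements
        cq.select( cb.tuple( id, name ) );

        List<Tuple> results = em.createQuery( cq ).getResultList();
        for ( Tuple tuple : results ) {
            Long personId = tuple.get( id );                   // access by TupleElement
            String personName = tuple.get( 1, String.class );  // or by position
        }
    }
}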