resolve merge conflicts

This commit is contained in:
Strong Liu 2013-03-18 20:12:26 +08:00
commit 7881b4d6b6
625 changed files with 26528 additions and 6541 deletions

View File

@ -24,7 +24,7 @@ on the Jira HHH-123 : `git checkout -b HHH-123 master`
## Code
Do yo thang!
Do yo thing!
## Commit

View File

@ -64,6 +64,8 @@ subprojects { subProject ->
apply plugin: 'java'
apply plugin: 'maven' // for install task as well as deploy dependencies
apply plugin: 'uploadAuth'
apply plugin: 'osgi'
apply from: "../utilities.gradle"
configurations {
provided {
@ -165,13 +167,54 @@ subprojects { subProject ->
compileJava.options.define(compilerArgs: ["-proc:none", "-encoding", "UTF-8"])
compileTestJava.options.define(compilerArgs: ["-proc:none", "-encoding", "UTF-8"])
manifest.mainAttributes(
provider: 'gradle',
'Implementation-Url': 'http://hibernate.org',
'Implementation-Version': version,
'Implementation-Vendor': 'Hibernate.org',
'Implementation-Vendor-Id': 'org.hibernate'
)
jar {
Set<String> exportPackages = new HashSet<String>()
Set<String> privatePackages = new HashSet<String>()
// TODO: Could more of this be pulled into utilities.gradle?
sourceSets.each { SourceSet sourceSet ->
// skip certain source sets
if ( ! ['test','matrix'].contains( sourceSet.name ) ) {
sourceSet.java.each { javaFile ->
// - org.hibernate.boot.registry.classloading.internal
// until EntityManagerFactoryBuilderImpl no longer imports ClassLoaderServiceImpl
// - .util for external module use (especially envers)
final String[] temporaryExports = [
'org.hibernate.boot.registry.classloading.internal',
'org.hibernate.internal.util' ]
final String packageName = determinePackageName( sourceSet.java, javaFile );
if ( ! temporaryExports.contains( packageName )
&& ( packageName.endsWith( ".internal" )
|| packageName.contains( ".internal." )
|| packageName.endsWith( ".test" )
|| packageName.contains( ".test." ) ) ) {
privatePackages.add( packageName );
}
else {
exportPackages.add( packageName );
}
}
}
}
manifest = osgiManifest {
// GRADLE-1411: Even if we override Imports and Exports
// auto-generation with instructions, classesDir and classpath
// need to be here (temporarily).
classesDir = sourceSets.main.output.classesDir
classpath = configurations.runtime
instruction 'Export-Package', exportPackages.toArray(new String[0])
instruction 'Private-Package', privatePackages.toArray(new String[0])
instruction 'Bundle-Vendor', 'Hibernate.org'
instruction 'Implementation-Url', 'http://hibernate.org'
instruction 'Implementation-Version', version
instruction 'Implementation-Vendor', 'Hibernate.org'
instruction 'Implementation-Vendor-Id', 'org.hibernate'
}
}
tasks.withType(Test) {
System.getProperties().each { Map.Entry entry ->

View File

@ -383,42 +383,12 @@
<listitem><para>read-only</para></listitem>
<listitem><para>nonstrict read-write</para></listitem>
<listitem><para>read-write</para></listitem>
</itemizedlist>
</entry>
</row>
<row>
<entry>OSCache</entry>
<entry></entry>
<entry>
<itemizedlist>
<listitem><para>read-only</para></listitem>
<listitem><para>nontrict read-write</para></listitem>
<listitem><para>read-write</para></listitem>
</itemizedlist>
</entry>
</row>
<row>
<entry>SwarmCache</entry>
<entry></entry>
<entry>
<itemizedlist>
<listitem><para>read-only</para></listitem>
<listitem><para>nontrict read-write</para></listitem>
</itemizedlist>
</entry>
</row>
<row>
<entry>JBoss Cache 1.x</entry>
<entry></entry>
<entry>
<itemizedlist>
<listitem><para>read-only</para></listitem>
<listitem><para>transactional</para></listitem>
</itemizedlist>
</entry>
</row>
<row>
<entry>JBoss Cache 2.x</entry>
<entry>Infinispan</entry>
<entry></entry>
<entry>
<itemizedlist>

View File

@ -35,6 +35,7 @@
</para>
<itemizedlist>
<listitem><para><xref linkend="value-basic-types" /></para></listitem>
<listitem><para><xref linkend="value-national-character-types" /></para></listitem>
<listitem><para><xref linkend="value-composite-types" /></para></listitem>
<listitem><para><xref linkend="value-collection-types" /></para></listitem>
</itemizedlist>
@ -294,7 +295,111 @@
</tgroup>
</table>
</section>
<section xml:id="value-national-character-types">
<title>National Character Types</title>
<para>
National Character types, a new feature since the JDBC 4.0 API, are now available in the Hibernate type system.
National Language Support enables you to retrieve data or insert data into a database in any character
set that the underlying database supports.
</para>
<para>
Depending on your environment, you might want to set the configuration option <property>hibernate.use_nationalized_character_data</property>
to true, so that all string or clob based attributes have this national character support automatically.
There is nothing else to be changed, and you don't have to use any Hibernate-specific mapping, so it is portable
( though the national character support feature is not required and may not work on other JPA provider implementations ).
</para>
<para>
The other way of using this feature is having the <classname>@Nationalized</classname> annotation on the attribute
that should be nationalized. This only works on string based attributes, including string, char, char array and clob.
<programlisting role="JAVA">
@Entity( name="NationalizedEntity")
public static class NationalizedEntity {
@Id
private Integer id;
@Nationalized
private String nvarcharAtt;
@Lob
@Nationalized
private String materializedNclobAtt;
@Lob
@Nationalized
private NClob nclobAtt;
@Nationalized
private Character ncharacterAtt;
@Nationalized
private Character[] ncharArrAtt;
@Type(type = "ntext")
private String nlongvarcharcharAtt;
}</programlisting>
</para>
<table>
<title>National Character Type Mappings</title>
<tgroup cols="4">
<thead>
<row>
<entry>Hibernate type</entry>
<entry>Database type</entry>
<entry>JDBC type</entry>
<entry>Type registry</entry>
</row>
</thead>
<tbody>
<row>
<entry>org.hibernate.type.StringNVarcharType</entry>
<entry>string</entry>
<entry>NVARCHAR</entry>
<entry>nstring</entry>
</row>
<row>
<entry>org.hibernate.type.NTextType</entry>
<entry>string</entry>
<entry>LONGNVARCHAR</entry>
<entry>materialized_clob</entry>
</row>
<row>
<entry>org.hibernate.type.NClobType</entry>
<entry>java.sql.NClob</entry>
<entry>NCLOB</entry>
<entry>nclob</entry>
</row>
<row>
<entry>org.hibernate.type.MaterializedNClobType</entry>
<entry>string</entry>
<entry>NCLOB</entry>
<entry>materialized_nclob</entry>
</row>
<row>
<entry>org.hibernate.type.PrimitiveCharacterArrayNClobType</entry>
<entry>char[]</entry>
<entry>NCHAR</entry>
<entry>char[]</entry>
</row>
<row>
<entry>org.hibernate.type.CharacterNCharType</entry>
<entry>java.lang.Character</entry>
<entry>NCHAR</entry>
<entry>ncharacter</entry>
</row>
<row>
<entry>org.hibernate.type.CharacterArrayNClobType</entry>
<entry>java.lang.Character[]</entry>
<entry>NCLOB</entry>
<entry>Character[], java.lang.Character[]</entry>
</row>
</tbody>
</tgroup>
</table>
</section>
<section xml:id="value-composite-types">
<title>Composite types</title>
<para>

View File

@ -1306,7 +1306,8 @@ query.add(AuditEntity.relatedId("address").eq(relatedEntityId));]]></programlist
<orderedlist>
<listitem>
<para>
collections of components
Bag style collection whose identifier column has been defined using
<interfacename>@CollectionId</interfacename> annotation (JIRA ticket HHH-3950).
</para>
</listitem>
</orderedlist>

View File

@ -263,6 +263,11 @@
<entry><para>acquired upon explicit user request using a <code>SELECT ... FOR UPDATE NOWAIT</code> in
Oracle.</para></entry>
</row>
<row>
<entry>LockMode.UPGRADE_SKIPLOCKED</entry>
<entry><para>acquired upon explicit user request using a <code>SELECT ... FOR UPDATE SKIP LOCKED</code> in
Oracle, or <code>SELECT ... with (rowlock,updlock,readpast)</code> in SQL Server.</para></entry>
</row>
<row>
<entry>LockMode.READ</entry>
<entry><para>acquired automatically when Hibernate reads data under <phrase>Repeatable Read</phrase> or
@ -299,17 +304,18 @@
</listitem>
</itemizedlist>
<para>
If you call <methodname>Session.load()</methodname> with option <option>UPGRADE</option> or
<option>UPGRADE_NOWAIT</option>, and the requested object is not already loaded by the session, the object is
loaded using <code>SELECT ... FOR UPDATE</code>. If you call <methodname>load()</methodname> for an object that
is already loaded with a less restrictive lock than the one you request, Hibernate calls
<methodname>lock()</methodname> for that object.
If you call <methodname>Session.load()</methodname> with option <option>UPGRADE</option>,
<option>UPGRADE_NOWAIT</option> or <option>UPGRADE_SKIPLOCKED</option>, and the requested object is not already
loaded by the session, the object is loaded using <code>SELECT ... FOR UPDATE</code>. If you call
<methodname>load()</methodname> for an object that is already loaded with a less restrictive lock than the one
you request, Hibernate calls <methodname>lock()</methodname> for that object.
</para>
<para>
<methodname>Session.lock()</methodname> performs a version number check if the specified lock mode is
<literal>READ</literal>, <literal>UPGRADE</literal>, or <literal>UPGRADE_NOWAIT</literal>. In the case of
<literal>UPGRADE</literal> or <literal>UPGRADE_NOWAIT</literal>, <code>SELECT ... FOR UPDATE</code> syntax is
used.
<literal>READ</literal>, <literal>UPGRADE</literal>, <literal>UPGRADE_NOWAIT</literal> or
<literal>UPGRADE_SKIPLOCKED</literal>. In the case of <literal>UPGRADE</literal>,
<literal>UPGRADE_NOWAIT</literal> or <literal>UPGRADE_SKIPLOCKED</literal>, <code>SELECT ... FOR UPDATE</code>
syntax is used.
</para>
<para>
If the requested lock mode is not supported by the database, Hibernate uses an appropriate alternate mode

View File

@ -119,7 +119,7 @@ List cats = sess.createCriteria(Cat.class)
<programlisting role="JAVA"><![CDATA[List cats = sess.createCriteria(Cat.class)
.add( Restrictions.like("name", "F%")
.addOrder( Order.asc("name") )
.addOrder( Order.asc("name").nulls(NullPrecedence.LAST) )
.addOrder( Order.desc("age") )
.setMaxResults(50)
.list();]]></programlisting>

View File

@ -125,6 +125,12 @@
<entry>Forces Hibernate to order SQL updates by the primary key value of the items being updated. This
reduces the likelihood of transaction deadlocks in highly-concurrent systems.</entry>
</row>
<row>
<entry>hibernate.order_by.default_null_ordering</entry>
<entry><para><literal>none</literal>, <literal>first</literal> or <literal>last</literal></para></entry>
<entry>Defines precedence of null values in <literal>ORDER BY</literal> clause. Defaults to
<literal>none</literal> which varies between RDBMS implementation.</entry>
</row>
<row>
<entry>hibernate.generate_statistics</entry>
<entry><para><literal>true</literal> or <literal>false</literal></para></entry>

View File

@ -1427,7 +1427,9 @@
</para>
<para>
Individual expressions in the order-by can be qualified with either <literal>ASC</literal> (ascending) or
<literal>DESC</literal> (descending) to indicated the desired ordering direction.
<literal>DESC</literal> (descending) to indicate the desired ordering direction. Null values can be placed
in front of or at the end of the sorted set using the <literal>NULLS FIRST</literal> or <literal>NULLS LAST</literal>
clause respectively.
</para>
<example>
<title>Order-by examples</title>

View File

@ -261,7 +261,7 @@
<para>
The standard resolver implementation acts as a chain, delegating to a series of individual
resolvers. The standard Hibernate resolution behavior is contained in
<classname>org.hibernate.engine.jdbc.dialect.internal.StandardDialectResolver</classname>.
<classname>org.hibernate.engine.jdbc.dialect.internal.StandardDatabaseMetaDataDialectResolver</classname>.
<classname>org.hibernate.engine.jdbc.dialect.internal.DialectResolverInitiator</classname>
also consults with the <property>hibernate.dialect_resolvers</property> setting for any
custom resolvers.

View File

@ -226,7 +226,7 @@ session.close();]]></programlisting>
<para>
In keeping with the EJB3 specification, HQL <literal>UPDATE</literal> statements, by default, do not effect the
<xref linkend="mapping-declaration-version">version</xref>
<xref linkend="entity-mapping-entity-version">version</xref>
or the <xref linkend="mapping-declaration-timestamp">timestamp</xref> property values
for the affected entities. However,
you can force Hibernate to reset the <literal>version</literal> or

View File

@ -483,8 +483,8 @@ public class Part {
<literal>@javax.persistence.OrderBy</literal> to your property. This
annotation takes as parameter a list of comma separated properties (of
the target entity) and orders the collection accordingly (eg
<code>firstname asc, age desc</code>), if the string is empty, the
collection will be ordered by the primary key of the target
<code>firstname asc, age desc, weight asc nulls last</code>), if the string
is empty, the collection will be ordered by the primary key of the target
entity.</para>
<example>

View File

@ -747,53 +747,6 @@ Customer customer = (Customer) session.get( Customer.class, customerId );
<entry>yes</entry>
</row>
<!-- <row>
<entry>OSCache</entry>
<entry><literal>org.hibernate.cache.OSCacheProvider</literal></entry>
<entry>memory, disk</entry>
<entry></entry>
<entry>yes</entry>
</row>
<row>
<entry>SwarmCache</entry>
<entry><literal>org.hibernate.cache.SwarmCacheProvider</literal></entry>
<entry>clustered (ip multicast)</entry>
<entry>yes (clustered invalidation)</entry>
<entry></entry>
</row>
<row>
<entry>JBoss Cache 1.x</entry>
<entry><literal>org.hibernate.cache.TreeCacheProvider</literal></entry>
<entry>clustered (ip multicast), transactional</entry>
<entry>yes (replication)</entry>
<entry>yes (clock sync req.)</entry>
</row>
<row>
<entry>JBoss Cache 2</entry>
<entry><literal>org.hibernate.cache.jbc.JBossCacheRegionFactory</literal></entry>
<entry>clustered (ip multicast), transactional</entry>
<entry>yes (replication or invalidation)</entry>
<entry>yes (clock sync req.)</entry>
</row> -->
<row>
<entry>Infinispan</entry>
@ -1122,53 +1075,6 @@ public SortedSet&lt;Ticket&gt; getTickets() {
<entry>yes</entry>
</row>
<!-- <row>
<entry>OSCache</entry>
<entry>yes</entry>
<entry>yes</entry>
<entry>yes</entry>
<entry></entry>
</row>
<row>
<entry>SwarmCache</entry>
<entry>yes</entry>
<entry>yes</entry>
<entry></entry>
<entry></entry>
</row>
<row>
<entry>JBoss Cache 1.x</entry>
<entry>yes</entry>
<entry></entry>
<entry></entry>
<entry>yes</entry>
</row>
<row>
<entry>JBoss Cache 2</entry>
<entry>yes</entry>
<entry></entry>
<entry></entry>
<entry>yes</entry>
</row> -->
<row>
<entry>Infinispan</entry>

View File

@ -855,12 +855,17 @@ WHERE prod.name = 'widget'
</para>
<programlisting><![CDATA[from DomesticCat cat
order by cat.name asc, cat.weight desc, cat.birthdate]]></programlisting>
order by cat.name asc, cat.weight desc nulls first, cat.birthdate]]></programlisting>
<para>
The optional <literal>asc</literal> or <literal>desc</literal> indicate ascending or descending order
respectively.
</para>
<para>
The optional <literal>nulls first</literal> or <literal>nulls last</literal> indicate precedence of null
values while sorting.
</para>
</section>
<section xml:id="queryhql-grouping" revision="1">

View File

@ -51,7 +51,7 @@
<para>
The first thing we need to do is to set up the development environment. We
will be using the "standard layout" advocated by a lot of build tools such
as <link xlink:href="http://maven.org" xmlns:xlink="http://www.w3.org/1999/xlink">Maven</link>.
as <link xlink:href="http://maven.apache.org" xmlns:xlink="http://www.w3.org/1999/xlink">Maven</link>.
Maven, in particular, has a
good resource describing this
<link xlink:href="http://maven.apache.org/guides/introduction/introduction-to-the-standard-directory-layout.html" xmlns:xlink="http://www.w3.org/1999/xlink">layout</link>.

View File

@ -9,3 +9,10 @@ dependencies {
}
testCompile project( ':hibernate-testing' )
}
jar {
manifest {
instruction 'Bundle-Description', 'Hibernate ORM C3P0'
instruction 'Bundle-SymbolicName', 'org.hibernate.c3p0'
}
}

View File

@ -37,6 +37,28 @@ manifest.mainAttributes(
'Main-Class': 'org.hibernate.Version'
)
jar {
manifest {
instruction 'Bundle-Description', 'Hibernate ORM Core'
instruction 'Bundle-SymbolicName', 'org.hibernate.core'
instruction 'Import-Package',
'javax.security.auth;resolution:=optional',
'javax.security.jacc;resolution:=optional',
'javax.validation;resolution:=optional',
'javax.validation.constraints;resolution:=optional',
'javax.validation.groups;resolution:=optional',
'javax.validation.metadata;resolution:=optional',
'*'
// TODO: Uncomment once EntityManagerFactoryBuilderImpl no longer
// uses ClassLoaderServiceImpl.
instruction 'Export-Package',
'org.hibernate.boot.registry.classloading.internal',
'*'
}
}
sourceSets.main {
ext.jaxbTargetDir = file( "${buildDir}/generated-src/jaxb/main" )
java.srcDir jaxbTargetDir

View File

@ -348,9 +348,18 @@ orderClause
;
orderExprs
: orderExpr ( ASCENDING | DESCENDING )? (orderExprs)?
: orderExpr ( ASCENDING | DESCENDING )? ( nullOrdering )? (orderExprs)?
;
nullOrdering
: NULLS nullPrecedence
;
nullPrecedence
: FIRST
| LAST
;
orderExpr
: { isOrderExpressionResultVariableRef( _t ) }? resultVariableRef
| expr

View File

@ -78,6 +78,9 @@ tokens
UPDATE="update";
VERSIONED="versioned";
WHERE="where";
NULLS="nulls";
FIRST;
LAST;
// -- SQL tokens --
// These aren't part of HQL, but the SQL fragment parser uses the HQL lexer, so they need to be declared here.
@ -439,7 +442,7 @@ orderByClause
;
orderElement
: expression ( ascendingOrDescending )?
: expression ( ascendingOrDescending )? ( nullOrdering )?
;
ascendingOrDescending
@ -447,6 +450,24 @@ ascendingOrDescending
| ( "desc" | "descending") { #ascendingOrDescending.setType(DESCENDING); }
;
nullOrdering
: NULLS nullPrecedence
;
nullPrecedence
: IDENT {
if ( "first".equalsIgnoreCase( #nullPrecedence.getText() ) ) {
#nullPrecedence.setType( FIRST );
}
else if ( "last".equalsIgnoreCase( #nullPrecedence.getText() ) ) {
#nullPrecedence.setType( LAST );
}
else {
throw new SemanticException( "Expecting 'first' or 'last', but found '" + #nullPrecedence.getText() + "' as null ordering precedence." );
}
}
;
//## havingClause:
//## HAVING logicalExpression;
@ -723,7 +744,7 @@ castedIdentPrimaryBase
aggregate
: ( SUM^ | AVG^ | MAX^ | MIN^ ) OPEN! additiveExpression CLOSE! { #aggregate.setType(AGGREGATE); }
// Special case for count - It's 'parameters' can be keywords.
| COUNT^ OPEN! ( STAR { #STAR.setType(ROW_STAR); } | ( ( DISTINCT | ALL )? ( path | collectionExpr | NUM_INT ) ) ) CLOSE!
| COUNT^ OPEN! ( STAR { #STAR.setType(ROW_STAR); } | ( ( DISTINCT | ALL )? ( path | collectionExpr | NUM_INT | caseExpression ) ) ) CLOSE!
| collectionExpr
;
@ -737,6 +758,7 @@ collectionExpr
compoundExpr
: collectionExpr
| path
| { LA(1) == OPEN && LA(2) == CLOSE }? OPEN! CLOSE!
| (OPEN! ( (expression (COMMA! expression)*) | subQuery ) CLOSE!)
| parameter
;

View File

@ -30,6 +30,7 @@ package org.hibernate.sql.ordering.antlr;
* Antlr grammar for rendering <tt>ORDER_BY</tt> trees as described by the {@link OrderByFragmentParser}
* @author Steve Ebersole
* @author Lukasz Antoniak (lukasz dot antoniak at gmail dot com)
*/
class GeneratedOrderByFragmentRenderer extends TreeParser;
@ -53,6 +54,13 @@ options {
/*package*/ String getRenderedFragment() {
return buffer.toString();
}
/**
* Implementation note: This is just a stub. OrderByFragmentRenderer contains the effective implementation.
*/
protected String renderOrderByElement(String expression, String collation, String order, String nulls) {
throw new UnsupportedOperationException("Concrete ORDER BY renderer should override this method.");
}
}
orderByFragment
@ -61,32 +69,29 @@ orderByFragment
)
;
sortSpecification
sortSpecification { String sortKeySpec = null; String collSpec = null; String ordSpec = null; String nullOrd = null; }
: #(
SORT_SPEC sortKeySpecification (collationSpecification)? (orderingSpecification)?
SORT_SPEC sortKeySpec=sortKeySpecification (collSpec=collationSpecification)? (ordSpec=orderingSpecification)? (nullOrd=nullOrdering)?
{ out( renderOrderByElement( sortKeySpec, collSpec, ordSpec, nullOrd ) ); }
)
;
sortKeySpecification
: #(SORT_KEY sortKey)
sortKeySpecification returns [String sortKeyExp = null]
: #(SORT_KEY s:sortKey) { sortKeyExp = #s.getText(); }
;
sortKey
: i:IDENT {
out( #i );
}
: IDENT
;
collationSpecification
: c:COLLATE {
out( " collate " );
out( c );
}
collationSpecification returns [String collSpecExp = null]
: c:COLLATE { collSpecExp = "collate " + #c.getText(); }
;
orderingSpecification
: o:ORDER_SPEC {
out( " " );
out( #o );
}
orderingSpecification returns [String ordSpecExp = null]
: o:ORDER_SPEC { ordSpecExp = #o.getText(); }
;
nullOrdering returns [String nullOrdExp = null]
: n:NULL_ORDER { nullOrdExp = #n.getText(); }
;

View File

@ -46,6 +46,7 @@ tokens
ORDER_BY;
SORT_SPEC;
ORDER_SPEC;
NULL_ORDER;
SORT_KEY;
EXPR_LIST;
DOT;
@ -55,6 +56,9 @@ tokens
COLLATE="collate";
ASCENDING="asc";
DESCENDING="desc";
NULLS="nulls";
FIRST;
LAST;
}
@ -76,7 +80,7 @@ tokens
* @return The text.
*/
protected final String extractText(AST ast) {
// for some reason, within AST creation blocks "[]" I am somtimes unable to refer to the AST.getText() method
// for some reason, within AST creation blocks "[]" I am sometimes unable to refer to the AST.getText() method
// using #var (the #var is not interpreted as the rule's output AST).
return ast.getText();
}
@ -168,7 +172,7 @@ orderByFragment { trace("orderByFragment"); }
* the results should be sorted.
*/
sortSpecification { trace("sortSpecification"); }
: sortKey (collationSpecification)? (orderingSpecification)? {
: sortKey (collationSpecification)? (orderingSpecification)? (nullOrdering)? {
#sortSpecification = #( [SORT_SPEC, "{sort specification}"], #sortSpecification );
#sortSpecification = postProcessSortSpecification( #sortSpecification );
}
@ -290,6 +294,30 @@ orderingSpecification! { trace("orderingSpecification"); }
}
;
/**
* Recognition rule for what SQL-2003 terms the <tt>null ordering</tt>; <tt>NULLS FIRST</tt> or
* <tt>NULLS LAST</tt>.
*/
nullOrdering! { trace("nullOrdering"); }
: NULLS n:nullPrecedence {
#nullOrdering = #( [NULL_ORDER, extractText( #n )] );
}
;
nullPrecedence { trace("nullPrecedence"); }
: IDENT {
if ( "first".equalsIgnoreCase( #nullPrecedence.getText() ) ) {
#nullPrecedence.setType( FIRST );
}
else if ( "last".equalsIgnoreCase( #nullPrecedence.getText() ) ) {
#nullPrecedence.setType( LAST );
}
else {
throw new SemanticException( "Expecting 'first' or 'last', but found '" + #nullPrecedence.getText() + "' as null ordering precedence." );
}
}
;
/**
* A simple-property-path is an IDENT followed by one or more (DOT IDENT) sequences
*/

View File

@ -27,8 +27,11 @@ options {
/** the buffer resulting SQL statement is written to */
private StringBuilder buf = new StringBuilder();
private boolean captureExpression = false;
private StringBuilder expr = new StringBuilder();
protected void out(String s) {
buf.append(s);
getStringBuilder().append( s );
}
/**
@ -72,7 +75,7 @@ options {
}
protected StringBuilder getStringBuilder() {
return buf;
return captureExpression ? expr : buf;
}
protected void nyi(AST n) {
@ -92,6 +95,27 @@ options {
protected void commaBetweenParameters(String comma) {
out(comma);
}
protected void captureExpressionStart() {
captureExpression = true;
}
protected void captureExpressionFinish() {
captureExpression = false;
}
protected String resetCapture() {
final String expression = expr.toString();
expr = new StringBuilder();
return expression;
}
/**
* Implementation note: This is just a stub. SqlGenerator contains the effective implementation.
*/
protected String renderOrderByElement(String expression, String order, String nulls) {
throw new UnsupportedOperationException("Concrete SQL generator should override this method.");
}
}
statement
@ -152,9 +176,14 @@ whereClauseExpr
| booleanExpr[ false ]
;
orderExprs
orderExprs { String ordExp = null; String ordDir = null; String ordNul = null; }
// TODO: remove goofy space before the comma when we don't have to regression test anymore.
: ( expr ) (dir:orderDirection { out(" "); out(dir); })? ( {out(", "); } orderExprs)?
// Dialect is provided a hook to render each ORDER BY element, so the expression is being captured instead of
// printing to the SQL output directly. See Dialect#renderOrderByElement(String, String, String, NullPrecedence).
: { captureExpressionStart(); } ( expr ) { captureExpressionFinish(); ordExp = resetCapture(); }
(dir:orderDirection { ordDir = #dir.getText(); })? (ordNul=nullOrdering)?
{ out( renderOrderByElement( ordExp, ordDir, ordNul ) ); }
( {out(", "); } orderExprs )?
;
groupExprs
@ -167,6 +196,15 @@ orderDirection
| DESCENDING
;
nullOrdering returns [String nullOrdExp = null]
: NULLS fl:nullPrecedence { nullOrdExp = #fl.getText(); }
;
nullPrecedence
: FIRST
| LAST
;
whereExpr
// Expect the filter subtree, followed by the theta join subtree, followed by the HQL condition subtree.
// Might need parens around the HQL condition if there is more than one subtree.

View File

@ -82,22 +82,11 @@ public enum CacheMode {
return null;
}
if ( GET.name().equalsIgnoreCase( setting ) ) {
return CacheMode.GET;
try {
return CacheMode.valueOf( setting.toUpperCase() );
}
if ( IGNORE.name().equalsIgnoreCase( setting ) ) {
return CacheMode.IGNORE;
catch ( IllegalArgumentException e ) {
throw new MappingException( "Unknown Cache Mode: " + setting );
}
if ( NORMAL.name().equalsIgnoreCase( setting ) ) {
return CacheMode.NORMAL;
}
if ( PUT.name().equalsIgnoreCase( setting ) ) {
return CacheMode.PUT;
}
if ( REFRESH.name().equalsIgnoreCase( setting ) ) {
return CacheMode.REFRESH;
}
throw new MappingException( "Unknown Cache Mode: " + setting );
}
}

View File

@ -38,7 +38,7 @@ public enum ConnectionReleaseMode{
* explicitly close all iterators and scrollable results. This mode may
* only be used with a JTA datasource.
*/
AFTER_STATEMENT("after_statement"),
AFTER_STATEMENT,
/**
* Indicates that JDBC connections should be released after each transaction
@ -47,18 +47,14 @@ public enum ConnectionReleaseMode{
* <p/>
* This is the default mode starting in 3.1; was previously {@link #ON_CLOSE}.
*/
AFTER_TRANSACTION("after_transaction"),
AFTER_TRANSACTION,
/**
* Indicates that connections should only be released when the Session is explicitly closed
* or disconnected; this is the legacy (Hibernate2 and pre-3.1) behavior.
*/
ON_CLOSE("on_close");
ON_CLOSE;
private final String name;
ConnectionReleaseMode(String name){
this.name = name;
}
public static ConnectionReleaseMode parse(String name){
return ConnectionReleaseMode.valueOf( name.toUpperCase() );
}

View File

@ -89,22 +89,11 @@ public enum FlushMode {
return null;
}
if ( AUTO.name().equalsIgnoreCase( setting ) ) {
return FlushMode.AUTO;
try {
return FlushMode.valueOf( setting.toUpperCase() );
}
if ( COMMIT.name().equalsIgnoreCase( setting ) ) {
return FlushMode.COMMIT;
catch ( IllegalArgumentException e ) {
throw new MappingException( "unknown FlushMode : " + setting );
}
if ( NEVER.name().equalsIgnoreCase( setting ) ) {
return FlushMode.NEVER;
}
if ( MANUAL.name().equalsIgnoreCase( setting ) ) {
return FlushMode.MANUAL;
}
if ( ALWAYS.name().equalsIgnoreCase( setting ) ) {
return FlushMode.ALWAYS;
}
throw new MappingException( "unknown FlushMode : " + setting );
}
}

View File

@ -66,6 +66,15 @@ public enum LockMode {
* <tt>UPGRADE</tt>.
*/
UPGRADE_NOWAIT( 10 ),
/**
* Attempt to obtain an upgrade lock, using an Oracle-style
* <tt>select for update skip locked</tt>. The semantics of
* this lock mode, once obtained, are the same as
* <tt>UPGRADE</tt>.
*/
UPGRADE_SKIPLOCKED( 10 ),
/**
* A <tt>WRITE</tt> lock is obtained when an object is updated
* or inserted. This lock mode is for internal use only and is

View File

@ -64,6 +64,12 @@ public class LockOptions implements Serializable {
*/
public static final int WAIT_FOREVER = -1;
/**
* Indicates that rows that are already locked should be skipped.
* @see #getTimeOut()
*/
public static final int SKIP_LOCKED = -2;
private LockMode lockMode = LockMode.NONE;
private int timeout = WAIT_FOREVER;
@ -221,9 +227,9 @@ public class LockOptions implements Serializable {
* The timeout is the amount of time, in milliseconds, we should instruct the database
* to wait for any requested pessimistic lock acquisition.
* <p/>
* {@link #NO_WAIT} and {@link #WAIT_FOREVER} represent 2 "magic" values.
* {@link #NO_WAIT}, {@link #WAIT_FOREVER} or {@link #SKIP_LOCKED} represent 3 "magic" values.
*
* @return timeout in milliseconds, or {@link #NO_WAIT} or {@link #WAIT_FOREVER}
* @return timeout in milliseconds, {@link #NO_WAIT}, {@link #WAIT_FOREVER} or {@link #SKIP_LOCKED}
*/
public int getTimeOut() {
return timeout;

View File

@ -0,0 +1,41 @@
package org.hibernate;
/**
 * Defines precedence of null values within {@code ORDER BY} clause.
 *
 * @author Lukasz Antoniak (lukasz dot antoniak at gmail dot com)
 */
public enum NullPrecedence {
	/**
	 * Null precedence not specified. Relies on the RDBMS implementation.
	 */
	NONE,
	/**
	 * Null values appear at the beginning of the sorted collection.
	 */
	FIRST,
	/**
	 * Null values appear at the end of the sorted collection.
	 */
	LAST;

	/**
	 * Interprets an external name as a {@link NullPrecedence}, ignoring case.
	 *
	 * @param type the external name ("none", "first" or "last"); may be {@code null}
	 *
	 * @return the matching precedence, or {@code null} when the name is unrecognized
	 *         (including a {@code null} input)
	 */
	public static NullPrecedence parse(String type) {
		// Scan the constants in declaration order (NONE, FIRST, LAST) and match
		// case-insensitively; equalsIgnoreCase( null ) is false, so null input falls through.
		for ( NullPrecedence precedence : values() ) {
			if ( precedence.name().equalsIgnoreCase( type ) ) {
				return precedence;
			}
		}
		return null;
	}

	/**
	 * Variant of {@link #parse(String)} that substitutes a fallback for unrecognized input.
	 *
	 * @param type the external name to interpret
	 * @param defaultValue the precedence to return when {@code type} cannot be parsed
	 *
	 * @return the parsed precedence, or {@code defaultValue} when parsing yields {@code null}
	 */
	public static NullPrecedence parse(String type, NullPrecedence defaultValue) {
		final NullPrecedence parsed = parse( type );
		return parsed == null ? defaultValue : parsed;
	}
}

View File

@ -266,7 +266,10 @@ public interface Session extends SharedSessionContract {
* not be synchronized with the database. This operation cascades to associated
* instances if the association is mapped with <tt>cascade="evict"</tt>.
*
* @param object a persistent instance
* @param object The entity to evict
*
* @throws NullPointerException if the passed object is {@code null}
* @throws IllegalArgumentException if the passed object is not defined as an entity
*/
public void evict(Object object);

View File

@ -27,6 +27,7 @@ import java.io.Serializable;
import java.sql.Connection;
import java.util.Map;
import java.util.Set;
import javax.naming.Referenceable;
import org.hibernate.boot.registry.StandardServiceRegistry;

View File

@ -25,6 +25,8 @@ package org.hibernate;
import java.io.Serializable;
import org.hibernate.procedure.ProcedureCall;
/**
* Contract methods shared between {@link Session} and {@link StatelessSession}
*
@ -91,17 +93,18 @@ public interface SharedSessionContract extends Serializable {
*
* @return The representation of the procedure call.
*/
public StoredProcedureCall createStoredProcedureCall(String procedureName);
public ProcedureCall createStoredProcedureCall(String procedureName);
/**
* Creates a call to a stored procedure with specific result set entity mappings
* Creates a call to a stored procedure with specific result set entity mappings. Each class named
* is considered a "root return".
*
* @param procedureName The name of the procedure.
* @param resultClasses The entity(s) to map the result on to.
*
* @return The representation of the procedure call.
*/
public StoredProcedureCall createStoredProcedureCall(String procedureName, Class... resultClasses);
public ProcedureCall createStoredProcedureCall(String procedureName, Class... resultClasses);
/**
* Creates a call to a stored procedure with specific result set entity mappings
@ -111,7 +114,7 @@ public interface SharedSessionContract extends Serializable {
*
* @return The representation of the procedure call.
*/
public StoredProcedureCall createStoredProcedureCall(String procedureName, String... resultSetMappings);
public ProcedureCall createStoredProcedureCall(String procedureName, String... resultSetMappings);
/**
* Create {@link Criteria} instance for the given class (entity or subclasses/implementors)

View File

@ -1,197 +0,0 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate;
import java.util.List;
import javax.persistence.ParameterMode;
import javax.persistence.TemporalType;
import org.hibernate.type.Type;
/**
 * Defines support for executing database stored procedures and functions: registering parameters
 * (by name or by position), binding values to them, and retrieving the execution outputs.
 *
 * @author Steve Ebersole
 */
public interface StoredProcedureCall extends BasicQueryContract, SynchronizeableQuery {
	@Override
	StoredProcedureCall addSynchronizedQuerySpace(String querySpace);

	@Override
	StoredProcedureCall addSynchronizedEntityName(String entityName) throws MappingException;

	@Override
	StoredProcedureCall addSynchronizedEntityClass(Class entityClass) throws MappingException;

	/**
	 * Get the name of the stored procedure to be called.
	 *
	 * @return The procedure name.
	 */
	public String getProcedureName();

	/**
	 * Register a positional parameter.
	 * All positional parameters must be registered.
	 *
	 * @param position parameter position
	 * @param type type of the parameter
	 * @param mode parameter mode
	 *
	 * @return the same query instance
	 */
	StoredProcedureCall registerStoredProcedureParameter(
			int position,
			Class type,
			ParameterMode mode);

	/**
	 * Register a named parameter.
	 * When using parameter names, all parameters must be registered
	 * in the order in which they occur in the parameter list of the
	 * stored procedure.
	 *
	 * @param parameterName name of the parameter as registered or specified in metadata
	 * @param type type of the parameter
	 * @param mode parameter mode
	 *
	 * @return the same query instance
	 */
	StoredProcedureCall registerStoredProcedureParameter(
			String parameterName,
			Class type,
			ParameterMode mode);

	/**
	 * Retrieve all registered parameters.
	 *
	 * @return The (immutable) list of all registered parameters.
	 */
	public List<StoredProcedureParameter> getRegisteredParameters();

	/**
	 * Retrieve parameter registered by name.
	 *
	 * @param name The name under which the parameter of interest was registered.
	 *
	 * @return The registered parameter.
	 */
	public StoredProcedureParameter getRegisteredParameter(String name);

	/**
	 * Retrieve parameter registered by position.
	 *
	 * @param position The position at which the parameter of interest was registered.
	 *
	 * @return The registered parameter.
	 */
	public StoredProcedureParameter getRegisteredParameter(int position);

	/**
	 * Retrieve the outputs of executing this procedure call.
	 *
	 * @return The procedure call outputs.
	 */
	public StoredProcedureOutputs getOutputs();

	/**
	 * Describes a parameter registered with the stored procedure. Parameters can be either named or positional
	 * as the registration mechanism. Named and positional should not be mixed.
	 */
	public static interface StoredProcedureParameter<T> {
		/**
		 * The name under which this parameter was registered. Can be {@code null} which should indicate that
		 * positional registration was used (and therefore {@link #getPosition()} should return non-{@code null}).
		 *
		 * @return The name, or {@code null} if this parameter was registered positionally.
		 */
		public String getName();

		/**
		 * The position at which this parameter was registered. Can be {@code null} which should indicate that
		 * named registration was used (and therefore {@link #getName()} should return non-{@code null}).
		 *
		 * @return The position, or {@code null} if this parameter was registered by name.
		 */
		public Integer getPosition();

		/**
		 * Obtain the Java type of parameter. This is used to guess the Hibernate type (unless {@link #setHibernateType}
		 * is called explicitly).
		 *
		 * @return The parameter Java type.
		 */
		public Class<T> getType();

		/**
		 * Retrieves the parameter "mode" which describes how the parameter is defined in the actual database procedure
		 * definition (is it an INPUT parameter? An OUTPUT parameter? etc).
		 *
		 * @return The parameter mode.
		 */
		public ParameterMode getMode();

		/**
		 * Set the Hibernate mapping type for this parameter.
		 *
		 * @param type The Hibernate mapping type.
		 */
		public void setHibernateType(Type type);

		/**
		 * Retrieve the binding associated with this parameter. The binding is only relevant for INPUT parameters. Can
		 * return {@code null} if nothing has been bound yet. To bind a value to the parameter use one of the
		 * {@link #bindValue} methods.
		 *
		 * @return The parameter binding
		 */
		public StoredProcedureParameterBind getParameterBind();

		/**
		 * Bind a value to the parameter. How this value is bound to the underlying JDBC CallableStatement is
		 * totally dependent on the Hibernate type.
		 *
		 * @param value The value to bind.
		 */
		public void bindValue(T value);

		/**
		 * Bind a value to the parameter, using just a specified portion of the DATE/TIME value. It is illegal to call
		 * this form if the parameter is not DATE/TIME type. The Hibernate type is circumvented in this case and
		 * an appropriate "precision" Type is used instead.
		 *
		 * @param value The value to bind
		 * @param explicitTemporalType An explicitly supplied TemporalType.
		 */
		public void bindValue(T value, TemporalType explicitTemporalType);
	}

	/**
	 * Describes an input value binding for any IN/INOUT parameters.
	 */
	public static interface StoredProcedureParameterBind<T> {
		/**
		 * Retrieves the bound value.
		 *
		 * @return The bound value.
		 */
		public T getValue();

		/**
		 * If {@code <T>} represents a DATE/TIME type value, JPA usually allows specifying the particular parts of
		 * the DATE/TIME value to be bound. This value represents the particular part the user requested to be bound.
		 *
		 * @return The explicitly supplied TemporalType.
		 */
		public TemporalType getExplicitTemporalType();
	}
}

View File

@ -72,7 +72,9 @@ public abstract class AbstractEntityInsertAction extends EntityAction {
this.isExecuted = false;
this.areTransientReferencesNullified = false;
handleNaturalIdPreSaveNotifications();
if (id != null) {
handleNaturalIdPreSaveNotifications();
}
}
/**
@ -193,7 +195,17 @@ public abstract class AbstractEntityInsertAction extends EntityAction {
/**
* Handle sending notifications needed for natural-id after saving
*/
protected void handleNaturalIdPostSaveNotifications() {
public void handleNaturalIdPostSaveNotifications(Serializable generatedId) {
if (isEarlyInsert()) {
// with early insert, we still need to add a local (transactional) natural id cross-reference
getSession().getPersistenceContext().getNaturalIdHelper().manageLocalNaturalIdCrossReference(
getPersister(),
generatedId,
state,
null,
CachedNaturalIdValueSource.INSERT
);
}
// after save, we need to manage the shared cache entries
getSession().getPersistenceContext().getNaturalIdHelper().manageSharedNaturalIdCrossReference(
getPersister(),

View File

@ -123,7 +123,7 @@ public final class EntityInsertAction extends AbstractEntityInsertAction {
}
}
handleNaturalIdPostSaveNotifications();
handleNaturalIdPostSaveNotifications(id);
postInsert();

View File

@ -33,9 +33,11 @@ import static java.lang.annotation.RetentionPolicy.RUNTIME;
* Define a DB index
*
* @author Emmanuel Bernard
* @deprecated Using {@link javax.persistence.Index} instead.
*/
@Target({FIELD, METHOD})
@Retention(RUNTIME)
@Deprecated
public @interface Index {
String name();

View File

@ -0,0 +1,42 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.annotations;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Marks a character data type (String, Character, character, Clob) as being a nationalized variant
 * (NVARCHAR, NCHAR, NCLOB, etc).
 * <p/>
 * May be placed on the persistent field or on its getter method, matching the declared
 * {@code @Target({ METHOD, FIELD })}.
 *
 * @author Steve Ebersole
 */
@Target( { METHOD, FIELD } )
@Retention( RUNTIME )
public @interface Nationalized {
}

View File

@ -26,19 +26,26 @@ package org.hibernate.boot.registry;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.internal.BootstrapServiceRegistryImpl;
import org.hibernate.boot.registry.selector.Availability;
import org.hibernate.boot.registry.selector.AvailabilityAnnouncer;
import org.hibernate.boot.registry.selector.internal.StrategySelectorBuilder;
import org.hibernate.integrator.internal.IntegratorServiceImpl;
import org.hibernate.integrator.spi.Integrator;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
import org.hibernate.boot.registry.selector.internal.StrategySelectorBuilder;
import org.hibernate.internal.util.ClassLoaderHelper;
/**
* Builder for bootstrap {@link org.hibernate.service.ServiceRegistry} instances.
*
* @author Steve Ebersole
* @author Brett Meyer
*
* @see BootstrapServiceRegistryImpl
* @see StandardServiceRegistryBuilder#StandardServiceRegistryBuilder(org.hibernate.boot.registry.BootstrapServiceRegistry)
@ -46,9 +53,11 @@ import org.hibernate.boot.registry.selector.internal.StrategySelectorBuilder;
public class BootstrapServiceRegistryBuilder {
private final LinkedHashSet<Integrator> providedIntegrators = new LinkedHashSet<Integrator>();
private List<ClassLoader> providedClassLoaders;
private ClassLoaderService providedClassLoaderService;
private StrategySelectorBuilder strategySelectorBuilder = new StrategySelectorBuilder();
/**
* Add an {@link Integrator} to be applied to the bootstrap registry.
*
@ -75,6 +84,18 @@ public class BootstrapServiceRegistryBuilder {
return this;
}
/**
* Adds a provided {@link ClassLoaderService} for use in class-loading and resource-lookup
*
* @param classLoader The class loader to use
*
* @return {@code this}, for method chaining
*/
public BootstrapServiceRegistryBuilder with(ClassLoaderService classLoaderService) {
providedClassLoaderService = classLoaderService;
return this;
}
/**
* Applies the specified {@link ClassLoader} as the application class loader for the bootstrap registry
*
@ -171,7 +192,23 @@ public class BootstrapServiceRegistryBuilder {
* @return The built bootstrap registry
*/
public BootstrapServiceRegistry build() {
final ClassLoaderServiceImpl classLoaderService = new ClassLoaderServiceImpl( providedClassLoaders );
final ClassLoaderService classLoaderService;
if ( providedClassLoaderService == null ) {
// Use a set. As an example, in JPA, OsgiClassLoader may be in both
// the providedClassLoaders and the overridenClassLoader.
final Set<ClassLoader> classLoaders = new HashSet<ClassLoader>();
if ( providedClassLoaders != null ) {
classLoaders.addAll( providedClassLoaders );
}
if ( ClassLoaderHelper.overridenClassLoader != null ) {
classLoaders.add( ClassLoaderHelper.overridenClassLoader );
}
classLoaderService = new ClassLoaderServiceImpl( classLoaders );
} else {
classLoaderService = providedClassLoaderService;
}
final IntegratorServiceImpl integratorService = new IntegratorServiceImpl(
providedIntegrators,

View File

@ -35,14 +35,13 @@ import org.hibernate.cfg.Environment;
import org.hibernate.integrator.spi.Integrator;
import org.hibernate.integrator.spi.IntegratorService;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.boot.registry.internal.BootstrapServiceRegistryImpl;
import org.hibernate.jaxb.spi.cfg.JaxbHibernateConfiguration;
import org.hibernate.boot.registry.internal.ConfigLoader;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
import org.hibernate.service.Service;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.service.StandardServiceInitiators;
import org.hibernate.service.internal.ProvidedService;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
import org.hibernate.service.spi.ServiceContributor;
/**

View File

@ -37,11 +37,10 @@ import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import org.jboss.logging.Logger;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
import org.hibernate.cfg.AvailableSettings;
import org.jboss.logging.Logger;
/**
* Standard implementation of the service for interacting with class loaders
@ -61,7 +60,7 @@ public class ClassLoaderServiceImpl implements ClassLoaderService {
this( Collections.singletonList( classLoader ) );
}
public ClassLoaderServiceImpl(List<ClassLoader> providedClassLoaders) {
public ClassLoaderServiceImpl(Collection<ClassLoader> providedClassLoaders) {
final LinkedHashSet<ClassLoader> orderedClassLoaderSet = new LinkedHashSet<ClassLoader>();
// first add all provided class loaders, if any
@ -74,7 +73,7 @@ public class ClassLoaderServiceImpl implements ClassLoaderService {
}
// normalize adding known class-loaders...
// first the Hibernate class loader
// first, the Hibernate class loader
orderedClassLoaderSet.add( ClassLoaderServiceImpl.class.getClassLoader() );
// then the TCCL, if one...
final ClassLoader tccl = locateTCCL();
@ -102,12 +101,20 @@ public class ClassLoaderServiceImpl implements ClassLoaderService {
providedClassLoaders.add( classLoader );
}
}
addIfSet( providedClassLoaders, AvailableSettings.APP_CLASSLOADER, configVales );
addIfSet( providedClassLoaders, AvailableSettings.RESOURCES_CLASSLOADER, configVales );
addIfSet( providedClassLoaders, AvailableSettings.HIBERNATE_CLASSLOADER, configVales );
addIfSet( providedClassLoaders, AvailableSettings.ENVIRONMENT_CLASSLOADER, configVales );
if ( providedClassLoaders.isEmpty() ) {
log.debugf( "Incoming config yielded no classloaders; adding standard SE ones" );
final ClassLoader tccl = locateTCCL();
if ( tccl != null ) {
providedClassLoaders.add( tccl );
}
providedClassLoaders.add( ClassLoaderServiceImpl.class.getClassLoader() );
}
return new ClassLoaderServiceImpl( providedClassLoaders );
}

View File

@ -26,9 +26,7 @@ package org.hibernate.boot.registry.selector.internal;
import java.util.ArrayList;
import java.util.List;
import org.jboss.logging.Logger;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.selector.Availability;
import org.hibernate.boot.registry.selector.AvailabilityAnnouncer;
import org.hibernate.boot.registry.selector.SimpleAvailabilityImpl;
@ -97,6 +95,7 @@ import org.hibernate.engine.transaction.spi.TransactionFactory;
import org.hibernate.hql.spi.MultiTableBulkIdStrategy;
import org.hibernate.hql.spi.PersistentTableBulkIdStrategy;
import org.hibernate.hql.spi.TemporaryTableBulkIdStrategy;
import org.jboss.logging.Logger;
/**
* @author Steve Ebersole
@ -127,7 +126,7 @@ public class StrategySelectorBuilder {
explicitAvailabilities.add( availability );
}
public StrategySelector buildSelector(ClassLoaderServiceImpl classLoaderService) {
public StrategySelector buildSelector(ClassLoaderService classLoaderService) {
StrategySelectorImpl strategySelector = new StrategySelectorImpl( classLoaderService );
// build the baseline...

View File

@ -21,21 +21,19 @@
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.jdbc.internal.proxy;
package org.hibernate.bytecode.enhance;
import org.hibernate.HibernateException;
/**
* Indicates a problem defining or instantiating a JDBC proxy class.
*
* @author Steve Ebersole
*/
public class JdbcProxyException extends HibernateException {
public JdbcProxyException(String message, Throwable root) {
super( message, root );
}
public JdbcProxyException(String message) {
/**
 * Indicates an error condition encountered while enhancing bytecode.
 */
public class EnhancementException extends HibernateException {
	public EnhancementException(String message, Throwable cause) {
		super( message, cause );
	}

	public EnhancementException(String message) {
		super( message );
	}
}

View File

@ -0,0 +1,98 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.bytecode.enhance.spi;
import javassist.CtClass;
import javassist.CtField;
/**
 * Contract that drives the bytecode enhancement process, answering questions about which classes
 * and fields should be enhanced and how.
 * <p/>
 * todo : not sure its a great idea to expose Javassist classes this way.
 * maybe wrap them in our own contracts?
 *
 * @author Steve Ebersole
 */
public interface EnhancementContext {
	/**
	 * Obtain access to the ClassLoader that can be used to load Class references. In JPA SPI terms, this
	 * should be a "temporary class loader" as defined by
	 * {@link javax.persistence.spi.PersistenceUnitInfo#getNewTempClassLoader()}
	 *
	 * @return The class loader to use during enhancement; may be {@code null}.
	 */
	public ClassLoader getLoadingClassLoader();

	/**
	 * Does the given class descriptor represent an entity class?
	 *
	 * @param classDescriptor The descriptor of the class to check.
	 *
	 * @return {@code true} if the class is an entity; {@code false} otherwise.
	 */
	public boolean isEntityClass(CtClass classDescriptor);

	/**
	 * Does the given class name represent an embeddable/component class?
	 *
	 * @param classDescriptor The descriptor of the class to check.
	 *
	 * @return {@code true} if the class is an embeddable/component; {@code false} otherwise.
	 */
	public boolean isCompositeClass(CtClass classDescriptor);

	/**
	 * Should we in-line dirty checking for persistent attributes for this class?
	 *
	 * @param classDescriptor The descriptor of the class to check.
	 *
	 * @return {@code true} indicates that dirty checking should be in-lined within the entity; {@code false}
	 * indicates it should not. In-lined is more easily serializable and probably more performant.
	 */
	public boolean doDirtyCheckingInline(CtClass classDescriptor);

	/**
	 * Does the given class define any lazy-loadable attributes?
	 *
	 * @param classDescriptor The descriptor of the class to check.
	 *
	 * @return {@code true} if the class has lazy-loadable attributes; {@code false} otherwise.
	 */
	public boolean hasLazyLoadableAttributes(CtClass classDescriptor);

	// todo : may be better to invert these 2 such that the context is asked for an ordered list of persistent fields for an entity/composite

	/**
	 * Does the field represent persistent state? Persistent fields will be "enhanced".
	 * <p/>
	 * NOTE : may be better to perform basic checks in the caller (non-static, etc) and call out with just the
	 * Class name and field name...
	 *
	 * @param ctField The field reference.
	 *
	 * @return {@code true} if the field is persistent; {@code false} otherwise.
	 */
	public boolean isPersistentField(CtField ctField);

	/**
	 * For fields which are persistent (according to {@link #isPersistentField}), determine the corresponding ordering
	 * maintained within the Hibernate metamodel.
	 *
	 * @param persistentFields The persistent field references.
	 *
	 * @return The ordered references.
	 */
	public CtField[] order(CtField[] persistentFields);

	/**
	 * Is the given field lazy-loadable?
	 *
	 * @param field The field reference.
	 *
	 * @return {@code true} if the field is lazy-loadable; {@code false} otherwise.
	 */
	public boolean isLazyLoadable(CtField field);
}

View File

@ -0,0 +1,986 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.bytecode.enhance.spi;
import javax.persistence.Transient;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import javassist.CannotCompileException;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtField;
import javassist.CtMethod;
import javassist.CtNewMethod;
import javassist.LoaderClassPath;
import javassist.Modifier;
import javassist.NotFoundException;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.BadBytecode;
import javassist.bytecode.CodeAttribute;
import javassist.bytecode.CodeIterator;
import javassist.bytecode.ConstPool;
import javassist.bytecode.FieldInfo;
import javassist.bytecode.MethodInfo;
import javassist.bytecode.Opcode;
import javassist.bytecode.StackMapTable;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.stackmap.MapMaker;
import org.jboss.logging.Logger;
import org.hibernate.HibernateException;
import org.hibernate.bytecode.enhance.EnhancementException;
import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.ManagedComposite;
import org.hibernate.engine.spi.ManagedEntity;
import org.hibernate.engine.spi.PersistentAttributeInterceptable;
import org.hibernate.engine.spi.PersistentAttributeInterceptor;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.mapping.PersistentClass;
/**
* @author Steve Ebersole
* @author Jason Greene
*/
public class Enhancer {
private static final CoreMessageLogger log = Logger.getMessageLogger( CoreMessageLogger.class, Enhancer.class.getName() );
public static final String PERSISTENT_FIELD_READER_PREFIX = "$$_hibernate_read_";
public static final String PERSISTENT_FIELD_WRITER_PREFIX = "$$_hibernate_write_";
public static final String ENTITY_INSTANCE_GETTER_NAME = "$$_hibernate_getEntityInstance";
public static final String ENTITY_ENTRY_FIELD_NAME = "$$_hibernate_entityEntryHolder";
public static final String ENTITY_ENTRY_GETTER_NAME = "$$_hibernate_getEntityEntry";
public static final String ENTITY_ENTRY_SETTER_NAME = "$$_hibernate_setEntityEntry";
public static final String PREVIOUS_FIELD_NAME = "$$_hibernate_previousManagedEntity";
public static final String PREVIOUS_GETTER_NAME = "$$_hibernate_getPreviousManagedEntity";
public static final String PREVIOUS_SETTER_NAME = "$$_hibernate_setPreviousManagedEntity";
public static final String NEXT_FIELD_NAME = "$$_hibernate_nextManagedEntity";
public static final String NEXT_GETTER_NAME = "$$_hibernate_getNextManagedEntity";
public static final String NEXT_SETTER_NAME = "$$_hibernate_setNextManagedEntity";
public static final String INTERCEPTOR_FIELD_NAME = "$$_hibernate_attributeInterceptor";
public static final String INTERCEPTOR_GETTER_NAME = "$$_hibernate_getInterceptor";
public static final String INTERCEPTOR_SETTER_NAME = "$$_hibernate_setInterceptor";
private final EnhancementContext enhancementContext;
private final ClassPool classPool;
private final CtClass managedEntityCtClass;
private final CtClass managedCompositeCtClass;
private final CtClass attributeInterceptorCtClass;
private final CtClass attributeInterceptableCtClass;
private final CtClass entityEntryCtClass;
private final CtClass objectCtClass;
/**
 * Constructs the Enhancer, priming the Javassist {@link ClassPool} with the marker contracts
 * ({@code ManagedEntity}, {@code ManagedComposite}, {@code PersistentAttributeInterceptable},
 * {@code PersistentAttributeInterceptor}) that enhanced classes will implement, plus a
 * placeholder for {@code EntityEntry}.
 *
 * @param enhancementContext Describes the context in which enhancement will occur.
 *
 * @throws EnhancementException If the ClassPool could not be primed.
 */
public Enhancer(EnhancementContext enhancementContext) {
	this.enhancementContext = enhancementContext;
	this.classPool = buildClassPool( enhancementContext );

	try {
		// add ManagedEntity contract
		// NOTE(review): the resource InputStreams passed to makeClass are never closed — consider
		// try/finally; TODO confirm whether Javassist closes them internally.
		this.managedEntityCtClass = classPool.makeClass(
				ManagedEntity.class.getClassLoader().getResourceAsStream(
						ManagedEntity.class.getName().replace( '.', '/' ) + ".class"
				)
		);

		// add ManagedComposite contract
		this.managedCompositeCtClass = classPool.makeClass(
				ManagedComposite.class.getClassLoader().getResourceAsStream(
						ManagedComposite.class.getName().replace( '.', '/' ) + ".class"
				)
		);

		// add PersistentAttributeInterceptable contract
		this.attributeInterceptableCtClass = classPool.makeClass(
				PersistentAttributeInterceptable.class.getClassLoader().getResourceAsStream(
						PersistentAttributeInterceptable.class.getName().replace( '.', '/' ) + ".class"
				)
		);

		// add PersistentAttributeInterceptor contract
		this.attributeInterceptorCtClass = classPool.makeClass(
				PersistentAttributeInterceptor.class.getClassLoader().getResourceAsStream(
						PersistentAttributeInterceptor.class.getName().replace( '.', '/' ) + ".class"
				)
		);

		// "add" EntityEntry : only a name placeholder is needed (the real class is resolved at runtime)
		this.entityEntryCtClass = classPool.makeClass( EntityEntry.class.getName() );
	}
	catch (IOException e) {
		throw new EnhancementException( "Could not prepare Javassist ClassPool", e );
	}

	try {
		this.objectCtClass = classPool.getCtClass( Object.class.getName() );
	}
	catch (NotFoundException e) {
		throw new EnhancementException( "Could not prepare Javassist ClassPool", e );
	}
}
/**
 * Builds the Javassist ClassPool used for enhancement, appending the context's loading
 * class loader to its search path when one is available.
 */
private ClassPool buildClassPool(EnhancementContext enhancementContext) {
	final ClassPool pool = new ClassPool( false );
	final ClassLoader loader = enhancementContext.getLoadingClassLoader();
	if ( loader == null ) {
		return pool;
	}
	pool.appendClassPath( new LoaderClassPath( loader ) );
	return pool;
}
/**
 * Performs the enhancement.
 *
 * @param className The name of the class whose bytecode is being enhanced.
 * @param originalBytes The class's original (pre-enhancement) byte code
 *
 * @return The enhanced bytecode. Could be the same as the original bytecode if the original was
 * already enhanced or we could not enhance it for some reason.
 *
 * @throws EnhancementException Indicates a problem converting the enhanced class back to bytecode.
 */
public byte[] enhance(String className, byte[] originalBytes) throws EnhancementException {
	final CtClass managedCtClass;
	try {
		managedCtClass = classPool.makeClassIfNew( new ByteArrayInputStream( originalBytes ) );
	}
	catch (IOException e) {
		// could not even parse the incoming bytes; log and return them untouched rather than fail
		log.unableToBuildEnhancementMetamodel( className );
		return originalBytes;
	}

	enhance( managedCtClass );

	DataOutputStream out = null;
	try {
		final ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
		out = new DataOutputStream( byteStream );
		managedCtClass.toBytecode( out );
		return byteStream.toByteArray();
	}
	catch (Exception e) {
		log.unableToTransformClass( e.getMessage() );
		// FIX: previously threw HibernateException without the cause, losing the original stack
		// trace and contradicting the declared `throws EnhancementException`. EnhancementException
		// extends HibernateException, so existing callers are unaffected.
		throw new EnhancementException( "Unable to transform class: " + e.getMessage(), e );
	}
	finally {
		try {
			if ( out != null ) {
				out.close();
			}
		}
		catch (IOException e) {
			//swallow
		}
	}
}
/**
 * Performs the enhancement of a parsed class. Interfaces and already-enhanced classes (those
 * whose class file already lists {@code ManagedEntity} or {@code ManagedComposite} among its
 * interfaces) are skipped; otherwise the class is enhanced as an entity or composite according
 * to the {@link EnhancementContext}.
 */
private void enhance(CtClass managedCtClass) {
	final String className = managedCtClass.getName();
	log.debugf( "Enhancing %s", className );

	// can't effectively enhance interfaces
	if ( managedCtClass.isInterface() ) {
		log.debug( "skipping enhancement : interface" );
		return;
	}

	// skip already enhanced classes
	final String[] interfaceNames = managedCtClass.getClassFile2().getInterfaces();
	for ( String interfaceName : interfaceNames ) {
		if ( ManagedEntity.class.getName().equals( interfaceName )
				|| ManagedComposite.class.getName().equals( interfaceName ) ) {
			log.debug( "skipping enhancement : already enhanced" );
			return;
		}
	}

	if ( enhancementContext.isEntityClass( managedCtClass ) ) {
		enhanceAsEntity( managedCtClass );
	}
	else if ( enhancementContext.isCompositeClass( managedCtClass ) ) {
		enhanceAsComposite( managedCtClass );
	}
	else {
		log.debug( "skipping enhancement : not entity or composite" );
	}
}
/**
 * Enhances a class identified as an entity: adds the {@code ManagedEntity} interface, enhances
 * its persistent attributes, and weaves in the entity-instance, entity-entry, and
 * previous/next linked-list handling.
 */
private void enhanceAsEntity(CtClass managedCtClass) {
	// add the ManagedEntity interface
	managedCtClass.addInterface( managedEntityCtClass );

	enhancePersistentAttributes( managedCtClass );

	addEntityInstanceHandling( managedCtClass );
	addEntityEntryHandling( managedCtClass );
	addLinkedPreviousHandling( managedCtClass );
	addLinkedNextHandling( managedCtClass );
}
/**
 * Enhances a class identified as an embeddable/composite: only persistent-attribute
 * enhancement applies (no entity-entry or linked-list handling).
 */
private void enhanceAsComposite(CtClass managedCtClass) {
	enhancePersistentAttributes( managedCtClass );
}
/**
 * Weaves in the {@code ManagedEntity#$$_hibernate_getEntityInstance} method, whose body simply
 * returns {@code this}.
 */
private void addEntityInstanceHandling(CtClass managedCtClass) {
	// add the ManagedEntity#$$_hibernate_getEntityInstance method
	try {
		managedCtClass.addMethod(
				CtNewMethod.make(
						objectCtClass,
						ENTITY_INSTANCE_GETTER_NAME,
						new CtClass[0],
						new CtClass[0],
						"{ return this; }",
						managedCtClass
				)
		);
	}
	catch (CannotCompileException e) {
		throw new EnhancementException(
				String.format(
						"Could not enhance entity class [%s] to add EntityEntry getter",
						managedCtClass.getName()
				),
				e
		);
	}
}
/**
 * Weaves in the {@code EntityEntry} holder field plus its getter and setter.
 */
private void addEntityEntryHandling(CtClass managedCtClass) {
	addFieldWithGetterAndSetter(
			managedCtClass,
			entityEntryCtClass,
			ENTITY_ENTRY_FIELD_NAME,
			ENTITY_ENTRY_GETTER_NAME,
			ENTITY_ENTRY_SETTER_NAME
	);
}
/**
 * Weaves in the "previous managed entity" linked-list field plus its getter and setter.
 */
private void addLinkedPreviousHandling(CtClass managedCtClass) {
	addFieldWithGetterAndSetter(
			managedCtClass,
			managedEntityCtClass,
			PREVIOUS_FIELD_NAME,
			PREVIOUS_GETTER_NAME,
			PREVIOUS_SETTER_NAME
	);
}
/**
 * Weaves in the "next managed entity" linked-list field plus its getter and setter.
 */
private void addLinkedNextHandling(CtClass managedCtClass) {
	addFieldWithGetterAndSetter(
			managedCtClass,
			managedEntityCtClass,
			NEXT_FIELD_NAME,
			NEXT_GETTER_NAME,
			NEXT_SETTER_NAME
	);
}
/**
 * Obtains the runtime-visible annotations attribute of the given field, creating and attaching
 * an empty one if the field does not yet have one.
 */
private AnnotationsAttribute getVisibleAnnotations(FieldInfo fieldInfo) {
	final AnnotationsAttribute existing =
			(AnnotationsAttribute) fieldInfo.getAttribute( AnnotationsAttribute.visibleTag );
	if ( existing != null ) {
		return existing;
	}
	final AnnotationsAttribute created =
			new AnnotationsAttribute( fieldInfo.getConstPool(), AnnotationsAttribute.visibleTag );
	fieldInfo.addAttribute( created );
	return created;
}
/**
 * Enhances all persistent attributes of the class: weaves in interceptor handling (and in-line
 * dirty handling when requested by the context), generates a reader/writer pair per persistent
 * field, then rewrites field accesses to go through those readers/writers.
 */
private void enhancePersistentAttributes(CtClass managedCtClass) {
	addInterceptorHandling( managedCtClass );
	if ( enhancementContext.doDirtyCheckingInline( managedCtClass ) ) {
		addInLineDirtyHandling( managedCtClass );
	}

	// NOTE(review): IdentityHashMap with String keys compares by reference, not equals();
	// lookups only match if the consumer sees the same (e.g. interned) String instances.
	// TODO confirm against transformFieldAccessesIntoReadsAndWrites, or use HashMap.
	final IdentityHashMap<String,PersistentAttributeDescriptor> attrDescriptorMap
			= new IdentityHashMap<String, PersistentAttributeDescriptor>();

	for ( CtField persistentField : collectPersistentFields( managedCtClass ) ) {
		attrDescriptorMap.put(
				persistentField.getName(),
				enhancePersistentAttribute( managedCtClass, persistentField )
		);
	}

	// lastly, find all references to the transformed fields and replace with calls to the added reader/writer
	transformFieldAccessesIntoReadsAndWrites( managedCtClass, attrDescriptorMap );
}
/**
 * Generates the reader and writer methods for a single persistent field and
 * bundles them, together with the field's type descriptor, into a
 * {@link PersistentAttributeDescriptor}.
 *
 * @param managedCtClass the class being enhanced
 * @param persistentField the persistent field to enhance
 * @return the descriptor for the enhanced attribute
 * @throws EnhancementException wrapping any failure during generation
 */
private PersistentAttributeDescriptor enhancePersistentAttribute(CtClass managedCtClass, CtField persistentField) {
	try {
		final AttributeTypeDescriptor typeDescriptor = resolveAttributeTypeDescriptor( persistentField );
		final CtMethod reader = generateFieldReader( managedCtClass, persistentField, typeDescriptor );
		final CtMethod writer = generateFieldWriter( managedCtClass, persistentField, typeDescriptor );
		return new PersistentAttributeDescriptor( persistentField, reader, writer, typeDescriptor );
	}
	catch (Exception e) {
		throw new EnhancementException(
				String.format(
						"Unable to enhance persistent attribute [%s:%s]",
						managedCtClass.getName(),
						persistentField.getName()
				),
				e
		);
	}
}
/**
 * Collects the declared fields of the class that the enhancement context
 * considers persistent, in the order imposed by the context.
 *
 * @param managedCtClass the class being enhanced
 * @return the ordered persistent fields
 */
private CtField[] collectPersistentFields(CtClass managedCtClass) {
	// todo : drive this from the Hibernate metamodel instance...
	final List<CtField> persistentFields = new ArrayList<CtField>();
	for ( CtField ctField : managedCtClass.getDeclaredFields() ) {
		// ignore synthetic "$..." fields added by enhancement, statics, and
		// anything the context does not regard as persistent
		final boolean addedByEnhancement = ctField.getName().startsWith( "$" );
		if ( !addedByEnhancement
				&& !Modifier.isStatic( ctField.getModifiers() )
				&& enhancementContext.isPersistentField( ctField ) ) {
			persistentFields.add( ctField );
		}
	}
	return enhancementContext.order( persistentFields.toArray( new CtField[persistentFields.size()] ) );
}
/**
 * Weaves in PersistentAttributeInterceptable support (the interceptor field
 * with its getter/setter) when the class actually needs an interceptor.
 *
 * @param managedCtClass the class being enhanced
 */
private void addInterceptorHandling(CtClass managedCtClass) {
	// an interceptor is needed when in-line dirty checking is NOT used, or when
	// the class has lazy-loadable attributes (which require read interception)
	final boolean interceptorNeeded = !enhancementContext.doDirtyCheckingInline( managedCtClass )
			|| enhancementContext.hasLazyLoadableAttributes( managedCtClass );
	if ( !interceptorNeeded ) {
		return;
	}
	log.debug( "Weaving in PersistentAttributeInterceptable implementation" );
	// add in the PersistentAttributeInterceptable contract
	managedCtClass.addInterface( attributeInterceptableCtClass );
	addFieldWithGetterAndSetter(
			managedCtClass, attributeInterceptorCtClass,
			INTERCEPTOR_FIELD_NAME, INTERCEPTOR_GETTER_NAME, INTERCEPTOR_SETTER_NAME
	);
}
/**
 * Placeholder for generating in-line dirty-checking support inside the
 * enhanced class itself; currently a no-op.
 *
 * @param managedCtClass the class being enhanced
 */
private void addInLineDirtyHandling(CtClass managedCtClass) {
// todo : implement
}
/**
 * Adds a (private, transient, {@code @Transient}) field of the given type to
 * the target class, along with a simple getter and setter for it.
 *
 * @param targetClass the class receiving the new member
 * @param fieldType the type of the new field
 * @param fieldName the name of the new field
 * @param getterName the name of the generated getter
 * @param setterName the name of the generated setter
 */
private void addFieldWithGetterAndSetter(
		CtClass targetClass,
		CtClass fieldType,
		String fieldName,
		String getterName,
		String setterName) {
	final CtField field = addField( targetClass, fieldType, fieldName, true );
	addGetter( targetClass, field, getterName );
	addSetter( targetClass, field, setterName );
}
/**
 * Adds a new field to the target class.  The added field is always made
 * private and annotated {@code @Transient}; it is additionally given the
 * {@code transient} modifier when requested.
 *
 * @param targetClass the class receiving the field
 * @param fieldType the field's type
 * @param fieldName the field's name
 * @param makeTransient whether to add the {@code transient} modifier
 * @return the newly added field
 * @throws EnhancementException if Javassist cannot add the field
 */
private CtField addField(CtClass targetClass, CtClass fieldType, String fieldName, boolean makeTransient) {
	final CtField field;
	try {
		field = new CtField( fieldType, fieldName, targetClass );
		targetClass.addField( field );
	}
	catch (CannotCompileException e) {
		throw new EnhancementException(
				String.format(
						"Could not enhance class [%s] to add field [%s]",
						targetClass.getName(),
						fieldName
				),
				e
		);
	}
	// compute the final modifier set in one go: optionally transient, always private
	int modifiers = field.getModifiers();
	if ( makeTransient ) {
		modifiers |= Modifier.TRANSIENT;
	}
	field.setModifiers( Modifier.setPrivate( modifiers ) );
	// mark the synthetic field @Transient so mapping processors ignore it
	final ConstPool constPool = targetClass.getClassFile().getConstPool();
	getVisibleAnnotations( field.getFieldInfo() )
			.addAnnotation( new Annotation( Transient.class.getName(), constPool ) );
	return field;
}
/**
 * Adds a simple getter for the given field to the target class.
 *
 * @param targetClass the class receiving the getter
 * @param theField the field to expose
 * @param getterName the name of the generated getter
 * @throws EnhancementException if Javassist cannot add the method
 */
private void addGetter(CtClass targetClass, CtField theField, String getterName) {
	try {
		final CtMethod getter = CtNewMethod.getter( getterName, theField );
		targetClass.addMethod( getter );
	}
	catch (CannotCompileException e) {
		throw new EnhancementException(
				String.format(
						"Could not enhance entity class [%s] to add getter method [%s]",
						targetClass.getName(),
						getterName
				),
				e
		);
	}
}
/**
 * Adds a simple setter for the given field to the target class.
 *
 * @param targetClass the class receiving the setter
 * @param theField the field to expose
 * @param setterName the name of the generated setter
 * @throws EnhancementException if Javassist cannot add the method
 */
private void addSetter(CtClass targetClass, CtField theField, String setterName) {
	try {
		final CtMethod setter = CtNewMethod.setter( setterName, theField );
		targetClass.addMethod( setter );
	}
	catch (CannotCompileException e) {
		throw new EnhancementException(
				String.format(
						"Could not enhance entity class [%s] to add setter method [%s]",
						targetClass.getName(),
						setterName
				),
				e
		);
	}
}
/**
 * Generates (and adds to the class) the synthetic reader method used in place
 * of direct read access to the given persistent field.
 * <p/>
 * For non-lazy fields this is a plain getter; for lazy-loadable fields the
 * reader first runs the interceptor fragment from the type descriptor and then
 * returns the field value.
 *
 * @param managedCtClass the class being enhanced
 * @param persistentField the field to generate a reader for
 * @param typeDescriptor descriptor producing the interception code fragment
 * @return the generated reader method
 * @throws EnhancementException wrapping any generation failure
 */
private CtMethod generateFieldReader(
CtClass managedCtClass,
CtField persistentField,
AttributeTypeDescriptor typeDescriptor)
throws BadBytecode, CannotCompileException {
final FieldInfo fieldInfo = persistentField.getFieldInfo();
final String fieldName = fieldInfo.getName();
final String readerName = PERSISTENT_FIELD_READER_PREFIX + fieldName;
// read attempts only have to deal lazy-loading support, not dirty checking; so if the field
// is not enabled as lazy-loadable return a plain simple getter as the reader
if ( ! enhancementContext.isLazyLoadable( persistentField ) ) {
// not lazy-loadable...
// EARLY RETURN!!!
try {
CtMethod reader = CtNewMethod.getter( readerName, persistentField );
managedCtClass.addMethod( reader );
return reader;
}
catch (CannotCompileException e) {
throw new EnhancementException(
String.format(
"Could not enhance entity class [%s] to add field reader method [%s]",
managedCtClass.getName(),
readerName
),
e
);
}
}
// temporary solution...
// lazy-loadable: run the interceptor fragment first, then return the field
String methodBody = typeDescriptor.buildReadInterceptionBodyFragment( fieldName )
+ " return this." + fieldName + ";";
try {
// the reader is private; field accesses are later rewritten to call it via
// invokespecial (see the field-access transformation step)
CtMethod reader = CtNewMethod.make(
Modifier.PRIVATE,
persistentField.getType(),
readerName,
null,
null,
"{" + methodBody + "}",
managedCtClass
);
managedCtClass.addMethod( reader );
return reader;
}
catch (Exception e) {
throw new EnhancementException(
String.format(
"Could not enhance entity class [%s] to add field reader method [%s]",
managedCtClass.getName(),
readerName
),
e
);
}
}
/**
 * Generates (and adds to the class) the synthetic writer method used in place
 * of direct write access to the given persistent field.
 * <p/>
 * For non-lazy fields this is a plain setter; for lazy-loadable fields the
 * writer body is the interception fragment from the type descriptor.  When
 * in-line dirty checking is enabled, the dirty-checking fragment is prepended
 * to the writer body in either case.
 *
 * @param managedCtClass the class being enhanced
 * @param persistentField the field to generate a writer for
 * @param typeDescriptor descriptor producing the interception / dirty-check fragments
 * @return the generated writer method
 * @throws EnhancementException wrapping any generation failure
 */
private CtMethod generateFieldWriter(
CtClass managedCtClass,
CtField persistentField,
AttributeTypeDescriptor typeDescriptor) {
final FieldInfo fieldInfo = persistentField.getFieldInfo();
final String fieldName = fieldInfo.getName();
final String writerName = PERSISTENT_FIELD_WRITER_PREFIX + fieldName;
final CtMethod writer;
try {
if ( ! enhancementContext.isLazyLoadable( persistentField ) ) {
// not lazy-loadable...
writer = CtNewMethod.setter( writerName, persistentField );
}
else {
// lazy-loadable: the writer body performs write interception
String methodBody = typeDescriptor.buildWriteInterceptionBodyFragment( fieldName );
// private; field writes are later rewritten to call it via invokespecial
writer = CtNewMethod.make(
Modifier.PRIVATE,
CtClass.voidType,
writerName,
new CtClass[] { persistentField.getType() },
null,
"{" + methodBody + "}",
managedCtClass
);
}
if ( enhancementContext.doDirtyCheckingInline( managedCtClass ) ) {
writer.insertBefore( typeDescriptor.buildInLineDirtyCheckingBodyFragment( fieldName ) );
}
managedCtClass.addMethod( writer );
return writer;
}
catch (Exception e) {
throw new EnhancementException(
String.format(
"Could not enhance entity class [%s] to add field writer method [%s]",
managedCtClass.getName(),
writerName
),
e
);
}
}
/**
 * Walks the bytecode of every method in the class and rewrites GETFIELD /
 * PUTFIELD instructions that target an enhanced persistent field into
 * INVOKESPECIAL calls to the corresponding generated reader / writer.
 * Methods added by the enhancement itself are skipped, as are abstract
 * methods (no code attribute).  The stack map table is regenerated after the
 * rewrite since instruction operands changed.
 *
 * @param managedCtClass the class being enhanced
 * @param attributeDescriptorMap field name to reader/writer descriptor.
 *        NOTE(review): identity-keyed on String — the lookup below only hits
 *        when {@code constPool.getFieldrefName} returns the same String
 *        instance that was used as the key; confirm both come from the same
 *        constant-pool cache.
 */
private void transformFieldAccessesIntoReadsAndWrites(
CtClass managedCtClass,
IdentityHashMap<String, PersistentAttributeDescriptor> attributeDescriptorMap) {
final ConstPool constPool = managedCtClass.getClassFile().getConstPool();
for ( Object oMethod : managedCtClass.getClassFile().getMethods() ) {
final MethodInfo methodInfo = (MethodInfo) oMethod;
final String methodName = methodInfo.getName();
// skip methods added by enhancement
if ( methodName.startsWith( PERSISTENT_FIELD_READER_PREFIX )
|| methodName.startsWith( PERSISTENT_FIELD_WRITER_PREFIX )
|| methodName.equals( ENTITY_INSTANCE_GETTER_NAME )
|| methodName.equals( ENTITY_ENTRY_GETTER_NAME )
|| methodName.equals( ENTITY_ENTRY_SETTER_NAME )
|| methodName.equals( PREVIOUS_GETTER_NAME )
|| methodName.equals( PREVIOUS_SETTER_NAME )
|| methodName.equals( NEXT_GETTER_NAME )
|| methodName.equals( NEXT_SETTER_NAME ) ) {
continue;
}
final CodeAttribute codeAttr = methodInfo.getCodeAttribute();
if ( codeAttr == null ) {
// would indicate an abstract method, continue to next method
continue;
}
try {
CodeIterator itr = codeAttr.iterator();
while ( itr.hasNext() ) {
int index = itr.next();
int op = itr.byteAt( index );
// only direct instance-field access is rewritten
if ( op != Opcode.PUTFIELD && op != Opcode.GETFIELD ) {
continue;
}
// the two bytes after the opcode hold the constant-pool fieldref index
int constIndex = itr.u16bitAt( index+1 );
final String fieldName = constPool.getFieldrefName( constIndex );
final PersistentAttributeDescriptor attributeDescriptor = attributeDescriptorMap.get( fieldName );
if ( attributeDescriptor == null ) {
// it's not a field we have enhanced for interception, so skip it
continue;
}
log.tracef(
"Transforming access to field [%s] from method [%s]",
fieldName,
methodName
);
if ( op == Opcode.GETFIELD ) {
// replace GETFIELD with an invokespecial of the generated reader;
// both are 3-byte instructions, so no code shifting is needed
int read_method_index = constPool.addMethodrefInfo(
constPool.getThisClassInfo(),
attributeDescriptor.getReader().getName(),
attributeDescriptor.getReader().getSignature()
);
itr.writeByte( Opcode.INVOKESPECIAL, index );
itr.write16bit( read_method_index, index+1 );
}
else {
// replace PUTFIELD with an invokespecial of the generated writer
int write_method_index = constPool.addMethodrefInfo(
constPool.getThisClassInfo(),
attributeDescriptor.getWriter().getName(),
attributeDescriptor.getWriter().getSignature()
);
itr.writeByte( Opcode.INVOKESPECIAL, index );
itr.write16bit( write_method_index, index+1 );
}
}
// regenerate the stack map table for the rewritten bytecode
StackMapTable smt = MapMaker.make( classPool, methodInfo );
methodInfo.getCodeAttribute().setAttribute(smt);
}
catch (BadBytecode e) {
throw new EnhancementException(
"Unable to perform field access transformation in method : " + methodName,
e
);
}
}
}
/**
 * Immutable value holder pairing a persistent field with its generated
 * reader and writer methods and its attribute type descriptor.
 */
private static class PersistentAttributeDescriptor {
// the original persistent field
private final CtField field;
// generated synthetic reader method for the field
private final CtMethod reader;
// generated synthetic writer method for the field
private final CtMethod writer;
// produces the interception / dirty-check code fragments for the field's type
private final AttributeTypeDescriptor typeDescriptor;
private PersistentAttributeDescriptor(
CtField field,
CtMethod reader,
CtMethod writer,
AttributeTypeDescriptor typeDescriptor) {
this.field = field;
this.reader = reader;
this.writer = writer;
this.typeDescriptor = typeDescriptor;
}
public CtField getField() {
return field;
}
public CtMethod getReader() {
return reader;
}
public CtMethod getWriter() {
return writer;
}
public AttributeTypeDescriptor getTypeDescriptor() {
return typeDescriptor;
}
}
private static interface AttributeTypeDescriptor {
public String buildReadInterceptionBodyFragment(String fieldName);
public String buildWriteInterceptionBodyFragment(String fieldName);
public String buildInLineDirtyCheckingBodyFragment(String fieldName);
}
/**
 * Selects the {@link AttributeTypeDescriptor} matching the field's type:
 * one of the shared primitive descriptors, or a new object descriptor for
 * any non-primitive type.
 *
 * @param persistentField the persistent field whose type drives the choice
 * @return the matching descriptor
 * @throws NotFoundException if the field's type cannot be resolved
 */
private AttributeTypeDescriptor resolveAttributeTypeDescriptor(CtField persistentField) throws NotFoundException {
	// resolve the type once up front: CtField.getType() consults the ClassPool
	// and may throw NotFoundException on every call (the original resolved it
	// up to nine times for a single field)
	final CtClass fieldType = persistentField.getType();
	if ( fieldType == CtClass.booleanType ) {
		return BOOLEAN_DESCRIPTOR;
	}
	else if ( fieldType == CtClass.byteType ) {
		return BYTE_DESCRIPTOR;
	}
	else if ( fieldType == CtClass.charType ) {
		return CHAR_DESCRIPTOR;
	}
	else if ( fieldType == CtClass.shortType ) {
		return SHORT_DESCRIPTOR;
	}
	else if ( fieldType == CtClass.intType ) {
		return INT_DESCRIPTOR;
	}
	else if ( fieldType == CtClass.longType ) {
		return LONG_DESCRIPTOR;
	}
	else if ( fieldType == CtClass.doubleType ) {
		return DOUBLE_DESCRIPTOR;
	}
	else if ( fieldType == CtClass.floatType ) {
		return FLOAT_DESCRIPTOR;
	}
	else {
		return new ObjectAttributeTypeDescriptor( fieldType );
	}
}
/**
 * Base descriptor supplying a shared (placeholder) in-line dirty-checking
 * fragment; subclasses provide the type-specific read/write fragments.
 */
private static abstract class AbstractAttributeTypeDescriptor implements AttributeTypeDescriptor {
@Override
public String buildInLineDirtyCheckingBodyFragment(String fieldName) {
// for now...
// todo : hook-in in-lined dirty checking
// placeholder: prints old value, new value ($1) and whether they differ
return String.format(
"System.out.println( \"DIRTY CHECK (%1$s) : \" + this.%1$s + \" -> \" + $1 + \" (dirty=\" + (this.%1$s != $1) +\")\" );",
fieldName
);
}
}
/**
 * Descriptor for non-primitive attributes: the interceptor's
 * readObject/writeObject callbacks are used, with the result cast back to the
 * attribute's concrete type.
 */
private static class ObjectAttributeTypeDescriptor extends AbstractAttributeTypeDescriptor {
// the attribute's declared (non-primitive) type, used for the casts
private final CtClass concreteType;
private ObjectAttributeTypeDescriptor(CtClass concreteType) {
this.concreteType = concreteType;
}
@Override
public String buildReadInterceptionBodyFragment(String fieldName) {
// let the interceptor (if any) supply/replace the value before the read
return String.format(
"if ( $$_hibernate_getInterceptor() != null ) { " +
"this.%1$s = (%2$s) $$_hibernate_getInterceptor().readObject(this, \"%1$s\", this.%1$s); " +
"}",
fieldName,
concreteType.getName()
);
}
@Override
public String buildWriteInterceptionBodyFragment(String fieldName) {
// let the interceptor (if any) transform the incoming value ($1) before assignment
return String.format(
"%2$s localVar = $1;" +
"if ( $$_hibernate_getInterceptor() != null ) {" +
"localVar = (%2$s) $$_hibernate_getInterceptor().writeObject(this, \"%1$s\", this.%1$s, $1);" +
"}" +
"this.%1$s = localVar;",
fieldName,
concreteType.getName()
);
}
}
/**
 * Shared implementation for the primitive-typed attribute descriptors.
 * The eight primitives differ from one another only in (a) the Java keyword
 * used for the local variable in the write fragment ({@code "boolean"},
 * {@code "int"}, ...) and (b) the suffix of the interceptor callback invoked
 * ({@code readBoolean}/{@code writeBoolean}, {@code readInt}/{@code writeInt},
 * ...), so the fragments are templated on those two strings.  The produced
 * fragments are character-for-character identical to the previous eight
 * hand-written anonymous classes.
 */
private static class PrimitiveAttributeTypeDescriptor extends AbstractAttributeTypeDescriptor {
	// primitive type keyword, e.g. "boolean"
	private final String primitiveTypeName;
	// capitalized name forming the interceptor method names, e.g. "Boolean"
	private final String interceptorMethodSuffix;

	private PrimitiveAttributeTypeDescriptor(String primitiveTypeName, String interceptorMethodSuffix) {
		this.primitiveTypeName = primitiveTypeName;
		this.interceptorMethodSuffix = interceptorMethodSuffix;
	}

	@Override
	public String buildReadInterceptionBodyFragment(String fieldName) {
		// let the interceptor (if any) supply/replace the value before the read
		return String.format(
				"if ( $$_hibernate_getInterceptor() != null ) { " +
						"this.%1$s = $$_hibernate_getInterceptor().read%2$s(this, \"%1$s\", this.%1$s); " +
						"}",
				fieldName,
				interceptorMethodSuffix
		);
	}

	@Override
	public String buildWriteInterceptionBodyFragment(String fieldName) {
		// let the interceptor (if any) transform the incoming value ($1) before assignment
		return String.format(
				"%3$s localVar = $1;" +
						"if ( $$_hibernate_getInterceptor() != null ) {" +
						"localVar = $$_hibernate_getInterceptor().write%2$s(this, \"%1$s\", this.%1$s, $1);" +
						"}" +
						"this.%1$s = localVar;",
				fieldName,
				interceptorMethodSuffix,
				primitiveTypeName
		);
	}
}

private static final AttributeTypeDescriptor BOOLEAN_DESCRIPTOR = new PrimitiveAttributeTypeDescriptor( "boolean", "Boolean" );
private static final AttributeTypeDescriptor BYTE_DESCRIPTOR = new PrimitiveAttributeTypeDescriptor( "byte", "Byte" );
private static final AttributeTypeDescriptor CHAR_DESCRIPTOR = new PrimitiveAttributeTypeDescriptor( "char", "Char" );
private static final AttributeTypeDescriptor SHORT_DESCRIPTOR = new PrimitiveAttributeTypeDescriptor( "short", "Short" );
private static final AttributeTypeDescriptor INT_DESCRIPTOR = new PrimitiveAttributeTypeDescriptor( "int", "Int" );
private static final AttributeTypeDescriptor LONG_DESCRIPTOR = new PrimitiveAttributeTypeDescriptor( "long", "Long" );
private static final AttributeTypeDescriptor DOUBLE_DESCRIPTOR = new PrimitiveAttributeTypeDescriptor( "double", "Double" );
private static final AttributeTypeDescriptor FLOAT_DESCRIPTOR = new PrimitiveAttributeTypeDescriptor( "float", "Float" );
}

View File

@ -81,8 +81,7 @@ public final class FieldInterceptorImpl extends AbstractFieldInterceptor impleme
}
public int readInt(Object target, String name, int oldValue) {
return ( ( Integer ) intercept( target, name, Integer.valueOf( oldValue ) ) )
.intValue();
return ( ( Integer ) intercept( target, name, Integer.valueOf( oldValue ) ) );
}
public long readLong(Object target, String name, long oldValue) {

View File

@ -64,7 +64,8 @@ public class StandardQueryCache implements QueryCache {
StandardQueryCache.class.getName()
);
private static final boolean tracing = LOG.isTraceEnabled();
private static final boolean DEBUGGING = LOG.isDebugEnabled();
private static final boolean TRACING = LOG.isTraceEnabled();
private QueryResultsRegion cacheRegion;
private UpdateTimestampsCache updateTimestampsCache;
@ -93,19 +94,18 @@ public class StandardQueryCache implements QueryCache {
cacheRegion.evictAll();
}
@SuppressWarnings({ "UnnecessaryBoxing", "unchecked" })
public boolean put(
QueryKey key,
Type[] returnTypes,
List result,
boolean isNaturalKeyLookup,
SessionImplementor session) throws HibernateException {
final QueryKey key,
final Type[] returnTypes,
final List result,
final boolean isNaturalKeyLookup,
final SessionImplementor session) throws HibernateException {
if ( isNaturalKeyLookup && result.isEmpty() ) {
return false;
}
long ts = cacheRegion.nextTimestamp();
LOG.debugf( "Caching query results in region: %s; timestamp=%s", cacheRegion.getName(), ts );
if ( DEBUGGING ) LOG.debugf( "Caching query results in region: %s; timestamp=%s", cacheRegion.getName(), ts );
List cacheable = new ArrayList( result.size() + 1 );
logCachedResultDetails( key, null, returnTypes, cacheable );
@ -127,28 +127,28 @@ public class StandardQueryCache implements QueryCache {
@SuppressWarnings({ "unchecked" })
public List get(
QueryKey key,
Type[] returnTypes,
boolean isNaturalKeyLookup,
Set spaces,
SessionImplementor session) throws HibernateException {
LOG.debugf( "Checking cached query results in region: %s", cacheRegion.getName() );
final QueryKey key,
final Type[] returnTypes,
final boolean isNaturalKeyLookup,
final Set spaces,
final SessionImplementor session) throws HibernateException {
if ( DEBUGGING ) LOG.debugf( "Checking cached query results in region: %s", cacheRegion.getName() );
List cacheable = (List) cacheRegion.get( key );
logCachedResultDetails( key, spaces, returnTypes, cacheable );
if ( cacheable == null ) {
LOG.debug( "Query results were not found in cache" );
if ( DEBUGGING ) LOG.debug( "Query results were not found in cache" );
return null;
}
Long timestamp = (Long) cacheable.get( 0 );
if ( !isNaturalKeyLookup && !isUpToDate( spaces, timestamp ) ) {
LOG.debug( "Cached query results were not up-to-date" );
if ( DEBUGGING ) LOG.debug( "Cached query results were not up-to-date" );
return null;
}
LOG.debug( "Returning cached query results" );
if ( DEBUGGING ) LOG.debug( "Returning cached query results" );
final boolean singleResult = returnTypes.length == 1;
for ( int i = 1; i < cacheable.size(); i++ ) {
if ( singleResult ) {
@ -179,7 +179,7 @@ public class StandardQueryCache implements QueryCache {
// the uoe could occur while resolving
// associations, leaving the PC in an
// inconsistent state
LOG.debug( "Unable to reassemble cached result set" );
if ( DEBUGGING ) LOG.debug( "Unable to reassemble cached result set" );
cacheRegion.evict( key );
return null;
}
@ -189,8 +189,8 @@ public class StandardQueryCache implements QueryCache {
return result;
}
protected boolean isUpToDate(Set spaces, Long timestamp) {
LOG.debugf( "Checking query spaces are up-to-date: %s", spaces );
protected boolean isUpToDate(final Set spaces, final Long timestamp) {
if ( DEBUGGING ) LOG.debugf( "Checking query spaces are up-to-date: %s", spaces );
return updateTimestampsCache.isUpToDate( spaces, timestamp );
}
@ -213,7 +213,7 @@ public class StandardQueryCache implements QueryCache {
}
private static void logCachedResultDetails(QueryKey key, Set querySpaces, Type[] returnTypes, List result) {
if ( !LOG.isTraceEnabled() ) {
if ( !TRACING ) {
return;
}
LOG.trace( "key.hashCode=" + key.hashCode() );
@ -238,7 +238,7 @@ public class StandardQueryCache implements QueryCache {
}
private static void logCachedResultRowDetails(Type[] returnTypes, Object result) {
if ( !LOG.isTraceEnabled() ) {
if ( !TRACING ) {
return;
}
logCachedResultRowDetails(
@ -248,7 +248,7 @@ public class StandardQueryCache implements QueryCache {
}
private static void logCachedResultRowDetails(Type[] returnTypes, Object[] tuple) {
if ( !tracing ) {
if ( !TRACING ) {
return;
}
if ( tuple == null ) {

View File

@ -110,7 +110,7 @@ public class CacheKey implements Serializable {
@Override
public String toString() {
// Mainly for OSCache
// Used to be required for OSCache
return entityOrRoleName + '#' + key.toString();//"CacheKey#" + type.toString(key, sf);
}
}

View File

@ -553,8 +553,9 @@ public final class AnnotationBinder {
String table = ""; //might be no @Table annotation on the annotated class
String catalog = "";
List<UniqueConstraintHolder> uniqueConstraints = new ArrayList<UniqueConstraintHolder>();
javax.persistence.Table tabAnn = null;
if ( clazzToProcess.isAnnotationPresent( javax.persistence.Table.class ) ) {
javax.persistence.Table tabAnn = clazzToProcess.getAnnotation( javax.persistence.Table.class );
tabAnn = clazzToProcess.getAnnotation( javax.persistence.Table.class );
table = tabAnn.name();
schema = tabAnn.schema();
catalog = tabAnn.catalog();
@ -708,7 +709,7 @@ public final class AnnotationBinder {
//add process complementary Table definition (index & all)
entityBinder.processComplementaryTableDefinitions( clazzToProcess.getAnnotation( org.hibernate.annotations.Table.class ) );
entityBinder.processComplementaryTableDefinitions( clazzToProcess.getAnnotation( org.hibernate.annotations.Tables.class ) );
entityBinder.processComplementaryTableDefinitions( tabAnn );
}
// parse everything discriminator column relevant in case of single table inheritance
@ -1112,7 +1113,7 @@ public final class AnnotationBinder {
jcAnn = jcsAnn.value()[colIndex];
inheritanceJoinedColumns[colIndex] = Ejb3JoinColumn.buildJoinColumn(
jcAnn, null, superEntity.getIdentifier(),
( Map<String, Join> ) null, ( PropertyHolder ) null, mappings
null, null, mappings
);
}
}
@ -1121,7 +1122,7 @@ public final class AnnotationBinder {
inheritanceJoinedColumns = new Ejb3JoinColumn[1];
inheritanceJoinedColumns[0] = Ejb3JoinColumn.buildJoinColumn(
jcAnn, null, superEntity.getIdentifier(),
( Map<String, Join> ) null, ( PropertyHolder ) null, mappings
null, null, mappings
);
}
LOG.trace( "Subclass joined column(s) created" );
@ -2164,7 +2165,6 @@ public final class AnnotationBinder {
JoinColumn[] annInverseJoins;
JoinTable assocTable = propertyHolder.getJoinTable( property );
CollectionTable collectionTable = property.getAnnotation( CollectionTable.class );
if ( assocTable != null || collectionTable != null ) {
final String catalog;
@ -2173,6 +2173,8 @@ public final class AnnotationBinder {
final UniqueConstraint[] uniqueConstraints;
final JoinColumn[] joins;
final JoinColumn[] inverseJoins;
final javax.persistence.Index[] jpaIndexes;
//JPA 2 has priority
if ( collectionTable != null ) {
@ -2182,6 +2184,7 @@ public final class AnnotationBinder {
uniqueConstraints = collectionTable.uniqueConstraints();
joins = collectionTable.joinColumns();
inverseJoins = null;
jpaIndexes = collectionTable.indexes();
}
else {
catalog = assocTable.catalog();
@ -2190,10 +2193,13 @@ public final class AnnotationBinder {
uniqueConstraints = assocTable.uniqueConstraints();
joins = assocTable.joinColumns();
inverseJoins = assocTable.inverseJoinColumns();
jpaIndexes = assocTable.indexes();
}
collectionBinder.setExplicitAssociationTable( true );
if ( jpaIndexes != null && jpaIndexes.length > 0 ) {
associationTableBinder.setJpaIndex( jpaIndexes );
}
if ( !BinderHelper.isEmptyAnnotationValue( schema ) ) {
associationTableBinder.setSchema( schema );
}
@ -2204,7 +2210,7 @@ public final class AnnotationBinder {
associationTableBinder.setName( tableName );
}
associationTableBinder.setUniqueConstraints( uniqueConstraints );
associationTableBinder.setJpaIndex( jpaIndexes );
//set check constaint in the second pass
annJoins = joins.length == 0 ? null : joins;
annInverseJoins = inverseJoins == null || inverseJoins.length == 0 ? null : inverseJoins;

View File

@ -282,6 +282,12 @@ public interface AvailableSettings {
*/
public static final String JTA_PLATFORM = "hibernate.transaction.jta.platform";
/**
* Names the {@link org.hibernate.engine.transaction.jta.platform.spi.JtaPlatformResolver} implementation to use.
* @since 4.3
*/
public static final String JTA_PLATFORM_RESOLVER = "hibernate.transaction.jta.platform_resolver";
/**
* The {@link org.hibernate.cache.spi.RegionFactory} implementation class
*/
@ -400,6 +406,12 @@ public interface AvailableSettings {
*/
public static final String ORDER_INSERTS = "hibernate.order_inserts";
/**
* Default precedence of null values in {@code ORDER BY} clause. Supported options: {@code none} (default),
* {@code first}, {@code last}.
*/
public static final String DEFAULT_NULL_ORDERING = "hibernate.order_by.default_null_ordering";
/**
* The EntityMode in which set the Session opened from the SessionFactory.
*/
@ -658,4 +670,22 @@ public interface AvailableSettings {
* Default is to not store direct references.
*/
public static final String USE_DIRECT_REFERENCE_CACHE_ENTRIES = "hibernate.cache.use_reference_entries";
/**
* Enable nationalized character support on all string / clob based attribute ( string, char, clob, text etc ).
*
* Default is <code>false</code>.
*/
public static final String USE_NATIONALIZED_CHARACTER_DATA = "hibernate.use_nationalized_character_data";
/**
* A transaction can be rolled back by another thread ("tracking by thread")
* -- not the original application. Examples of this include a JTA
* transaction timeout handled by a background reaper thread. The ability
* to handle this situation requires checking the Thread ID every time
* Session is called. This can certainly have performance considerations.
*
* Default is <code>true</code> (enabled).
*/
public static final String JTA_TRACK_BY_THREAD = "hibernate.jta.track_by_thread";
}

View File

@ -98,6 +98,7 @@ import org.hibernate.id.factory.internal.DefaultIdentifierGeneratorFactory;
import org.hibernate.id.factory.spi.MutableIdentifierGeneratorFactory;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.SessionFactoryImpl;
import org.hibernate.internal.util.ClassLoaderHelper;
import org.hibernate.internal.util.ConfigHelper;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.SerializationHelper;
@ -250,6 +251,7 @@ public class Configuration implements Serializable {
private Set<String> defaultNamedGenerators;
private Map<String, Properties> generatorTables;
private Map<Table, List<UniqueConstraintHolder>> uniqueConstraintHoldersByTable;
private Map<Table, List<JPAIndexHolder>> jpaIndexHoldersByTable;
private Map<String, String> mappedByResolver;
private Map<String, String> propertyRefResolver;
private Map<String, AnyMetaDef> anyMetaDefs;
@ -323,6 +325,7 @@ public class Configuration implements Serializable {
defaultSqlResultSetMappingNames = new HashSet<String>();
defaultNamedGenerators = new HashSet<String>();
uniqueConstraintHoldersByTable = new HashMap<Table, List<UniqueConstraintHolder>>();
jpaIndexHoldersByTable = new HashMap<Table,List<JPAIndexHolder>>( );
mappedByResolver = new HashMap<String, String>();
propertyRefResolver = new HashMap<String, String>();
caches = new ArrayList<CacheHolder>();
@ -720,7 +723,7 @@ public class Configuration implements Serializable {
*/
public Configuration addResource(String resourceName) throws MappingException {
LOG.readingMappingsFromResource( resourceName );
ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader contextClassLoader = ClassLoaderHelper.getContextClassLoader();
InputStream resourceInputStream = null;
if ( contextClassLoader != null ) {
resourceInputStream = contextClassLoader.getResourceAsStream( resourceName );
@ -876,7 +879,7 @@ public class Configuration implements Serializable {
@SuppressWarnings({ "unchecked" })
private Iterator<IdentifierGenerator> iterateGenerators(Dialect dialect) throws MappingException {
public Iterator<IdentifierGenerator> iterateGenerators(Dialect dialect) throws MappingException {
TreeMap generators = new TreeMap();
String defaultCatalog = properties.getProperty( Environment.DEFAULT_CATALOG );
@ -1303,6 +1306,11 @@ public class Configuration implements Serializable {
protected void secondPassCompile() throws MappingException {
LOG.trace( "Starting secondPassCompile() processing" );
// TEMPORARY
// Ensure the correct ClassLoader is used in commons-annotations.
ClassLoader tccl = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader( ClassLoaderHelper.getContextClassLoader() );
//process default values first
{
if ( !isDefaultProcessed ) {
@ -1374,11 +1382,25 @@ public class Configuration implements Serializable {
for ( UniqueConstraintHolder holder : uniqueConstraints ) {
uniqueIndexPerTable++;
final String keyName = StringHelper.isEmpty( holder.getName() )
? "key" + uniqueIndexPerTable
? "UK_" + table.getName() + "_" + uniqueIndexPerTable
: holder.getName();
buildUniqueKeyFromColumnNames( table, keyName, holder.getColumns() );
}
}
for(Table table : jpaIndexHoldersByTable.keySet()){
final List<JPAIndexHolder> jpaIndexHolders = jpaIndexHoldersByTable.get( table );
int uniqueIndexPerTable = 0;
for ( JPAIndexHolder holder : jpaIndexHolders ) {
uniqueIndexPerTable++;
final String keyName = StringHelper.isEmpty( holder.getName() )
? "idx_"+table.getName()+"_" + uniqueIndexPerTable
: holder.getName();
buildUniqueKeyFromColumnNames( table, keyName, holder.getColumns(), holder.getOrdering(), holder.isUnique() );
}
}
Thread.currentThread().setContextClassLoader( tccl );
}
private void processSecondPassesOfType(Class<? extends SecondPass> type) {
@ -1532,7 +1554,11 @@ public class Configuration implements Serializable {
}
}
private void buildUniqueKeyFromColumnNames(Table table, String keyName, String[] columnNames) {
private void buildUniqueKeyFromColumnNames(Table table, String keyName, String[] columnNames){
buildUniqueKeyFromColumnNames( table, keyName, columnNames, null, true );
}
private void buildUniqueKeyFromColumnNames(Table table, String keyName, String[] columnNames, String[] orderings, boolean unique) {
keyName = normalizer.normalizeIdentifierQuoting( keyName );
int size = columnNames.length;
@ -1540,24 +1566,40 @@ public class Configuration implements Serializable {
Set<Column> unbound = new HashSet<Column>();
Set<Column> unboundNoLogical = new HashSet<Column>();
for ( int index = 0; index < size; index++ ) {
final String logicalColumnName = normalizer.normalizeIdentifierQuoting( columnNames[index] );
String column = columnNames[index];
try {
final String columnName = createMappings().getPhysicalColumnName( logicalColumnName, table );
final String columnName = createMappings().getPhysicalColumnName( column, table );
columns[index] = new Column( columnName );
unbound.add( columns[index] );
//column equals and hashcode is based on column name
}
catch ( MappingException e ) {
unboundNoLogical.add( new Column( logicalColumnName ) );
unboundNoLogical.add( new Column( column ) );
}
}
UniqueKey uk = table.getOrCreateUniqueKey( keyName );
for ( Column column : columns ) {
if ( table.containsColumn( column ) ) {
uk.addColumn( column );
unbound.remove( column );
if ( unique ) {
UniqueKey uk = table.getOrCreateUniqueKey( keyName );
for ( int i = 0; i < columns.length; i++ ) {
Column column = columns[i];
String order = orderings != null ? orderings[i] : null;
if ( table.containsColumn( column ) ) {
uk.addColumn( column, order );
unbound.remove( column );
}
}
}
else {
Index index = table.getOrCreateIndex( keyName );
for ( int i = 0; i < columns.length; i++ ) {
Column column = columns[i];
String order = orderings != null ? orderings[i] : null;
if ( table.containsColumn( column ) ) {
index.addColumn( column, order );
unbound.remove( column );
}
}
}
if ( unbound.size() > 0 || unboundNoLogical.size() > 0 ) {
StringBuilder sb = new StringBuilder( "Unable to create unique key constraint (" );
for ( String columnName : columnNames ) {
@ -1566,10 +1608,10 @@ public class Configuration implements Serializable {
sb.setLength( sb.length() - 2 );
sb.append( ") on table " ).append( table.getName() ).append( ": database column " );
for ( Column column : unbound ) {
sb.append( column.getName() ).append( ", " );
sb.append("'").append( column.getName() ).append( "', " );
}
for ( Column column : unboundNoLogical ) {
sb.append( column.getName() ).append( ", " );
sb.append("'").append( column.getName() ).append( "', " );
}
sb.setLength( sb.length() - 2 );
sb.append( " not found. Make sure that you use the correct column name which depends on the naming strategy in use (it may not be the same as the property name in the entity, especially for relational types)" );
@ -3144,6 +3186,19 @@ public class Configuration implements Serializable {
return useNewGeneratorMappings.booleanValue();
}
private Boolean useNationalizedCharacterData;
@Override
@SuppressWarnings( {"UnnecessaryUnboxing"})
public boolean useNationalizedCharacterData() {
if ( useNationalizedCharacterData == null ) {
final String booleanName = getConfigurationProperties()
.getProperty( AvailableSettings.USE_NATIONALIZED_CHARACTER_DATA );
useNationalizedCharacterData = Boolean.valueOf( booleanName );
}
return useNationalizedCharacterData.booleanValue();
}
private Boolean forceDiscriminatorInSelectsByDefault;
@Override
@ -3294,6 +3349,15 @@ public class Configuration implements Serializable {
holderList.addAll( uniqueConstraintHolders );
}
public void addJpaIndexHolders(Table table, List<JPAIndexHolder> holders) {
List<JPAIndexHolder> holderList = jpaIndexHoldersByTable.get( table );
if ( holderList == null ) {
holderList = new ArrayList<JPAIndexHolder>();
jpaIndexHoldersByTable.put( table, holderList );
}
holderList.addAll( holders );
}
public void addMappedBy(String entityName, String propertyName, String inversePropertyName) {
mappedByResolver.put( entityName + "." + propertyName, inversePropertyName );
}

View File

@ -211,7 +211,7 @@ public class Ejb3JoinColumn extends Ejb3Column {
if ( actualColumns == null || actualColumns.length == 0 ) {
return new Ejb3JoinColumn[] {
buildJoinColumn(
(JoinColumn) null,
null,
mappedBy,
joins,
propertyHolder,
@ -356,8 +356,8 @@ public class Ejb3JoinColumn extends Ejb3Column {
else {
defaultName = mappings.getObjectNameNormalizer().normalizeIdentifierQuoting( defaultName );
return new Ejb3JoinColumn(
(String) null, defaultName,
false, false, true, true, null, (String) null,
null, defaultName,
false, false, true, true, null, null,
joins, propertyHolder, null, null, true, mappings
);
}

View File

@ -22,10 +22,16 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.cfg;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import org.hibernate.AnnotationException;
import org.hibernate.MappingException;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.internal.util.collections.CollectionHelper;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Table;
@ -52,6 +58,7 @@ public class IndexOrUniqueKeySecondPass implements SecondPass {
this.unique = false;
}
/**
* Build an index
*/
@ -69,11 +76,11 @@ public class IndexOrUniqueKeySecondPass implements SecondPass {
this.mappings = mappings;
this.unique = unique;
}
@Override
public void doSecondPass(Map persistentClasses) throws MappingException {
if ( columns != null ) {
for (String columnName : columns) {
addConstraintToColumn( columnName );
for ( int i = 0; i < columns.length; i++ ) {
addConstraintToColumn( columns[i] );
}
}
if ( column != null ) {
@ -82,7 +89,7 @@ public class IndexOrUniqueKeySecondPass implements SecondPass {
}
}
private void addConstraintToColumn(String columnName) {
private void addConstraintToColumn(final String columnName ) {
Column column = table.getColumn(
new Column(
mappings.getPhysicalColumnName( columnName, table )

View File

@ -0,0 +1,88 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.cfg;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.StringTokenizer;

import javax.persistence.Index;
/**
* @author Strong Liu <stliu@hibernate.org>
*/
public class JPAIndexHolder {
private final String name;
private final String[] columns;
private final String[] ordering;
private final boolean unique;
public JPAIndexHolder(Index index) {
StringTokenizer tokenizer = new StringTokenizer( index.columnList(), "," );
List<String> tmp = new ArrayList<String>();
while ( tokenizer.hasMoreElements() ) {
tmp.add( tokenizer.nextToken().trim() );
}
this.name = index.name();
this.columns = new String[tmp.size()];
this.ordering = new String[tmp.size()];
this.unique = index.unique();
initializeColumns( columns, ordering, tmp );
}
public String[] getColumns() {
return columns;
}
public String getName() {
return name;
}
public String[] getOrdering() {
return ordering;
}
public boolean isUnique() {
return unique;
}
private void initializeColumns(String[] columns, String[] ordering, List<String> list) {
for ( int i = 0, size = list.size(); i < size; i++ ) {
final String description = list.get( i );
final String tmp = description.toLowerCase();
if ( tmp.endsWith( " desc" ) ) {
columns[i] = description.substring( 0, description.length() - 5 );
ordering[i] = "desc";
}
else if ( tmp.endsWith( " asc" ) ) {
columns[i] = description.substring( 0, description.length() - 4 );
ordering[i] = "asc";
}
else {
columns[i] = description;
ordering[i] = null;
}
}
}
}

View File

@ -720,6 +720,8 @@ public interface Mappings {
public void addUniqueConstraintHolders(Table table, List<UniqueConstraintHolder> uniqueConstraintHolders);
public void addJpaIndexHolders(Table table, List<JPAIndexHolder> jpaIndexHolders);
public void addMappedBy(String entityName, String propertyName, String inversePropertyName);
public String getFromMappedBy(String entityName, String propertyName);
@ -764,6 +766,14 @@ public interface Mappings {
*/
public boolean useNewGeneratorMappings();
/**
* Should we use nationalized variants of character data by default? This is controlled by the
* {@link AvailableSettings#USE_NATIONALIZED_CHARACTER_DATA} setting.
*
* @return {@code true} if nationalized character data should be used by default; {@code false} otherwise.
*/
public boolean useNationalizedCharacterData();
/**
* Return the property annotated with @ToOne and @Id if any.
* Null otherwise

View File

@ -375,6 +375,9 @@ public abstract class ResultSetMappingBinder {
else if ( "upgrade-nowait".equals( lockMode ) ) {
return LockMode.UPGRADE_NOWAIT;
}
else if ( "upgrade-skiplocked".equals( lockMode )) {
return LockMode.UPGRADE_SKIPLOCKED;
}
else if ( "write".equals( lockMode ) ) {
return LockMode.WRITE;
}

View File

@ -28,7 +28,9 @@ import java.util.Map;
import org.hibernate.ConnectionReleaseMode;
import org.hibernate.EntityMode;
import org.hibernate.MultiTenancyStrategy;
import org.hibernate.NullPrecedence;
import org.hibernate.cache.spi.QueryCacheFactory;
import org.hibernate.cache.spi.RegionFactory;
import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform;
import org.hibernate.hql.spi.MultiTableBulkIdStrategy;
import org.hibernate.hql.spi.QueryTranslatorFactory;
@ -79,6 +81,7 @@ public final class Settings {
private boolean namedQueryStartupCheckingEnabled;
private EntityTuplizerFactory entityTuplizerFactory;
private boolean checkNullability;
private NullPrecedence defaultNullPrecedence;
private boolean initializeLazyStateOutsideTransactions;
// private ComponentTuplizerFactory componentTuplizerFactory; todo : HHH-3517 and HHH-1907
// private BytecodeProvider bytecodeProvider;
@ -91,6 +94,8 @@ public final class Settings {
private BatchFetchStyle batchFetchStyle;
private boolean directReferenceCacheEntriesEnabled;
private boolean jtaTrackByThread;
/**
* Package protected constructor
@ -256,6 +261,10 @@ public final class Settings {
return entityTuplizerFactory;
}
public NullPrecedence getDefaultNullPrecedence() {
return defaultNullPrecedence;
}
// package protected setters ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
void setDefaultSchemaName(String string) {
@ -451,4 +460,16 @@ public final class Settings {
public void setDirectReferenceCacheEntriesEnabled(boolean directReferenceCacheEntriesEnabled) {
this.directReferenceCacheEntriesEnabled = directReferenceCacheEntriesEnabled;
}
void setDefaultNullPrecedence(NullPrecedence defaultNullPrecedence) {
this.defaultNullPrecedence = defaultNullPrecedence;
}
public boolean isJtaTrackByThread() {
return jtaTrackByThread;
}
public void setJtaTrackByThread(boolean jtaTrackByThread) {
this.jtaTrackByThread = jtaTrackByThread;
}
}

View File

@ -32,6 +32,7 @@ import org.hibernate.EntityMode;
import org.hibernate.HibernateException;
import org.hibernate.MultiTenancyStrategy;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.NullPrecedence;
import org.hibernate.boot.registry.selector.spi.StrategySelector;
import org.hibernate.cache.internal.NoCachingRegionFactory;
import org.hibernate.cache.internal.RegionFactoryInitiator;
@ -247,6 +248,14 @@ public class SettingsFactory implements Serializable {
}
settings.setOrderInsertsEnabled( orderInserts );
String defaultNullPrecedence = ConfigurationHelper.getString(
AvailableSettings.DEFAULT_NULL_ORDERING, properties, "none", "first", "last"
);
if ( debugEnabled ) {
LOG.debugf( "Default null ordering: %s", defaultNullPrecedence );
}
settings.setDefaultNullPrecedence( NullPrecedence.parse( defaultNullPrecedence ) );
//Query parser settings:
settings.setQueryTranslatorFactory( createQueryTranslatorFactory( properties, serviceRegistry ) );
@ -365,6 +374,16 @@ public class SettingsFactory implements Serializable {
}
settings.setInitializeLazyStateOutsideTransactions( initializeLazyStateOutsideTransactionsEnabled );
boolean jtaTrackByThread = ConfigurationHelper.getBoolean(
AvailableSettings.JTA_TRACK_BY_THREAD,
properties,
true
);
if ( debugEnabled ) {
LOG.debugf( "JTA Track by Thread: %s", enabledDisabled(jtaTrackByThread) );
}
settings.setJtaTrackByThread( jtaTrackByThread );
return settings;
}

View File

@ -762,7 +762,11 @@ public class EntityBinder {
null
);
//no check constraints available on joins
if ( secondaryTable != null ) {
TableBinder.addIndexes( table, secondaryTable.indexes(), mappings );
}
//no check constraints available on joins
join.setTable( table );
//somehow keep joins() for later.
@ -881,7 +885,10 @@ public class EntityBinder {
public void setIgnoreIdAnnotations(boolean ignoreIdAnnotations) {
this.ignoreIdAnnotations = ignoreIdAnnotations;
}
public void processComplementaryTableDefinitions(javax.persistence.Table table) {
if ( table == null ) return;
TableBinder.addIndexes( persistentClass.getTable(), table.indexes(), mappings );
}
public void processComplementaryTableDefinitions(org.hibernate.annotations.Table table) {
//comment and index are processed here
if ( table == null ) return;

View File

@ -22,8 +22,10 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.cfg.annotations;
import java.lang.annotation.Annotation;
import javax.persistence.Column;
import javax.persistence.ForeignKey;
import javax.persistence.JoinColumn;
import javax.persistence.MapKeyJoinColumn;
@ -38,38 +40,52 @@ public class MapKeyJoinColumnDelegator implements JoinColumn {
this.column = column;
}
@Override
public String name() {
return column.name();
}
@Override
public String referencedColumnName() {
return column.referencedColumnName();
}
@Override
public boolean unique() {
return column.unique();
}
@Override
public boolean nullable() {
return column.nullable();
}
@Override
public boolean insertable() {
return column.insertable();
}
@Override
public boolean updatable() {
return column.updatable();
}
@Override
public String columnDefinition() {
return column.columnDefinition();
}
@Override
public String table() {
return column.table();
}
@Override
public ForeignKey foreignKey() {
return column.foreignKey();
}
@Override
public Class<? extends Annotation> annotationType() {
return Column.class;
}

View File

@ -114,26 +114,27 @@ public abstract class QueryBinder {
if ( BinderHelper.isEmptyAnnotationValue( queryAnn.name() ) ) {
throw new AnnotationException( "A named query must have a name when used in class or package level" );
}
NamedSQLQueryDefinition query;
String resultSetMapping = queryAnn.resultSetMapping();
QueryHint[] hints = queryAnn.hints();
String queryName = queryAnn.query();
NamedSQLQueryDefinitionBuilder builder = new NamedSQLQueryDefinitionBuilder( queryAnn.name() )
.setQuery( queryName )
.setQuerySpaces( null )
.setCacheable( getBoolean( queryName, "org.hibernate.cacheable", hints ) )
.setCacheRegion( getString( queryName, "org.hibernate.cacheRegion", hints ) )
.setTimeout( getTimeout( queryName, hints ) )
.setFetchSize( getInteger( queryName, "org.hibernate.fetchSize", hints ) )
.setFlushMode( getFlushMode( queryName, hints ) )
.setCacheMode( getCacheMode( queryName, hints ) )
.setReadOnly( getBoolean( queryName, "org.hibernate.readOnly", hints ) )
.setComment( getString( queryName, "org.hibernate.comment", hints ) )
.setParameterTypes( null )
.setCallable( getBoolean( queryName, "org.hibernate.callable", hints ) );
if ( !BinderHelper.isEmptyAnnotationValue( resultSetMapping ) ) {
//sql result set usage
query = new NamedSQLQueryDefinitionBuilder( queryAnn.name() )
.setQuery( queryName )
.setResultSetRef( resultSetMapping )
.setQuerySpaces( null )
.setCacheable( getBoolean( queryName, "org.hibernate.cacheable", hints ) )
.setCacheRegion( getString( queryName, "org.hibernate.cacheRegion", hints ) )
.setTimeout( getTimeout( queryName, hints ) )
.setFetchSize( getInteger( queryName, "org.hibernate.fetchSize", hints ) )
.setFlushMode( getFlushMode( queryName, hints ) )
.setCacheMode( getCacheMode( queryName, hints ) )
.setReadOnly( getBoolean( queryName, "org.hibernate.readOnly", hints ) )
.setComment( getString( queryName, "org.hibernate.comment", hints ) )
.setParameterTypes( null )
.setCallable( getBoolean( queryName, "org.hibernate.callable", hints ) )
builder.setResultSetRef( resultSetMapping )
.createNamedQueryDefinition();
}
else if ( !void.class.equals( queryAnn.resultClass() ) ) {
@ -141,25 +142,14 @@ public abstract class QueryBinder {
//FIXME should be done in a second pass due to entity name?
final NativeSQLQueryRootReturn entityQueryReturn =
new NativeSQLQueryRootReturn( "alias1", queryAnn.resultClass().getName(), new HashMap(), LockMode.READ );
query = new NamedSQLQueryDefinitionBuilder( queryAnn.name() )
.setQuery( queryName )
.setQueryReturns( new NativeSQLQueryReturn[] {entityQueryReturn} )
.setQuerySpaces( null )
.setCacheable( getBoolean( queryName, "org.hibernate.cacheable", hints ) )
.setCacheRegion( getString( queryName, "org.hibernate.cacheRegion", hints ) )
.setTimeout( getTimeout( queryName, hints ) )
.setFetchSize( getInteger( queryName, "org.hibernate.fetchSize", hints ) )
.setFlushMode( getFlushMode( queryName, hints ) )
.setCacheMode( getCacheMode( queryName, hints ) )
.setReadOnly( getBoolean( queryName, "org.hibernate.readOnly", hints ) )
.setComment( getString( queryName, "org.hibernate.comment", hints ) )
.setParameterTypes( null )
.setCallable( getBoolean( queryName, "org.hibernate.callable", hints ) )
.createNamedQueryDefinition();
builder.setQueryReturns( new NativeSQLQueryReturn[] {entityQueryReturn} );
}
else {
throw new NotYetImplementedException( "Pure native scalar queries are not yet supported" );
builder.setQueryReturns( new NativeSQLQueryReturn[0] );
}
NamedSQLQueryDefinition query = builder.createNamedQueryDefinition();
if ( isDefault ) {
mappings.addDefaultSQLQuery( query.getName(), query );
}

View File

@ -23,10 +23,7 @@
*/
package org.hibernate.cfg.annotations;
import org.jboss.logging.Logger;
import org.hibernate.annotations.OrderBy;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.PersistentClass;
@ -36,7 +33,6 @@ import org.hibernate.mapping.PersistentClass;
* @author Matthew Inger
*/
public class SetBinder extends CollectionBinder {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, SetBinder.class.getName());
public SetBinder() {
}

View File

@ -45,6 +45,7 @@ import org.jboss.logging.Logger;
import org.hibernate.AnnotationException;
import org.hibernate.AssertionFailure;
import org.hibernate.MappingException;
import org.hibernate.annotations.Nationalized;
import org.hibernate.annotations.Parameter;
import org.hibernate.annotations.Type;
import org.hibernate.annotations.common.reflection.XClass;
@ -64,10 +65,14 @@ import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.SimpleValue;
import org.hibernate.mapping.Table;
import org.hibernate.type.CharacterArrayClobType;
import org.hibernate.type.CharacterArrayNClobType;
import org.hibernate.type.CharacterNCharType;
import org.hibernate.type.EnumType;
import org.hibernate.type.PrimitiveCharacterArrayClobType;
import org.hibernate.type.PrimitiveCharacterArrayNClobType;
import org.hibernate.type.SerializableToBlobType;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.StringNVarcharType;
import org.hibernate.type.WrappedMaterializedBlobType;
import org.hibernate.usertype.DynamicParameterizedType;
@ -159,6 +164,9 @@ public class SimpleValueBinder {
typeParameters.clear();
String type = BinderHelper.ANNOTATION_STRING_DEFAULT;
final boolean isNationalized = property.isAnnotationPresent( Nationalized.class )
|| mappings.useNationalizedCharacterData();
Type annType = property.getAnnotation( Type.class );
if ( annType != null ) {
setExplicitType( annType );
@ -204,19 +212,30 @@ public class SimpleValueBinder {
}
else if ( property.isAnnotationPresent( Lob.class ) ) {
if ( mappings.getReflectionManager().equals( returnedClassOrElement, java.sql.Clob.class ) ) {
type = "clob";
type = isNationalized
? StandardBasicTypes.NCLOB.getName()
: StandardBasicTypes.CLOB.getName();
}
else if ( mappings.getReflectionManager().equals( returnedClassOrElement, java.sql.NClob.class ) ) {
type = StandardBasicTypes.NCLOB.getName();
}
else if ( mappings.getReflectionManager().equals( returnedClassOrElement, java.sql.Blob.class ) ) {
type = "blob";
}
else if ( mappings.getReflectionManager().equals( returnedClassOrElement, String.class ) ) {
type = StandardBasicTypes.MATERIALIZED_CLOB.getName();
type = isNationalized
? StandardBasicTypes.MATERIALIZED_NCLOB.getName()
: StandardBasicTypes.MATERIALIZED_CLOB.getName();
}
else if ( mappings.getReflectionManager().equals( returnedClassOrElement, Character.class ) && isArray ) {
type = CharacterArrayClobType.class.getName();
type = isNationalized
? CharacterArrayNClobType.class.getName()
: CharacterArrayClobType.class.getName();
}
else if ( mappings.getReflectionManager().equals( returnedClassOrElement, char.class ) && isArray ) {
type = PrimitiveCharacterArrayClobType.class.getName();
type = isNationalized
? PrimitiveCharacterArrayNClobType.class.getName()
: PrimitiveCharacterArrayClobType.class.getName();
}
else if ( mappings.getReflectionManager().equals( returnedClassOrElement, Byte.class ) && isArray ) {
type = WrappedMaterializedBlobType.class.getName();
@ -254,6 +273,24 @@ public class SimpleValueBinder {
type = EnumType.class.getName();
explicitType = type;
}
else if ( isNationalized ) {
if ( mappings.getReflectionManager().equals( returnedClassOrElement, String.class ) ) {
// nvarchar
type = StringNVarcharType.INSTANCE.getName();
explicitType = type;
}
else if ( mappings.getReflectionManager().equals( returnedClassOrElement, Character.class ) ) {
if ( isArray ) {
// nvarchar
type = StringNVarcharType.INSTANCE.getName();
}
else {
// nchar
type = CharacterNCharType.INSTANCE.getName();
}
explicitType = type;
}
}
// implicit type will check basic types and Serializable classes
if ( columns == null ) {

View File

@ -36,6 +36,7 @@ import org.hibernate.annotations.Index;
import org.hibernate.cfg.BinderHelper;
import org.hibernate.cfg.Ejb3JoinColumn;
import org.hibernate.cfg.IndexOrUniqueKeySecondPass;
import org.hibernate.cfg.JPAIndexHolder;
import org.hibernate.cfg.Mappings;
import org.hibernate.cfg.NamingStrategy;
import org.hibernate.cfg.ObjectNameNormalizer;
@ -80,6 +81,7 @@ public class TableBinder {
private String ownerEntity;
private String associatedEntity;
private boolean isJPA2ElementCollection;
private List<JPAIndexHolder> jpaIndexHolders;
public void setSchema(String schema) {
this.schema = schema;
@ -105,6 +107,10 @@ public class TableBinder {
this.uniqueConstraints = TableBinder.buildUniqueConstraintHolders( uniqueConstraints );
}
public void setJpaIndex(javax.persistence.Index[] jpaIndex){
this.jpaIndexHolders = buildJpaIndexHolder( jpaIndex );
}
public void setConstraints(String constraints) {
this.constraints = constraints;
}
@ -183,6 +189,7 @@ public class TableBinder {
namingStrategyHelper,
isAbstract,
uniqueConstraints,
jpaIndexHolders,
constraints,
denormalizedSuperTable,
mappings,
@ -190,6 +197,7 @@ public class TableBinder {
);
}
private ObjectNameSource buildNameContext(String unquotedOwnerTable, String unquotedAssocTable) {
String logicalName = mappings.getNamingStrategy().logicalCollectionTableName(
name,
@ -211,10 +219,11 @@ public class TableBinder {
ObjectNameNormalizer.NamingStrategyHelper namingStrategyHelper,
boolean isAbstract,
List<UniqueConstraintHolder> uniqueConstraints,
List<JPAIndexHolder> jpaIndexHolders,
String constraints,
Table denormalizedSuperTable,
Mappings mappings,
String subselect) {
String subselect){
schema = BinderHelper.isEmptyAnnotationValue( schema ) ? mappings.getSchemaName() : schema;
catalog = BinderHelper.isEmptyAnnotationValue( catalog ) ? mappings.getCatalogName() : catalog;
@ -244,10 +253,14 @@ public class TableBinder {
);
}
if ( uniqueConstraints != null && uniqueConstraints.size() > 0 ) {
if ( CollectionHelper.isNotEmpty( uniqueConstraints ) ) {
mappings.addUniqueConstraintHolders( table, uniqueConstraints );
}
if ( CollectionHelper.isNotEmpty( jpaIndexHolders ) ) {
mappings.addJpaIndexHolders( table, jpaIndexHolders );
}
if ( constraints != null ) table.addCheckConstraint( constraints );
// logicalName is null if we are in the second pass
@ -258,6 +271,23 @@ public class TableBinder {
return table;
}
public static Table buildAndFillTable(
String schema,
String catalog,
ObjectNameSource nameSource,
ObjectNameNormalizer.NamingStrategyHelper namingStrategyHelper,
boolean isAbstract,
List<UniqueConstraintHolder> uniqueConstraints,
String constraints,
Table denormalizedSuperTable,
Mappings mappings,
String subselect) {
return buildAndFillTable( schema, catalog, nameSource, namingStrategyHelper, isAbstract, uniqueConstraints, null, constraints
, denormalizedSuperTable, mappings, subselect);
}
/**
* @deprecated Use {@link #buildAndFillTable} instead.
*/
@ -514,6 +544,18 @@ public class TableBinder {
}
}
public static void addIndexes(Table hibTable, javax.persistence.Index[] indexes, Mappings mappings) {
mappings.addJpaIndexHolders( hibTable, buildJpaIndexHolder( indexes ) );
}
public static List<JPAIndexHolder> buildJpaIndexHolder(javax.persistence.Index[] indexes){
List<JPAIndexHolder> holders = new ArrayList<JPAIndexHolder>( indexes.length );
for(javax.persistence.Index index : indexes){
holders.add( new JPAIndexHolder( index ) );
}
return holders;
}
/**
* @deprecated Use {@link #buildUniqueConstraintHolders} instead
*/

View File

@ -311,7 +311,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
public <T extends Annotation> boolean isAnnotationPresent(Class<T> annotationType) {
initAnnotations();
return (T) annotationsMap.get( annotationType ) != null;
return annotationsMap.containsKey( annotationType );
}
public Annotation[] getAnnotations() {
@ -837,12 +837,13 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
}
private Cacheable getCacheable(Element element, XMLContext.Default defaults){
if(element==null)return null;
String attValue = element.attributeValue( "cacheable" );
if(attValue!=null){
AnnotationDescriptor ad = new AnnotationDescriptor( Cacheable.class );
ad.setValue( "value", Boolean.valueOf( attValue ) );
return AnnotationFactory.create( ad );
if ( element != null ) {
String attValue = element.attributeValue( "cacheable" );
if ( attValue != null ) {
AnnotationDescriptor ad = new AnnotationDescriptor( Cacheable.class );
ad.setValue( "value", Boolean.valueOf( attValue ) );
return AnnotationFactory.create( ad );
}
}
if ( defaults.canUseJavaAnnotations() ) {
return getJavaAnnotation( Cacheable.class );
@ -2262,6 +2263,7 @@ public class JPAOverriddenAnnotationReader implements AnnotationReader {
annotation.setValue( "schema", table.schema() );
annotation.setValue( "catalog", table.catalog() );
annotation.setValue( "uniqueConstraints", table.uniqueConstraints() );
annotation.setValue( "indexes", table.indexes() );
}
}
if ( StringHelper.isEmpty( (String) annotation.valueOf( "schema" ) )

View File

@ -0,0 +1,70 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.cfg.beanvalidation;
import java.util.Set;
import org.hibernate.cfg.Configuration;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.service.spi.SessionFactoryServiceRegistry;
/**
* Defines the context needed to call the {@link TypeSafeActivator}
*
* @author Steve Ebersole
*/
public interface ActivationContext {
/**
* Access the requested validation mode(s).
* <p/>
* IMPL NOTE : the legacy code allowed multiple mode values to be specified, so that is why it is multi-valued here.
* However, I cannot find any good reasoning why it was defined that way and even JPA states it should be a single
* value. For 4.1 (in maintenance) I think it makes the most sense to not mess with it. Discuss for
* 4.2 and beyond.
*
* @return The requested validation modes
*/
public Set<ValidationMode> getValidationModes();
/**
* Access the Configuration
*
* @return The Hibernate Configuration object
*/
public Configuration getConfiguration();
/**
* Access the SessionFactory being built to trigger this BV activation
*
* @return The SessionFactory being built
*/
public SessionFactoryImplementor getSessionFactory();
/**
* Access the ServiceRegistry specific to the SessionFactory being built.
*
* @return The SessionFactoryServiceRegistry
*/
public SessionFactoryServiceRegistry getServiceRegistry();
}

View File

@ -22,7 +22,6 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.cfg.beanvalidation;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
@ -37,6 +36,7 @@ import javax.validation.ValidatorFactory;
import org.jboss.logging.Logger;
import org.hibernate.EntityMode;
import org.hibernate.cfg.Configuration;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.event.spi.PreDeleteEvent;
import org.hibernate.event.spi.PreDeleteEventListener;
@ -53,13 +53,12 @@ import org.hibernate.persister.entity.EntityPersister;
* @author Emmanuel Bernard
* @author Hardy Ferentschik
*/
//FIXME review exception model
public class BeanValidationEventListener
implements PreInsertEventListener, PreUpdateEventListener, PreDeleteEventListener {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
CoreMessageLogger.class,
BeanValidationEventListener.class.getName()
);
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class,
BeanValidationEventListener.class.getName());
private ValidatorFactory factory;
private ConcurrentHashMap<EntityPersister, Set<String>> associationsPerEntityPersister =
@ -77,19 +76,20 @@ public class BeanValidationEventListener
* Constructor used in an environment where validator factory is injected (JPA2).
*
* @param factory The {@code ValidatorFactory} to use to create {@code Validator} instance(s)
* @param properties Configured properties
* @param properties Configured properties
*/
public BeanValidationEventListener(ValidatorFactory factory, Properties properties) {
init( factory, properties );
}
public void initialize(Properties properties) {
/**
 * Lazily initializes this listener on first use when no ValidatorFactory was injected:
 * builds the default factory and configures validation groups from the Configuration.
 *
 * @param cfg The Hibernate Configuration whose properties drive listener initialization
 */
public void initialize(Configuration cfg) {
	if ( !initialized ) {
		// Non-JPA bootstrap path: no factory was injected, so build the spec default one.
		ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
		Properties props = cfg.getProperties();
		init( factory, props );
	}
}
@Override
public boolean onPreInsert(PreInsertEvent event) {
validate(
event.getEntity(), event.getPersister().getEntityMode(), event.getPersister(),
@ -97,7 +97,7 @@ public class BeanValidationEventListener
);
return false;
}
@Override
public boolean onPreUpdate(PreUpdateEvent event) {
validate(
event.getEntity(), event.getPersister().getEntityMode(), event.getPersister(),
@ -105,7 +105,7 @@ public class BeanValidationEventListener
);
return false;
}
@Override
public boolean onPreDelete(PreDeleteEvent event) {
validate(
event.getEntity(), event.getPersister().getEntityMode(), event.getPersister(),
@ -135,38 +135,34 @@ public class BeanValidationEventListener
if ( groups.length > 0 ) {
final Set<ConstraintViolation<T>> constraintViolations = validator.validate( object, groups );
if ( constraintViolations.size() > 0 ) {
throw createConstraintViolationException( operation, groups, constraintViolations );
Set<ConstraintViolation<?>> propagatedViolations =
new HashSet<ConstraintViolation<?>>( constraintViolations.size() );
Set<String> classNames = new HashSet<String>();
for ( ConstraintViolation<?> violation : constraintViolations ) {
LOG.trace( violation );
propagatedViolations.add( violation );
classNames.add( violation.getLeafBean().getClass().getName() );
}
StringBuilder builder = new StringBuilder();
builder.append( "Validation failed for classes " );
builder.append( classNames );
builder.append( " during " );
builder.append( operation.getName() );
builder.append( " time for groups " );
builder.append( toString( groups ) );
builder.append( "\nList of constraint violations:[\n" );
for (ConstraintViolation<?> violation : constraintViolations) {
builder.append( "\t" ).append( violation.toString() ).append("\n");
}
builder.append( "]" );
throw new ConstraintViolationException(
builder.toString(), propagatedViolations
);
}
}
}
/**
 * Assembles the ConstraintViolationException thrown when validation fails: the message lists
 * the failing (leaf bean) classes, the operation, the applied groups, and every violation.
 *
 * @param operation The operation (insert/update/delete) during which validation failed
 * @param groups The validation groups that were applied
 * @param constraintViolations The violations reported by the Validator; assumed non-empty
 *
 * @return The assembled exception carrying the wildcard-typed violations; caller throws it
 */
private <T> ConstraintViolationException createConstraintViolationException(GroupsPerOperation.Operation operation, Class<?>[] groups, Set<ConstraintViolation<T>> constraintViolations) {
	// Re-wrap violations under a wildcard type so they can be propagated in the exception.
	Set<ConstraintViolation<?>> propagatedViolations =
			new HashSet<ConstraintViolation<?>>( constraintViolations.size() );
	Set<String> classNames = new HashSet<String>();
	for ( ConstraintViolation<?> violation : constraintViolations ) {
		LOG.trace( violation );
		propagatedViolations.add( violation );
		classNames.add( violation.getLeafBean().getClass().getName() );
	}
	StringBuilder builder = new StringBuilder();
	builder.append( "Validation failed for classes " );
	builder.append( classNames );
	builder.append( " during " );
	builder.append( operation.getName() );
	builder.append( " time for groups " );
	builder.append( toString( groups ) );
	builder.append( "\nList of constraint violations:[\n" );
	for ( ConstraintViolation<?> violation : constraintViolations ) {
		builder.append( "\t" ).append( violation.toString() ).append( "\n" );
	}
	builder.append( "]" );
	return new ConstraintViolationException(
			builder.toString(), propagatedViolations
	);
}
private String toString(Class<?>[] groups) {
StringBuilder toString = new StringBuilder( "[" );
for ( Class<?> group : groups ) {

View File

@ -25,22 +25,16 @@ package org.hibernate.cfg.beanvalidation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.Properties;
import java.util.Set;
import org.jboss.logging.Logger;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Configuration;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.integrator.spi.Integrator;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.metamodel.spi.MetadataImplementor;
import org.hibernate.service.spi.SessionFactoryServiceRegistry;
@ -55,311 +49,236 @@ public class BeanValidationIntegrator implements Integrator {
);
public static final String APPLY_CONSTRAINTS = "hibernate.validator.apply_to_ddl";
public static final String BV_CHECK_CLASS = "javax.validation.Validation";
public static final String MODE_PROPERTY = "javax.persistence.validation.mode";
private static final String ACTIVATOR_CLASS = "org.hibernate.cfg.beanvalidation.TypeSafeActivator";
private static final String DDL_METHOD = "applyDDL";
private static final String ACTIVATE_METHOD = "activateBeanValidation";
private static final String ASSERT_VALIDATOR_FACTORY_INSTANCE_METHOD = "assertObjectIsValidatorFactoryInstance";
private static final String ACTIVATOR_CLASS_NAME = "org.hibernate.cfg.beanvalidation.TypeSafeActivator";
private static final String VALIDATE_SUPPLIED_FACTORY_METHOD_NAME = "validateSuppliedFactory";
private static final String ACTIVATE_METHOD_NAME = "activate";
@Override
// TODO Can be removed once the switch to the new metamodel is complete. See also HHH-7470 (HF)
public void integrate(
Configuration configuration,
SessionFactoryImplementor sessionFactory,
SessionFactoryServiceRegistry serviceRegistry) {
final Set<ValidationMode> modes = ValidationMode.getModes( configuration.getProperties().get( MODE_PROPERTY ) );
final ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
final Dialect dialect = serviceRegistry.getService( JdbcServices.class ).getDialect();
final boolean isBeanValidationAvailable = isBeanValidationOnClasspath( classLoaderService );
final Class typeSafeActivatorClass = loadTypeSafeActivatorClass( serviceRegistry );
applyRelationalConstraints(
modes,
isBeanValidationAvailable,
typeSafeActivatorClass,
configuration,
dialect
);
applyHibernateListeners(
modes,
isBeanValidationAvailable,
typeSafeActivatorClass,
sessionFactory,
serviceRegistry
);
/**
 * Used to validate the type of an explicitly passed ValidatorFactory instance
 *
 * @param object The supposed ValidatorFactory instance
 *
 * @throws HibernateException If the activator class/method cannot be used, or the check fails
 */
@SuppressWarnings("unchecked")
public static void validateFactory(Object object) {
	try {
		// this direct usage of ClassLoader should be fine since the classes exist in the same jar
		final Class activatorClass = BeanValidationIntegrator.class.getClassLoader().loadClass( ACTIVATOR_CLASS_NAME );
		try {
			final Method validateMethod = activatorClass.getMethod( VALIDATE_SUPPLIED_FACTORY_METHOD_NAME, Object.class );
			if ( ! validateMethod.isAccessible() ) {
				// Force accessibility in case the target method is not public.
				validateMethod.setAccessible( true );
			}
			try {
				validateMethod.invoke( null, object );
			}
			catch (InvocationTargetException e) {
				// Unwrap so the caller sees the actual validation failure, not the reflection wrapper.
				if ( e.getTargetException() instanceof HibernateException ) {
					throw (HibernateException) e.getTargetException();
				}
				throw new HibernateException( "Unable to check validity of passed ValidatorFactory", e );
			}
			catch (IllegalAccessException e) {
				throw new HibernateException( "Unable to check validity of passed ValidatorFactory", e );
			}
		}
		catch (HibernateException e) {
			// Already meaningful; re-throw untouched.
			throw e;
		}
		catch (Exception e) {
			throw new HibernateException( "Could not locate method needed for ValidatorFactory validation", e );
		}
	}
	catch (HibernateException e) {
		throw e;
	}
	catch (Exception e) {
		throw new HibernateException( "Could not locate TypeSafeActivator class", e );
	}
}
@Override
public void integrate(MetadataImplementor metadata,
SessionFactoryImplementor sessionFactory,
SessionFactoryServiceRegistry serviceRegistry) {
public void integrate(
final MetadataImplementor metadata,
final SessionFactoryImplementor sessionFactory,
final SessionFactoryServiceRegistry serviceRegistry ) {
// IMPL NOTE : see the comments on ActivationContext.getValidationModes() as to why this is multi-valued...
final Set<ValidationMode> modes = ValidationMode.getModes(
sessionFactory.getProperties()
.get( MODE_PROPERTY )
);
final ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
final Dialect dialect = serviceRegistry.getService( JdbcServices.class ).getDialect();
final boolean isBeanValidationAvailable = isBeanValidationOnClasspath( classLoaderService );
final Class typeSafeActivatorClass = loadTypeSafeActivatorClass( serviceRegistry );
serviceRegistry.getService( ConfigurationService.class )
.getSetting( MODE_PROPERTY ));
if ( modes.size() > 1 ) {
LOG.multipleValidationModes( ValidationMode.loggable( modes ) );
}
if ( modes.size() == 1 && modes.contains( ValidationMode.NONE ) ) {
// we have nothing to do; just return
return;
}
applyRelationalConstraints(
modes,
isBeanValidationAvailable,
typeSafeActivatorClass,
sessionFactory.getProperties(),
metadata,
dialect
);
applyHibernateListeners(
modes,
isBeanValidationAvailable,
typeSafeActivatorClass,
sessionFactory,
serviceRegistry
);
final ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
// see if the Bean Validation API is available on the classpath
if ( isBeanValidationApiAvailable( classLoaderService ) ) {
// and if so, call out to the TypeSafeActivator
try {
final Class typeSafeActivatorClass = loadTypeSafeActivatorClass( classLoaderService );
@SuppressWarnings("unchecked")
final Method activateMethod = typeSafeActivatorClass.getMethod( ACTIVATE_METHOD_NAME, ActivationContext.class );
final ActivationContext activationContext = new ActivationContext() {
@Override
public Set<ValidationMode> getValidationModes() {
return modes;
}
@Override
public Configuration getConfiguration() {
return null;
}
@Override
public SessionFactoryImplementor getSessionFactory() {
return sessionFactory;
}
@Override
public SessionFactoryServiceRegistry getServiceRegistry() {
return serviceRegistry;
}
};
try {
activateMethod.invoke( null, activationContext );
}
catch (InvocationTargetException e) {
if ( HibernateException.class.isInstance( e.getTargetException() ) ) {
throw ( (HibernateException) e.getTargetException() );
}
throw new IntegrationException( "Error activating Bean Validation integration", e.getTargetException() );
}
catch (Exception e) {
throw new IntegrationException( "Error activating Bean Validation integration", e );
}
}
catch (NoSuchMethodException e) {
throw new HibernateException( "Unable to locate TypeSafeActivator#activate method", e );
}
}
else {
// otherwise check the validation modes
// todo : in many ways this duplicates the checks done on the TypeSafeActivator when a ValidatorFactory could not be obtained
validateMissingBeanValidationApi( modes );
}
}
@Override
public void integrate(
final Configuration configuration,
final SessionFactoryImplementor sessionFactory,
final SessionFactoryServiceRegistry serviceRegistry) {
// IMPL NOTE : see the comments on ActivationContext.getValidationModes() as to why this is multi-valued...
final Set<ValidationMode> modes = ValidationMode.getModes( configuration.getProperties().get( MODE_PROPERTY ) );
if ( modes.size() > 1 ) {
LOG.multipleValidationModes( ValidationMode.loggable( modes ) );
}
if ( modes.size() == 1 && modes.contains( ValidationMode.NONE ) ) {
// we have nothing to do; just return
return;
}
final ClassLoaderService classLoaderService = serviceRegistry.getService( ClassLoaderService.class );
// see if the Bean Validation API is available on the classpath
if ( isBeanValidationApiAvailable( classLoaderService ) ) {
// and if so, call out to the TypeSafeActivator
try {
final Class typeSafeActivatorClass = loadTypeSafeActivatorClass( classLoaderService );
@SuppressWarnings("unchecked")
final Method activateMethod = typeSafeActivatorClass.getMethod( ACTIVATE_METHOD_NAME, ActivationContext.class );
final ActivationContext activationContext = new ActivationContext() {
@Override
public Set<ValidationMode> getValidationModes() {
return modes;
}
@Override
public Configuration getConfiguration() {
return configuration;
}
@Override
public SessionFactoryImplementor getSessionFactory() {
return sessionFactory;
}
@Override
public SessionFactoryServiceRegistry getServiceRegistry() {
return serviceRegistry;
}
};
try {
activateMethod.invoke( null, activationContext );
}
catch (InvocationTargetException e) {
if ( HibernateException.class.isInstance( e.getTargetException() ) ) {
throw ( (HibernateException) e.getTargetException() );
}
throw new IntegrationException( "Error activating Bean Validation integration", e.getTargetException() );
}
catch (Exception e) {
throw new IntegrationException( "Error activating Bean Validation integration", e );
}
}
catch (NoSuchMethodException e) {
throw new HibernateException( "Unable to locate TypeSafeActivator#activate method", e );
}
}
else {
// otherwise check the validation modes
// todo : in many ways this duplicates the checks done on the TypeSafeActivator when a ValidatorFactory could not be obtained
validateMissingBeanValidationApi( modes );
}
}
/**
 * Checks whether the Bean Validation API is visible through the given ClassLoaderService.
 *
 * @param classLoaderService The class loader service used to probe for the API class
 *
 * @return {@code true} if the API class could be loaded; {@code false} otherwise
 */
private boolean isBeanValidationApiAvailable(ClassLoaderService classLoaderService) {
	try {
		classLoaderService.classForName( BV_CHECK_CLASS );
		return true;
	}
	catch (Exception e) {
		// Deliberate best-effort probe: any load failure simply means "not available".
		return false;
	}
}
/**
 * Used to validate the case when the Bean Validation API is not available.
 *
 * @param modes The requested validation modes.
 *
 * @throws IntegrationException If an explicit mode (callback/ddl) required the missing API
 */
private void validateMissingBeanValidationApi(Set<ValidationMode> modes) {
	// CALLBACK and DDL are explicit user requests, so silently skipping them would be wrong;
	// AUTO/NONE tolerate a missing API and need no check here.
	if ( modes.contains( ValidationMode.CALLBACK ) ) {
		throw new IntegrationException( "Bean Validation API was not available, but 'callback' validation was requested" );
	}
	if ( modes.contains( ValidationMode.DDL ) ) {
		throw new IntegrationException( "Bean Validation API was not available, but 'ddl' validation was requested" );
	}
}
/**
 * Loads the TypeSafeActivator class, which isolates the hard Bean Validation dependencies.
 *
 * @param classLoaderService The class loader service to load through
 *
 * @return The TypeSafeActivator class; never {@code null}
 *
 * @throws HibernateException If the class could not be loaded
 */
private Class loadTypeSafeActivatorClass(ClassLoaderService classLoaderService) {
	try {
		return classLoaderService.classForName( ACTIVATOR_CLASS_NAME );
	}
	catch (Exception e) {
		throw new HibernateException( "Unable to load TypeSafeActivator class", e );
	}
}
/**
 * No-op: per the inline note, no teardown is required for Bean Validation integration.
 */
@Override
public void disintegrate(SessionFactoryImplementor sessionFactory, SessionFactoryServiceRegistry serviceRegistry) {
	// nothing to do here afaik
}
/**
 * Reflectively asserts that the given object is a ValidatorFactory, delegating to
 * TypeSafeActivator so this class keeps no hard link to the Bean Validation API.
 *
 * @param object The supposed ValidatorFactory instance
 *
 * @throws HibernateException If the activator class or its check method cannot be used,
 * or if the check itself fails
 */
public static void validateFactory(Object object) {
	Class activatorClass;
	try {
		// Direct ClassLoader usage is acceptable: both classes live in the same jar.
		activatorClass = BeanValidationIntegrator.class.getClassLoader().loadClass( ACTIVATOR_CLASS );
	}
	catch ( HibernateException e ) {
		throw e;
	}
	catch ( Exception e ) {
		throw new HibernateException( "Could not locate TypeSafeActivator class", e );
	}
	try {
		final Method validateMethod = getValidateMethod( activatorClass );
		invokeValidateMethod( object, validateMethod );
	}
	catch ( HibernateException e ) {
		// Re-throw untouched so the original failure reason is preserved.
		throw e;
	}
	catch ( Exception e ) {
		throw new HibernateException( "Could not locate method needed for ValidatorFactory validation", e );
	}
}
/**
 * Invokes the (static) validation method, unwrapping reflection exceptions so callers see
 * the underlying HibernateException where possible.
 *
 * @param object The supposed ValidatorFactory instance to check
 * @param validateMethod The TypeSafeActivator method performing the actual check
 */
private static void invokeValidateMethod(Object object, Method validateMethod) {
	try {
		validateMethod.invoke( null, object );
	}
	catch ( InvocationTargetException e ) {
		// Surface the real failure when the target method itself threw a HibernateException.
		if ( e.getTargetException() instanceof HibernateException ) {
			throw ( HibernateException ) e.getTargetException();
		}
		throw new HibernateException( "Unable to check validity of passed ValidatorFactory", e );
	}
	catch ( IllegalAccessException e ) {
		throw new HibernateException( "Unable to check validity of passed ValidatorFactory", e );
	}
}
/**
 * Looks up TypeSafeActivator's ValidatorFactory-check method, forcing accessibility if needed.
 *
 * @param activatorClass The TypeSafeActivator class
 *
 * @return The (static) check method, made accessible
 *
 * @throws NoSuchMethodException If the expected method is not declared on the class
 */
private static Method getValidateMethod(Class activatorClass) throws NoSuchMethodException {
	final Method validateMethod = activatorClass.getMethod(
			ASSERT_VALIDATOR_FACTORY_INSTANCE_METHOD,
			Object.class
	);
	if ( !validateMethod.isAccessible() ) {
		validateMethod.setAccessible( true );
	}
	return validateMethod;
}
/**
 * Try to locate a BV class to see if it is available on the classpath
 *
 * @param classLoaderService the class loader service
 *
 * @return {@code true} if the Bean Validation classes are on the classpath, {@code false} otherwise
 */
private boolean isBeanValidationOnClasspath(ClassLoaderService classLoaderService) {
	try {
		// Probing for a well-known Bean Validation class is enough to detect availability.
		classLoaderService.classForName( BV_CHECK_CLASS );
		return true;
	}
	catch ( Exception error ) {
		// Any load failure simply means Bean Validation is not usable here.
		return false;
	}
}
/**
 * Attempts to load the TypeSafeActivator class via the registry's ClassLoaderService.
 *
 * @param serviceRegistry The registry providing the ClassLoaderService
 *
 * @return The activator class, or {@code null} when it cannot be loaded — note this variant
 * swallows the failure, so callers must handle a {@code null} result.
 */
private Class loadTypeSafeActivatorClass(SessionFactoryServiceRegistry serviceRegistry) {
	try {
		return serviceRegistry.getService( ClassLoaderService.class ).classForName( ACTIVATOR_CLASS );
	}
	catch ( Exception e ) {
		return null;
	}
}
/**
 * Reflectively invokes TypeSafeActivator's DDL method to apply Bean Validation constraints
 * (not-null, size, etc.) to the relational model built from the Configuration mappings.
 *
 * @param modes The requested validation modes; only DDL/AUTO trigger any work
 * @param beanValidationAvailable Whether the BV API was found on the classpath
 * @param typeSafeActivatorClass The activator class; may be {@code null} if loading failed
 * @param configuration The Configuration supplying mappings and properties
 * @param dialect The dialect used when sizing/adjusting columns
 *
 * @throws HibernateException If DDL mode was explicitly requested without BV available,
 * or if locating/invoking the activator method fails
 */
private void applyRelationalConstraints(
		Set<ValidationMode> modes,
		boolean beanValidationAvailable,
		Class typeSafeActivatorClass,
		Configuration configuration,
		Dialect dialect) {
	// Users may globally opt out of DDL constraint application.
	if ( !ConfigurationHelper.getBoolean( APPLY_CONSTRAINTS, configuration.getProperties(), true ) ) {
		LOG.debug( "Skipping application of relational constraints from legacy Hibernate Validator" );
		return;
	}
	if ( !( modes.contains( ValidationMode.DDL ) || modes.contains( ValidationMode.AUTO ) ) ) {
		return;
	}
	if ( !beanValidationAvailable ) {
		// DDL is an explicit request and must fail loudly; AUTO degrades silently.
		if ( modes.contains( ValidationMode.DDL ) ) {
			throw new HibernateException( "Bean Validation not available in the class path but required in " + MODE_PROPERTY );
		}
		else if ( modes.contains( ValidationMode.AUTO ) ) {
			//nothing to activate
			return;
		}
	}
	try {
		Method applyDDLMethod = typeSafeActivatorClass.getMethod(
				DDL_METHOD,
				Collection.class,
				Properties.class,
				Dialect.class
		);
		try {
			applyDDLMethod.invoke(
					null,
					configuration.createMappings().getClasses().values(),
					configuration.getProperties(),
					dialect
			);
		}
		catch ( HibernateException e ) {
			throw e;
		}
		catch ( Exception e ) {
			throw new HibernateException( "Error applying BeanValidation relational constraints", e );
		}
	}
	catch ( HibernateException e ) {
		throw e;
	}
	catch ( Exception e ) {
		throw new HibernateException( "Unable to locate TypeSafeActivator#applyDDL method", e );
	}
}
/**
 * Metamodel variant: reflectively invokes TypeSafeActivator's DDL method against the new
 * metamodel's entity bindings instead of Configuration-based mappings.
 *
 * @param modes The requested validation modes; only DDL/AUTO trigger any work
 * @param beanValidationAvailable Whether the BV API was found on the classpath
 * @param typeSafeActivatorClass The activator class; may be {@code null} if loading failed
 * @param properties Configuration properties (opt-out flag, groups, etc.)
 * @param metadata The metamodel supplying entity bindings and the service registry
 * @param dialect The dialect used when sizing/adjusting columns
 *
 * @throws HibernateException If DDL mode was explicitly requested without BV available,
 * or if locating/invoking the activator method fails
 */
private void applyRelationalConstraints(Set<ValidationMode> modes,
										boolean beanValidationAvailable,
										Class typeSafeActivatorClass,
										Properties properties,
										MetadataImplementor metadata,
										Dialect dialect) {
	// Users may globally opt out of DDL constraint application.
	if ( !ConfigurationHelper.getBoolean( APPLY_CONSTRAINTS, properties, true ) ) {
		LOG.debug( "Skipping application of relational constraints from legacy Hibernate Validator" );
		return;
	}
	if ( !( modes.contains( ValidationMode.DDL ) || modes.contains( ValidationMode.AUTO ) ) ) {
		return;
	}
	if ( !beanValidationAvailable ) {
		// DDL is an explicit request and must fail loudly; AUTO degrades silently.
		if ( modes.contains( ValidationMode.DDL ) ) {
			throw new HibernateException( "Bean Validation not available in the class path but required in " + MODE_PROPERTY );
		}
		if ( modes.contains( ValidationMode.AUTO ) ) {
			return; //nothing to activate
		}
	}
	try {
		Method applyDDLMethod = typeSafeActivatorClass.getMethod(
				DDL_METHOD,
				Iterable.class,
				Properties.class,
				ClassLoaderService.class,
				Dialect.class
		);
		try {
			applyDDLMethod.invoke(
					null,
					metadata.getEntityBindings(),
					properties,
					metadata.getServiceRegistry().getService( ClassLoaderService.class ),
					dialect
			);
		}
		catch ( HibernateException error ) {
			throw error;
		}
		catch ( Exception error ) {
			throw new HibernateException( "Error applying BeanValidation relational constraints", error );
		}
	}
	catch ( HibernateException error ) {
		throw error;
	}
	catch ( Exception error ) {
		throw new HibernateException( "Unable to locate TypeSafeActivator#applyDDL method", error );
	}
}
/**
 * Reflectively invokes TypeSafeActivator's listener-activation method to register the Bean
 * Validation event listeners (pre-insert/update/delete) on the SessionFactory being built.
 * Also disables core not-null tracking when the user did not explicitly configure it, since
 * Bean Validation takes over that responsibility.
 *
 * @param modes The requested validation modes; only CALLBACK/AUTO trigger any work
 * @param beanValidationAvailable Whether the BV API was found on the classpath
 * @param typeSafeActivatorClass The activator class; may be {@code null} if loading failed
 * @param sessionFactory The SessionFactory being built
 * @param serviceRegistry The registry providing the EventListenerRegistry
 *
 * @throws HibernateException If CALLBACK mode was explicitly requested without BV available,
 * or if locating/invoking the activator method fails
 */
private void applyHibernateListeners(Set<ValidationMode> modes,
									 boolean beanValidationAvailable,
									 Class typeSafeActivatorClass,
									 SessionFactoryImplementor sessionFactory,
									 SessionFactoryServiceRegistry serviceRegistry) {
	// de-activate not-null tracking at the core level when Bean Validation is present unless the user explicitly
	// asks for it
	if ( sessionFactory.getProperties().getProperty( Environment.CHECK_NULLABILITY ) == null ) {
		sessionFactory.getSettings().setCheckNullability( false );
	}
	if ( !( modes.contains( ValidationMode.CALLBACK ) || modes.contains( ValidationMode.AUTO ) ) ) {
		return;
	}
	if ( !beanValidationAvailable ) {
		// CALLBACK is an explicit request and must fail loudly; AUTO degrades silently.
		if ( modes.contains( ValidationMode.CALLBACK ) ) {
			throw new HibernateException( "Bean Validation not available in the class path but required in " + MODE_PROPERTY );
		}
		if ( modes.contains( ValidationMode.AUTO ) ) {
			return; //nothing to activate
		}
	}
	try {
		Method activateMethod = typeSafeActivatorClass.getMethod(
				ACTIVATE_METHOD,
				EventListenerRegistry.class,
				Properties.class
		);
		try {
			activateMethod.invoke(
					null,
					serviceRegistry.getService( EventListenerRegistry.class ),
					sessionFactory.getProperties()
			);
		}
		catch ( HibernateException e ) {
			throw e;
		}
		catch ( Exception e ) {
			// FIX: this path activates listeners, not relational constraints; message corrected.
			throw new HibernateException( "Error activating BeanValidation event listeners", e );
		}
	}
	catch ( HibernateException e ) {
		throw e;
	}
	catch ( Exception e ) {
		// FIX: this lookup targets the activation method, not applyDDL; message corrected.
		throw new HibernateException( "Unable to locate TypeSafeActivator#" + ACTIVATE_METHOD + " method", e );
	}
}
}

View File

@ -0,0 +1,41 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.cfg.beanvalidation;
import org.hibernate.HibernateException;
/**
 * Indicates a problem integrating Hibernate and the Bean Validation spec.
 *
 * @author Steve Ebersole
 */
public class IntegrationException extends HibernateException {
	/**
	 * Constructs an IntegrationException describing an integration failure.
	 *
	 * @param message Description of the problem
	 */
	public IntegrationException(String message) {
		super( message );
	}
	/**
	 * Constructs an IntegrationException with a message and an underlying cause.
	 *
	 * @param message Description of the problem
	 * @param root The underlying cause
	 */
	public IntegrationException(String message, Throwable root) {
		super( message, root );
	}
}

View File

@ -23,119 +23,147 @@
*/
package org.hibernate.cfg.beanvalidation;
import java.util.ArrayList;
import javax.validation.Validation;
import javax.validation.ValidatorFactory;
import javax.validation.constraints.Digits;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import javax.validation.metadata.BeanDescriptor;
import javax.validation.metadata.ConstraintDescriptor;
import javax.validation.metadata.PropertyDescriptor;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import javax.validation.Validation;
import javax.validation.ValidatorFactory;
import javax.validation.metadata.BeanDescriptor;
import javax.validation.metadata.ConstraintDescriptor;
import javax.validation.metadata.PropertyDescriptor;
import org.jboss.logging.Logger;
import org.hibernate.AssertionFailure;
import org.hibernate.EntityMode;
import org.hibernate.HibernateException;
import org.hibernate.MappingException;
import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
import org.hibernate.cfg.beanvalidation.ddl.DigitsSchemaConstraint;
import org.hibernate.cfg.beanvalidation.ddl.LengthSchemaConstraint;
import org.hibernate.cfg.beanvalidation.ddl.MaxSchemaConstraint;
import org.hibernate.cfg.beanvalidation.ddl.MinSchemaConstraint;
import org.hibernate.cfg.beanvalidation.ddl.NotNullSchemaConstraint;
import org.hibernate.cfg.beanvalidation.ddl.SchemaConstraint;
import org.hibernate.cfg.beanvalidation.ddl.SizeSchemaConstraint;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.event.spi.EventType;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.ReflectHelper;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Component;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Property;
import org.hibernate.metamodel.spi.binding.AttributeBinding;
import org.hibernate.metamodel.spi.binding.BasicAttributeBinding;
import org.hibernate.metamodel.spi.binding.EntityBinding;
import org.hibernate.metamodel.spi.binding.EntityIdentifier;
import org.hibernate.mapping.SingleTableSubclass;
/**
* @author Emmanuel Bernard
* @author Hardy Ferentschik
* @author Steve Ebersole
*/
class TypeSafeActivator {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(
CoreMessageLogger.class,
TypeSafeActivator.class.getName()
);
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, TypeSafeActivator.class.getName());
private static final String FACTORY_PROPERTY = "javax.persistence.validation.factory";
private static final List<SchemaConstraint> schemaConstraints;
private static final NotNullSchemaConstraint notNullSchemaConstraint = new NotNullSchemaConstraint();
static {
schemaConstraints = new ArrayList<SchemaConstraint>();
schemaConstraints.add( new DigitsSchemaConstraint() );
schemaConstraints.add( new SizeSchemaConstraint() );
schemaConstraints.add( new MinSchemaConstraint() );
schemaConstraints.add( new MaxSchemaConstraint() );
schemaConstraints.add( new LengthSchemaConstraint() );
}
/**
* Verifies that the specified object is an instance of {@code ValidatorFactory}.
* <p>
* Note:<br/>
* The check is done here to avoid a hard link to Bean Validation
* </p>
* Used to validate a supplied ValidatorFactory instance as being castable to ValidatorFactory.
*
* @param object the object to check
*
* @see BeanValidationIntegrator#validateFactory(Object)
* @param object The supplied ValidatorFactory instance.
*/
@SuppressWarnings({ "UnusedDeclaration" })
public static void assertObjectIsValidatorFactoryInstance(Object object) {
if ( object == null ) {
throw new IllegalArgumentException( "null cannot be a valid ValidatorFactory" );
}
if ( !ValidatorFactory.class.isInstance( object ) ) {
throw new HibernateException(
"Given object was not an instance of " + ValidatorFactory.class.getName() + "[" + object.getClass()
.getName() + "]"
@SuppressWarnings( {"UnusedDeclaration"})
public static void validateSuppliedFactory(Object object) {
if ( ! ValidatorFactory.class.isInstance( object ) ) {
throw new IntegrationException(
"Given object was not an instance of " + ValidatorFactory.class.getName()
+ "[" + object.getClass().getName() + "]"
);
}
}
@SuppressWarnings({ "UnusedDeclaration" })
public static void activateBeanValidation(EventListenerRegistry listenerRegistry, Properties properties) {
ValidatorFactory factory = getValidatorFactory( properties );
BeanValidationEventListener listener = new BeanValidationEventListener(
factory, properties
/**
 * Entry point called reflectively by BeanValidationIntegrator: obtains the ValidatorFactory
 * and applies relational (DDL) constraints plus callback listeners per the requested modes.
 * Failure to acquire a factory is fatal only for explicitly requested modes (callback/ddl).
 *
 * NOTE(review): reads activationContext.getConfiguration() without a null check — the
 * metadata-driven integrate() path supplies a null Configuration; confirm callers.
 *
 * @param activationContext Collected activation state (modes, configuration, registry)
 */
@SuppressWarnings("UnusedDeclaration")
public static void activate(ActivationContext activationContext) {
	final Properties properties = activationContext.getConfiguration().getProperties();
	final ValidatorFactory factory;
	try {
		factory = getValidatorFactory( properties );
	}
	catch (IntegrationException e) {
		// Explicit modes must fail loudly; AUTO simply skips activation.
		if ( activationContext.getValidationModes().contains( ValidationMode.CALLBACK ) ) {
			throw new IntegrationException( "Bean Validation provider was not available, but 'callback' validation was requested", e );
		}
		if ( activationContext.getValidationModes().contains( ValidationMode.DDL ) ) {
			throw new IntegrationException( "Bean Validation provider was not available, but 'ddl' validation was requested", e );
		}
		LOG.debug( "Unable to acquire Bean Validation ValidatorFactory, skipping activation" );
		return;
	}
	applyRelationalConstraints( factory, activationContext );
	applyCallbackListeners( factory, activationContext );
}
/**
 * Registers the BeanValidationEventListener for pre-insert/update/delete events when the
 * CALLBACK or AUTO mode is active, and disables core not-null tracking unless the user
 * explicitly configured it (Bean Validation takes over that check).
 *
 * @param validatorFactory The factory used by the registered listener
 * @param activationContext Collected activation state (modes, configuration, registry)
 */
@SuppressWarnings( {"UnusedDeclaration"})
public static void applyCallbackListeners(ValidatorFactory validatorFactory, ActivationContext activationContext) {
	final Set<ValidationMode> modes = activationContext.getValidationModes();
	if ( ! ( modes.contains( ValidationMode.CALLBACK ) || modes.contains( ValidationMode.AUTO ) ) ) {
		return;
	}
	// de-activate not-null tracking at the core level when Bean Validation is present unless the user explicitly
	// asks for it
	if ( activationContext.getConfiguration().getProperty( Environment.CHECK_NULLABILITY ) == null ) {
		activationContext.getSessionFactory().getSettings().setCheckNullability( false );
	}
	final BeanValidationEventListener listener = new BeanValidationEventListener(
			validatorFactory,
			activationContext.getConfiguration().getProperties()
	);
	final EventListenerRegistry listenerRegistry = activationContext.getServiceRegistry()
			.getService( EventListenerRegistry.class );
	// Avoid duplicate registration when activation runs more than once.
	listenerRegistry.addDuplicationStrategy( DuplicationStrategyImpl.INSTANCE );
	listenerRegistry.appendListeners( EventType.PRE_INSERT, listener );
	listenerRegistry.appendListeners( EventType.PRE_UPDATE, listener );
	listenerRegistry.appendListeners( EventType.PRE_DELETE, listener );
	listener.initialize( activationContext.getConfiguration() );
}
@SuppressWarnings({ "UnusedDeclaration" })
// see BeanValidationIntegrator#applyRelationalConstraints
public static void applyDDL(Collection<PersistentClass> persistentClasses, Properties properties, Dialect dialect) {
/**
 * Applies Bean Validation constraints to the relational model when DDL or AUTO mode is
 * active and the user has not opted out via the APPLY_CONSTRAINTS setting; delegates to the
 * Collection-based overload with the Configuration's persistent classes.
 *
 * @param factory The ValidatorFactory (currently unused here; the overload re-acquires one)
 * @param activationContext Collected activation state (modes, configuration, registry)
 */
@SuppressWarnings({"unchecked", "UnusedParameters"})
private static void applyRelationalConstraints(ValidatorFactory factory, ActivationContext activationContext) {
	final Properties properties = activationContext.getConfiguration().getProperties();
	if ( ! ConfigurationHelper.getBoolean( BeanValidationIntegrator.APPLY_CONSTRAINTS, properties, true ) ){
		LOG.debug( "Skipping application of relational constraints from legacy Hibernate Validator" );
		return;
	}
	final Set<ValidationMode> modes = activationContext.getValidationModes();
	if ( ! ( modes.contains( ValidationMode.DDL ) || modes.contains( ValidationMode.AUTO ) ) ) {
		return;
	}
	applyRelationalConstraints(
			activationContext.getConfiguration().createMappings().getClasses().values(),
			properties,
			activationContext.getServiceRegistry().getService( JdbcServices.class ).getDialect()
	);
}
@SuppressWarnings( {"UnusedDeclaration"})
public static void applyRelationalConstraints(Collection<PersistentClass> persistentClasses, Properties properties, Dialect dialect) {
ValidatorFactory factory = getValidatorFactory( properties );
Class<?>[] groupsArray = new GroupsPerOperation( properties ).get( GroupsPerOperation.Operation.DDL );
Set<Class<?>> groups = new HashSet<Class<?>>( Arrays.asList( groupsArray ) );
@ -143,7 +171,7 @@ class TypeSafeActivator {
for ( PersistentClass persistentClass : persistentClasses ) {
final String className = persistentClass.getClassName();
if ( StringHelper.isEmpty( className ) ) {
if ( className == null || className.length() == 0 ) {
continue;
}
Class<?> clazz;
@ -157,34 +185,32 @@ class TypeSafeActivator {
try {
applyDDL( "", persistentClass, clazz, factory, groups, true, dialect );
}
catch ( Exception e ) {
catch (Exception e) {
LOG.unableToApplyConstraints( className, e );
}
}
}
private static void applyDDL(String prefix,
PersistentClass persistentClass,
Class<?> clazz,
ValidatorFactory factory,
Set<Class<?>> groups,
boolean activateNotNull,
Dialect dialect) {
private static void applyDDL(
String prefix,
PersistentClass persistentClass,
Class<?> clazz,
ValidatorFactory factory,
Set<Class<?>> groups,
boolean activateNotNull,
Dialect dialect) {
final BeanDescriptor descriptor = factory.getValidator().getConstraintsForClass( clazz );
//no bean level constraints can be applied, go to the properties
for ( PropertyDescriptor propertyDesc : descriptor.getConstrainedProperties() ) {
Property property = findPropertyByName( persistentClass, prefix + propertyDesc.getPropertyName() );
boolean hasNotNull;
if ( property != null ) {
hasNotNull = applyConstraints(
propertyDesc.getConstraintDescriptors(),
property,
propertyDesc,
groups,
activateNotNull,
dialect
propertyDesc.getConstraintDescriptors(), property, propertyDesc, groups, activateNotNull, dialect
);
if ( property.isComposite() && propertyDesc.isCascaded() ) {
Class<?> componentClass = ( ( Component ) property.getValue() ).getComponentClass();
Class<?> componentClass = ( (Component) property.getValue() ).getComponentClass();
/*
* we can apply not null if the upper component let's us activate not null
@ -194,10 +220,7 @@ class TypeSafeActivator {
final boolean canSetNotNullOnColumns = activateNotNull && hasNotNull;
applyDDL(
prefix + propertyDesc.getPropertyName() + ".",
persistentClass,
componentClass,
factory,
groups,
persistentClass, componentClass, factory, groups,
canSetNotNullOnColumns,
dialect
);
@ -207,35 +230,38 @@ class TypeSafeActivator {
}
}
private static boolean applyConstraints(Set<ConstraintDescriptor<?>> constraintDescriptors,
Property property,
PropertyDescriptor propertyDescriptor,
Set<Class<?>> groups,
boolean canApplyNotNull,
Dialect dialect) {
private static boolean applyConstraints(
Set<ConstraintDescriptor<?>> constraintDescriptors,
Property property,
PropertyDescriptor propertyDesc,
Set<Class<?>> groups,
boolean canApplyNotNull,
Dialect dialect) {
boolean hasNotNull = false;
for ( ConstraintDescriptor<?> constraintDescriptor : constraintDescriptors ) {
if ( groups != null && Collections.disjoint( constraintDescriptor.getGroups(), groups ) ) {
for ( ConstraintDescriptor<?> descriptor : constraintDescriptors ) {
if ( groups != null && Collections.disjoint( descriptor.getGroups(), groups ) ) {
continue;
}
if ( canApplyNotNull ) {
hasNotNull = hasNotNull || notNullSchemaConstraint.applyConstraint(
property,
constraintDescriptor,
propertyDescriptor,
dialect
);
hasNotNull = hasNotNull || applyNotNull( property, descriptor );
}
for ( SchemaConstraint schemaConstraint : schemaConstraints ) {
schemaConstraint.applyConstraint( property, constraintDescriptor, propertyDescriptor, dialect );
}
// apply bean validation specific constraints
applyDigits( property, descriptor );
applySize( property, descriptor, propertyDesc );
applyMin( property, descriptor, dialect );
applyMax( property, descriptor, dialect );
// apply hibernate validator specific constraints - we cannot import any HV specific classes though!
// no need to check explicitly for @Range. @Range is a composed constraint using @Min and @Max which
// will be taken care later
applyLength( property, descriptor, propertyDesc );
// pass an empty set as composing constraints inherit the main constraint and thus are matching already
hasNotNull = hasNotNull || applyConstraints(
constraintDescriptor.getComposingConstraints(),
property, propertyDescriptor, null,
descriptor.getComposingConstraints(),
property, propertyDesc, null,
canApplyNotNull,
dialect
);
@ -243,75 +269,102 @@ class TypeSafeActivator {
return hasNotNull;
}
private static void applyMin(Property property, ConstraintDescriptor<?> descriptor, Dialect dialect) {
if ( Min.class.equals( descriptor.getAnnotation().annotationType() ) ) {
@SuppressWarnings("unchecked")
ConstraintDescriptor<Min> minConstraint = (ConstraintDescriptor<Min>) descriptor;
long min = minConstraint.getAnnotation().value();
@SuppressWarnings({ "UnusedDeclaration" })
// see BeanValidationIntegrator#applyRelationalConstraints
public static void applyDDL(Iterable<EntityBinding> bindings,
Properties properties,
ClassLoaderService classLoaderService,
Dialect dialect) {
final ValidatorFactory factory = getValidatorFactory( properties );
final Class<?>[] groupsArray = new GroupsPerOperation( properties, classLoaderService ).get( GroupsPerOperation.Operation.DDL );
final Set<Class<?>> groups = new HashSet<Class<?>>( Arrays.asList( groupsArray ) );
Column col = (Column) property.getColumnIterator().next();
String checkConstraint = col.getQuotedName(dialect) + ">=" + min;
applySQLCheck( col, checkConstraint );
}
}
for ( EntityBinding binding : bindings ) {
final String className = binding.getEntity().getClassName();
if ( binding.getHierarchyDetails().getEntityMode() != EntityMode.POJO ){
continue;
}
Class<?> clazz;
try {
clazz = classLoaderService.classForName( className );
}
catch ( ClassLoadingException error ) {
throw new AssertionFailure( "Entity class not found", error );
}
try {
final BeanDescriptor descriptor = factory.getValidator().getConstraintsForClass( clazz );
for ( PropertyDescriptor propertyDescriptor : descriptor.getConstrainedProperties() ) {
AttributeBinding attributeBinding = binding.locateAttributeBinding( propertyDescriptor.getPropertyName() );
if ( attributeBinding != null ) {
applyConstraints( propertyDescriptor, groups, attributeBinding, dialect );
private static void applyMax(Property property, ConstraintDescriptor<?> descriptor, Dialect dialect) {
if ( Max.class.equals( descriptor.getAnnotation().annotationType() ) ) {
@SuppressWarnings("unchecked")
ConstraintDescriptor<Max> maxConstraint = (ConstraintDescriptor<Max>) descriptor;
long max = maxConstraint.getAnnotation().value();
Column col = (Column) property.getColumnIterator().next();
String checkConstraint = col.getQuotedName(dialect) + "<=" + max;
applySQLCheck( col, checkConstraint );
}
}
private static void applySQLCheck(Column col, String checkConstraint) {
String existingCheck = col.getCheckConstraint();
// need to check whether the new check is already part of the existing check, because applyDDL can be called
// multiple times
if ( StringHelper.isNotEmpty( existingCheck ) && !existingCheck.contains( checkConstraint ) ) {
checkConstraint = col.getCheckConstraint() + " AND " + checkConstraint;
}
col.setCheckConstraint( checkConstraint );
}
private static boolean applyNotNull(Property property, ConstraintDescriptor<?> descriptor) {
boolean hasNotNull = false;
if ( NotNull.class.equals( descriptor.getAnnotation().annotationType() ) ) {
if ( !( property.getPersistentClass() instanceof SingleTableSubclass ) ) {
//single table should not be forced to null
if ( !property.isComposite() ) { //composite should not add not-null on all columns
@SuppressWarnings( "unchecked" )
Iterator<Column> iter = property.getColumnIterator();
while ( iter.hasNext() ) {
iter.next().setNullable( false );
hasNotNull = true;
}
}
}
catch ( Exception error ) {
LOG.unableToApplyConstraints( className, error );
hasNotNull = true;
}
return hasNotNull;
}
private static void applyDigits(Property property, ConstraintDescriptor<?> descriptor) {
if ( Digits.class.equals( descriptor.getAnnotation().annotationType() ) ) {
@SuppressWarnings("unchecked")
ConstraintDescriptor<Digits> digitsConstraint = (ConstraintDescriptor<Digits>) descriptor;
int integerDigits = digitsConstraint.getAnnotation().integer();
int fractionalDigits = digitsConstraint.getAnnotation().fraction();
Column col = (Column) property.getColumnIterator().next();
col.setPrecision( integerDigits + fractionalDigits );
col.setScale( fractionalDigits );
}
}
private static void applySize(Property property, ConstraintDescriptor<?> descriptor, PropertyDescriptor propertyDescriptor) {
if ( Size.class.equals( descriptor.getAnnotation().annotationType() )
&& String.class.equals( propertyDescriptor.getElementClass() ) ) {
@SuppressWarnings("unchecked")
ConstraintDescriptor<Size> sizeConstraint = (ConstraintDescriptor<Size>) descriptor;
int max = sizeConstraint.getAnnotation().max();
Column col = (Column) property.getColumnIterator().next();
if ( max < Integer.MAX_VALUE ) {
col.setLength( max );
}
}
}
private static void applyConstraints(PropertyDescriptor propertyDescriptor,
Set<Class<?>> groups,
AttributeBinding attributeBinding,
Dialect dialect) {
for ( ConstraintDescriptor<?> constraintDescriptor : propertyDescriptor.getConstraintDescriptors() ) {
if ( groups != null && Collections.disjoint( constraintDescriptor.getGroups(), groups ) ) {
continue;
private static void applyLength(Property property, ConstraintDescriptor<?> descriptor, PropertyDescriptor propertyDescriptor) {
if ( "org.hibernate.validator.constraints.Length".equals(
descriptor.getAnnotation().annotationType().getName()
)
&& String.class.equals( propertyDescriptor.getElementClass() ) ) {
@SuppressWarnings("unchecked")
int max = (Integer) descriptor.getAttributes().get( "max" );
Column col = (Column) property.getColumnIterator().next();
if ( max < Integer.MAX_VALUE ) {
col.setLength( max );
}
for ( SchemaConstraint schemaConstraint : schemaConstraints ) {
schemaConstraint.applyConstraint( attributeBinding, constraintDescriptor, propertyDescriptor, dialect );
}
notNullSchemaConstraint.applyConstraint(
attributeBinding,
constraintDescriptor,
propertyDescriptor,
dialect
);
}
}
/**
* Locates a mapping property of a persistent class by property name
*
* @param associatedClass the persistent class
* @param propertyName the property name
*
* @param associatedClass
* @param propertyName
* @return the property by path in a recursive way, including IdentifierProperty in the loop if propertyName is
* <code>null</code>. If propertyName is <code>null</code> or empty, the IdentifierProperty is returned
* <code>null</code>. If propertyName is <code>null</code> or empty, the IdentifierProperty is returned
*/
private static Property findPropertyByName(PersistentClass associatedClass, String propertyName) {
Property property = null;
@ -331,7 +384,7 @@ class TypeSafeActivator {
}
StringTokenizer st = new StringTokenizer( propertyName, ".", false );
while ( st.hasMoreElements() ) {
String element = ( String ) st.nextElement();
String element = (String) st.nextElement();
if ( property == null ) {
property = associatedClass.getProperty( element );
}
@ -339,7 +392,7 @@ class TypeSafeActivator {
if ( !property.isComposite() ) {
return null;
}
property = ( ( Component ) property.getValue() ).getProperty( element );
property = ( (Component) property.getValue() ).getProperty( element );
}
}
}
@ -352,7 +405,7 @@ class TypeSafeActivator {
}
StringTokenizer st = new StringTokenizer( propertyName, ".", false );
while ( st.hasMoreElements() ) {
String element = ( String ) st.nextElement();
String element = (String) st.nextElement();
if ( property == null ) {
property = associatedClass.getIdentifierMapper().getProperty( element );
}
@ -360,7 +413,7 @@ class TypeSafeActivator {
if ( !property.isComposite() ) {
return null;
}
property = ( ( Component ) property.getValue() ).getProperty( element );
property = ( (Component) property.getValue() ).getProperty( element );
}
}
}
@ -371,56 +424,6 @@ class TypeSafeActivator {
return property;
}
// TODO - remove!?
/**
* @param entityBinding entity binding for the currently processed entity
* @param attrName
*
* @return the attribute by path in a recursive way, including EntityIdentifier in the loop if attrName is
* {@code null}. If attrName is {@code null} or empty, the EntityIdentifier is returned
*/
private static AttributeBinding findAttributeBindingByName(EntityBinding entityBinding,
String attrName) {
AttributeBinding attrBinding = null;
EntityIdentifier identifier = entityBinding.getHierarchyDetails().getEntityIdentifier();
BasicAttributeBinding idAttrBinding = null; //identifier.getValueBinding();
String idAttrName = idAttrBinding != null ? idAttrBinding.getAttribute().getName() : null;
try {
if ( attrName == null || attrName.length() == 0 || attrName.equals( idAttrName ) ) {
attrBinding = idAttrBinding; // default to id
}
else {
if ( attrName.indexOf( idAttrName + "." ) == 0 ) {
attrBinding = idAttrBinding;
attrName = attrName.substring( idAttrName.length() + 1 );
}
for ( StringTokenizer st = new StringTokenizer( attrName, "." ); st.hasMoreElements(); ) {
String element = st.nextToken();
if ( attrBinding == null ) {
attrBinding = entityBinding.locateAttributeBinding( element );
}
else {
return null; // TODO: if (attrBinding.isComposite()) ...
}
}
}
}
catch ( MappingException error ) {
try {
//if we do not find it try to check the identifier mapper
if ( !identifier.isIdentifierMapper() ) {
return null;
}
// TODO: finish once composite/embedded/component IDs get worked out
}
catch ( MappingException ee ) {
return null;
}
}
return attrBinding;
}
private static ValidatorFactory getValidatorFactory(Map<Object, Object> properties) {
ValidatorFactory factory = null;
if ( properties != null ) {
@ -430,7 +433,7 @@ class TypeSafeActivator {
factory = ValidatorFactory.class.cast( unsafeProperty );
}
catch ( ClassCastException e ) {
throw new HibernateException(
throw new IntegrationException(
"Property " + FACTORY_PROPERTY
+ " should contain an object of type " + ValidatorFactory.class.getName()
);
@ -442,7 +445,7 @@ class TypeSafeActivator {
factory = Validation.buildDefaultValidatorFactory();
}
catch ( Exception e ) {
throw new HibernateException( "Unable to build the default ValidatorFactory", e );
throw new IntegrationException( "Unable to build the default ValidatorFactory", e );
}
}
return factory;

View File

@ -1,7 +1,7 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2012, Red Hat Inc. or third-party contributors as
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
@ -29,37 +29,41 @@ import java.util.Set;
import org.hibernate.HibernateException;
/**
* @author Hardy Ferentschik
* Duplicates the javax.validation enum (because javax validation might not be on the runtime classpath)
*
* @author Steve Ebersole
*/
public enum ValidationMode {
AUTO,
CALLBACK,
NONE,
DDL;
AUTO( "auto" ),
CALLBACK( "callback" ),
NONE( "none" ),
DDL( "ddl" );
private final String externalForm;
private ValidationMode(String externalForm) {
this.externalForm = externalForm;
}
public static Set<ValidationMode> getModes(Object modeProperty) {
Set<ValidationMode> modes = new HashSet<ValidationMode>( 3 );
if ( modeProperty == null ) {
Set<ValidationMode> modes = new HashSet<ValidationMode>(3);
if (modeProperty == null) {
modes.add( ValidationMode.AUTO );
}
else {
final String[] modesInString = modeProperty.toString().split( "," );
for ( String modeInString : modesInString ) {
modes.add( getMode( modeInString ) );
modes.add( getMode(modeInString) );
}
}
if ( modes.size() > 1 && ( modes.contains( ValidationMode.AUTO ) || modes.contains( ValidationMode.NONE ) ) ) {
StringBuilder message = new StringBuilder( "Incompatible validation modes mixed: " );
for ( ValidationMode mode : modes ) {
message.append( mode ).append( ", " );
}
throw new HibernateException( message.substring( 0, message.length() - 2 ) );
throw new HibernateException( "Incompatible validation modes mixed: " + loggable( modes ) );
}
return modes;
}
private static ValidationMode getMode(String modeProperty) {
if ( modeProperty == null || modeProperty.length() == 0 ) {
if (modeProperty == null || modeProperty.length() == 0) {
return AUTO;
}
else {
@ -71,4 +75,17 @@ public enum ValidationMode {
}
}
}
public static String loggable(Set<ValidationMode> modes) {
if ( modes == null || modes.isEmpty() ) {
return "[<empty>]";
}
StringBuilder buffer = new StringBuilder( "[" );
String sep = "";
for ( ValidationMode mode : modes ) {
buffer.append( sep ).append( mode.externalForm );
sep = ", ";
}
return buffer.append( "]" ).toString();
}
}

View File

@ -134,7 +134,7 @@ public class PersistentArrayHolder extends AbstractPersistentCollection {
throws HibernateException, SQLException {
Object element = persister.readElement( rs, owner, descriptor.getSuffixedElementAliases(), getSession() );
int index = ( (Integer) persister.readIndex( rs, descriptor.getSuffixedIndexAliases(), getSession() ) ).intValue();
int index = (Integer) persister.readIndex( rs, descriptor.getSuffixedIndexAliases(), getSession() );
for ( int i = tempList.size(); i<=index; i++) {
tempList.add(i, null);
}
@ -205,7 +205,7 @@ public class PersistentArrayHolder extends AbstractPersistentCollection {
}
public Iterator getDeletes(CollectionPersister persister, boolean indexIsFormula) throws HibernateException {
java.util.List deletes = new ArrayList();
java.util.List<Integer> deletes = new ArrayList<Integer>();
Serializable sn = getSnapshot();
int snSize = Array.getLength(sn);
int arraySize = Array.getLength(array);

View File

@ -52,8 +52,6 @@ public class PersistentList extends AbstractPersistentCollection implements List
@Override
@SuppressWarnings( {"unchecked"})
public Serializable getSnapshot(CollectionPersister persister) throws HibernateException {
final EntityMode entityMode = persister.getOwnerEntityPersister().getEntityMode();
ArrayList clonedList = new ArrayList( list.size() );
for ( Object element : list ) {
Object deepCopy = persister.getElementType().deepCopy( element, persister.getFactory() );
@ -399,7 +397,7 @@ public class PersistentList extends AbstractPersistentCollection implements List
public Object readFrom(ResultSet rs, CollectionPersister persister, CollectionAliases descriptor, Object owner)
throws HibernateException, SQLException {
Object element = persister.readElement( rs, owner, descriptor.getSuffixedElementAliases(), getSession() ) ;
int index = ( (Integer) persister.readIndex( rs, descriptor.getSuffixedIndexAliases(), getSession() ) ).intValue();
int index = (Integer) persister.readIndex( rs, descriptor.getSuffixedIndexAliases(), getSession() );
//pad with nulls from the current last element up to the new index
for ( int i = list.size(); i<=index; i++) {

View File

@ -23,8 +23,9 @@
*/
package org.hibernate.context.internal;
import java.util.Hashtable;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.transaction.Synchronization;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
@ -67,7 +68,7 @@ import org.hibernate.engine.transaction.jta.platform.spi.JtaPlatform;
public class JTASessionContext extends AbstractCurrentSessionContext {
private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, JTASessionContext.class.getName());
private transient Map currentSessionMap = new Hashtable();
private transient Map<Object, Session> currentSessionMap = new ConcurrentHashMap<Object, Session>();
public JTASessionContext(SessionFactoryImplementor factory) {
super( factory );
@ -103,7 +104,7 @@ public class JTASessionContext extends AbstractCurrentSessionContext {
final Object txnIdentifier = jtaPlatform.getTransactionIdentifier( txn );
Session currentSession = ( Session ) currentSessionMap.get( txnIdentifier );
Session currentSession = currentSessionMap.get( txnIdentifier );
if ( currentSession == null ) {
currentSession = buildOrObtainSession();

View File

@ -25,12 +25,12 @@
package org.hibernate.criterion;
import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.dialect.MySQLDialect;
import org.hibernate.engine.spi.TypedValue;
/**
* Negates another criterion
* @author Gavin King
* @author Brett Meyer
*/
public class NotExpression implements Criterion {
@ -40,14 +40,9 @@ public class NotExpression implements Criterion {
this.criterion = criterion;
}
public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery)
throws HibernateException {
if ( criteriaQuery.getFactory().getDialect() instanceof MySQLDialect ) {
return "not (" + criterion.toSqlString(criteria, criteriaQuery) + ')';
}
else {
return "not " + criterion.toSqlString(criteria, criteriaQuery);
}
public String toSqlString(Criteria criteria, CriteriaQuery criteriaQuery) throws HibernateException {
return criteriaQuery.getFactory().getDialect().getNotExpression(
criterion.toSqlString( criteria, criteriaQuery ) );
}
public TypedValue[] getTypedValues(

View File

@ -28,21 +28,25 @@ import java.sql.Types;
import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.NullPrecedence;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.type.Type;
/**
* Represents an order imposed upon a <tt>Criteria</tt> result set
*
* @author Gavin King
* @author Lukasz Antoniak (lukasz dot antoniak at gmail dot com)
* @author Brett Meyer
*/
public class Order implements Serializable {
private boolean ascending;
private boolean ignoreCase;
private String propertyName;
private NullPrecedence nullPrecedence;
public String toString() {
return propertyName + ' ' + (ascending?"asc":"desc");
return propertyName + ' ' + ( ascending ? "asc" : "desc" ) + ( nullPrecedence != null ? ' ' + nullPrecedence.name().toLowerCase() : "" );
}
public Order ignoreCase() {
@ -50,6 +54,11 @@ public class Order implements Serializable {
return this;
}
public Order nulls(NullPrecedence nullPrecedence) {
this.nullPrecedence = nullPrecedence;
return this;
}
/**
* Constructor for Order.
*/
@ -68,20 +77,47 @@ public class Order implements Serializable {
Type type = criteriaQuery.getTypeUsingProjection(criteria, propertyName);
StringBuilder fragment = new StringBuilder();
for ( int i=0; i<columns.length; i++ ) {
final StringBuilder expression = new StringBuilder();
SessionFactoryImplementor factory = criteriaQuery.getFactory();
boolean lower = ignoreCase && type.sqlTypes( factory )[i]==Types.VARCHAR;
if (lower) {
fragment.append( factory.getDialect().getLowercaseFunction() )
.append('(');
boolean lower = false;
if ( ignoreCase ) {
int sqlType = type.sqlTypes( factory )[i];
lower = sqlType == Types.VARCHAR
|| sqlType == Types.CHAR
|| sqlType == Types.LONGVARCHAR;
}
fragment.append( columns[i] );
if (lower) fragment.append(')');
fragment.append( ascending ? " asc" : " desc" );
if (lower) {
expression.append( factory.getDialect().getLowercaseFunction() ).append('(');
}
expression.append( columns[i] );
if (lower) expression.append(')');
fragment.append(
factory.getDialect()
.renderOrderByElement(
expression.toString(),
null,
ascending ? "asc" : "desc",
nullPrecedence != null ? nullPrecedence : factory.getSettings().getDefaultNullPrecedence()
)
);
if ( i<columns.length-1 ) fragment.append(", ");
}
return fragment.toString();
}
public String getPropertyName() {
return propertyName;
}
public boolean isAscending() {
return ascending;
}
public boolean isIgnoreCase() {
return ignoreCase;
}
/**
* Ascending order
*

View File

@ -59,10 +59,18 @@ public class Property extends PropertyProjection {
return Restrictions.eq(getPropertyName(), value);
}
public Criterion eqOrIsNull(Object value) {
return Restrictions.eqOrIsNull(getPropertyName(), value);
}
public SimpleExpression ne(Object value) {
return Restrictions.ne(getPropertyName(), value);
}
public Criterion neOrIsNotNull(Object value) {
return Restrictions.neOrIsNotNull(getPropertyName(), value);
}
public SimpleExpression gt(Object value) {
return Restrictions.gt(getPropertyName(), value);
}

View File

@ -59,20 +59,46 @@ public class Restrictions {
* Apply an "equal" constraint to the named property
* @param propertyName
* @param value
* @return Criterion
* @return SimpleExpression
*/
public static SimpleExpression eq(String propertyName, Object value) {
return new SimpleExpression(propertyName, value, "=");
}
/**
* Apply an "equal" constraint to the named property. If the value
* is null, instead apply "is null".
* @param propertyName
* @param value
* @return Criterion
*/
public static Criterion eqOrIsNull(String propertyName, Object value) {
if (value == null) {
return isNull(propertyName);
}
return new SimpleExpression(propertyName, value, "=");
}
/**
* Apply a "not equal" constraint to the named property
* @param propertyName
* @param value
* @return Criterion
* @return SimpleExpression
*/
public static SimpleExpression ne(String propertyName, Object value) {
return new SimpleExpression(propertyName, value, "<>");
}
/**
* Apply a "not equal" constraint to the named property. If the value
* is null, instead apply "is not null".
* @param propertyName
* @param value
* @return Criterion
*/
public static Criterion neOrIsNotNull(String propertyName, Object value) {
if (value == null) {
return isNotNull(propertyName);
}
return new SimpleExpression(propertyName, value, "<>");
}
/**
* Apply a "like" constraint to the named property
* @param propertyName

View File

@ -463,4 +463,9 @@ public class DB2Dialect extends Dialect {
return uniqueDelegate;
}
@Override
public String getNotExpression( String expression ) {
return "not (" + expression + ")";
}
}

View File

@ -23,39 +23,14 @@
*/
package org.hibernate.dialect;
import java.io.InputStream;
import java.io.OutputStream;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.NClob;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.MappingException;
import org.hibernate.NullPrecedence;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.function.CastFunction;
import org.hibernate.dialect.function.SQLFunction;
import org.hibernate.dialect.function.SQLFunctionTemplate;
import org.hibernate.dialect.function.StandardAnsiSqlAggregationFunctions;
import org.hibernate.dialect.function.StandardSQLFunction;
import org.hibernate.dialect.lock.LockingStrategy;
import org.hibernate.dialect.lock.OptimisticForceIncrementLockingStrategy;
import org.hibernate.dialect.lock.OptimisticLockingStrategy;
import org.hibernate.dialect.lock.PessimisticForceIncrementLockingStrategy;
import org.hibernate.dialect.lock.PessimisticReadSelectLockingStrategy;
import org.hibernate.dialect.lock.PessimisticWriteSelectLockingStrategy;
import org.hibernate.dialect.lock.SelectLockingStrategy;
import org.hibernate.dialect.function.*;
import org.hibernate.dialect.lock.*;
import org.hibernate.dialect.pagination.LegacyLimitHandler;
import org.hibernate.dialect.pagination.LimitHandler;
import org.hibernate.dialect.unique.DefaultUniqueDelegate;
@ -96,11 +71,17 @@ import org.hibernate.tool.schema.internal.StandardSequenceExporter;
import org.hibernate.tool.schema.internal.StandardTableExporter;
import org.hibernate.tool.schema.internal.TemporaryTableExporter;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.sql.*;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.sql.ClobTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
import org.jboss.logging.Logger;
import java.io.InputStream;
import java.io.OutputStream;
import java.sql.*;
import java.util.*;
/**
* Represents a dialect of SQL implemented by a particular RDBMS.
* Subclasses implement Hibernate compatibility with different systems.<br>
@ -1212,6 +1193,8 @@ public abstract class Dialect implements ConversionContext {
case FORCE:
case PESSIMISTIC_FORCE_INCREMENT:
return getForUpdateNowaitString();
case UPGRADE_SKIPLOCKED:
return getForUpdateSkipLockedString();
default:
return "";
}
@ -1330,6 +1313,16 @@ public abstract class Dialect implements ConversionContext {
return getForUpdateString();
}
/**
* Retrieves the <tt>FOR UPDATE SKIP LOCKED</tt> syntax specific to this dialect.
*
* @return The appropriate <tt>FOR UPDATE SKIP LOCKED</tt> clause string.
*/
public String getForUpdateSkipLockedString() {
// by default we report no support for SKIP_LOCKED lock semantics
return getForUpdateString();
}
/**
* Get the <tt>FOR UPDATE OF column_list NOWAIT</tt> fragment appropriate
* for this dialect given the aliases of the columns to be write locked.
@ -1341,6 +1334,17 @@ public abstract class Dialect implements ConversionContext {
return getForUpdateString( aliases );
}
/**
* Get the <tt>FOR UPDATE OF column_list SKIP LOCKED</tt> fragment appropriate
* for this dialect given the aliases of the columns to be write locked.
*
* @param aliases The columns to be write locked.
* @return The appropriate <tt>FOR UPDATE colunm_list SKIP LOCKED</tt> clause string.
*/
public String getForUpdateSkipLockedString(String aliases) {
return getForUpdateString( aliases );
}
/**
* Some dialects support an alternative means to <tt>SELECT FOR UPDATE</tt>,
* whereby a "lock hint" is appends to the table name in the from clause.
@ -2060,6 +2064,18 @@ public abstract class Dialect implements ConversionContext {
return false;
}
public String getDropTableString( String tableName ) {
StringBuilder buf = new StringBuilder( "drop table " );
if ( supportsIfExistsBeforeTableName() ) {
buf.append( "if exists " );
}
buf.append( tableName ).append( getCascadeConstraintsString() );
if ( supportsIfExistsAfterTableName() ) {
buf.append( " if exists" );
}
return buf.toString();
}
/**
* Does this dialect support column-level check constraints?
*
@ -2202,6 +2218,30 @@ public abstract class Dialect implements ConversionContext {
return false;
}
/**
* @param expression The SQL order expression. In case of {@code @OrderBy} annotation user receives property placeholder
* (e.g. attribute name enclosed in '{' and '}' signs).
* @param collation Collation string in format {@code collate IDENTIFIER}, or {@code null}
* if expression has not been explicitly specified.
* @param order Order direction. Possible values: {@code asc}, {@code desc}, or {@code null}
* if expression has not been explicitly specified.
* @param nulls Nulls precedence. Default value: {@link NullPrecedence#NONE}.
* @return Renders single element of {@code ORDER BY} clause.
*/
public String renderOrderByElement(String expression, String collation, String order, NullPrecedence nulls) {
final StringBuilder orderByElement = new StringBuilder( expression );
if ( collation != null ) {
orderByElement.append( " " ).append( collation );
}
if ( order != null ) {
orderByElement.append( " " ).append( order );
}
if ( nulls != NullPrecedence.NONE ) {
orderByElement.append( " nulls " ).append( nulls.name().toLowerCase() );
}
return orderByElement.toString();
}
/**
* Does this dialect require that parameters appearing in the <tt>SELECT</tt> clause be wrapped in <tt>cast()</tt>
* calls to tell the db parser the expected type.
@ -2439,4 +2479,53 @@ public abstract class Dialect implements ConversionContext {
public UniqueDelegate getUniqueDelegate() {
return uniqueDelegate;
}
public String getNotExpression( String expression ) {
return "not " + expression;
}
/**
* Does this dialect support the <tt>UNIQUE</tt> column syntax?
*
* @return boolean
*
* @deprecated {@link #getUniqueDelegate()} should be overridden instead.
*/
@Deprecated
public boolean supportsUnique() {
return true;
}
/**
* Does this dialect support adding Unique constraints via create and alter table ?
*
* @return boolean
*
* @deprecated {@link #getUniqueDelegate()} should be overridden instead.
*/
@Deprecated
public boolean supportsUniqueConstraintInCreateAlterTable() {
return true;
}
/**
* The syntax used to add a unique constraint to a table.
*
* @param constraintName The name of the unique constraint.
* @return The "add unique" fragment
*
* @deprecated {@link #getUniqueDelegate()} should be overridden instead.
*/
@Deprecated
public String getAddUniqueConstraintString(String constraintName) {
return " add constraint " + constraintName + " unique ";
}
/**
* @deprecated {@link #getUniqueDelegate()} should be overridden instead.
*/
@Deprecated
public boolean supportsNotNullUnique() {
return true;
}
}

View File

@ -25,6 +25,7 @@ package org.hibernate.dialect;
import java.sql.Types;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.function.NoArgSQLFunction;
import org.hibernate.dialect.function.VarArgsSQLFunction;
import org.hibernate.type.StandardBasicTypes;
@ -55,6 +56,7 @@ public class InterbaseDialect extends Dialect {
registerColumnType( Types.CLOB, "blob sub_type 1" );
registerFunction( "concat", new VarArgsSQLFunction( StandardBasicTypes.STRING, "(","||",")" ) );
registerFunction("current_date", new NoArgSQLFunction("current_date", StandardBasicTypes.DATE, false) );
getDefaultProperties().setProperty(Environment.STATEMENT_BATCH_SIZE, NO_BATCH);
}

View File

@ -29,6 +29,7 @@ import java.sql.SQLException;
import java.sql.Types;
import org.hibernate.JDBCException;
import org.hibernate.NullPrecedence;
import org.hibernate.cfg.Environment;
import org.hibernate.dialect.function.NoArgSQLFunction;
import org.hibernate.dialect.function.StandardSQLFunction;
@ -333,6 +334,24 @@ public class MySQLDialect extends Dialect {
return true;
}
@Override
public String renderOrderByElement(String expression, String collation, String order, NullPrecedence nulls) {
	// MySQL has no native NULLS FIRST / NULLS LAST. Emulate it by prepending a
	// "case when <expr> is null then 0/1 else 1/0 end" sort key ahead of the real element.
	final StringBuilder orderByElement = new StringBuilder();
	if ( nulls != NullPrecedence.NONE ) {
		// FIRST: nulls sort as 0 (before non-nulls); LAST: nulls sort as 1 (after non-nulls).
		final String nullMarker = nulls == NullPrecedence.FIRST ? "0 else 1" : "1 else 0";
		orderByElement.append( "case when " )
				.append( expression )
				.append( " is null then " )
				.append( nullMarker )
				.append( " end, " );
	}
	// Null precedence was already emulated above, so delegate with NONE.
	orderByElement.append( super.renderOrderByElement( expression, collation, order, NullPrecedence.NONE ) );
	return orderByElement.toString();
}
// locking support
@ -396,4 +415,9 @@ public class MySQLDialect extends Dialect {
}
};
}
@Override
public String getNotExpression( String expression ) {
	// Parenthesize the operand so NOT binds to the whole expression.
	return "not (" + expression + ")";
}
}

View File

@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.dialect;
import org.hibernate.LockOptions;
import org.hibernate.sql.ANSIJoinFragment;
import org.hibernate.sql.JoinFragment;
@ -45,4 +46,21 @@ public class Oracle10gDialect extends Oracle9iDialect {
public JoinFragment createOuterJoinFragment() {
return new ANSIJoinFragment();
}
/**
 * Builds the write-lock fragment for the given timeout, routing the special
 * {@link LockOptions#SKIP_LOCKED} value to Oracle's "skip locked" syntax.
 *
 * @param timeout The lock timeout (may be a special LockOptions constant).
 * @return The write-lock SQL fragment.
 */
public String getWriteLockString(int timeout) {
	// Only SKIP_LOCKED needs Oracle-specific handling; everything else is inherited.
	return timeout == LockOptions.SKIP_LOCKED
			? getForUpdateSkipLockedString()
			: super.getWriteLockString( timeout );
}
public String getForUpdateSkipLockedString() {
	// Leading space: this fragment is appended directly to the generated SELECT.
	return " for update skip locked";
}
public String getForUpdateSkipLockedString(String aliases) {
	// Restrict the lock to the given aliases and skip rows locked by other transactions.
	return getForUpdateString() + " of " + aliases + " skip locked";
}
}

View File

@ -581,4 +581,9 @@ public class Oracle8iDialect extends Dialect {
public boolean useFollowOnLocking() {
return true;
}
@Override
public String getNotExpression( String expression ) {
	// Parenthesize the operand so NOT binds to the whole expression.
	return "not (" + expression + ")";
}
}

View File

@ -104,6 +104,7 @@ public class PostgreSQL81Dialect extends Dialect {
registerFunction( "variance", new StandardSQLFunction("variance", StandardBasicTypes.DOUBLE) );
registerFunction( "random", new NoArgSQLFunction("random", StandardBasicTypes.DOUBLE) );
registerFunction( "rand", new NoArgSQLFunction("random", StandardBasicTypes.DOUBLE) );
registerFunction( "round", new StandardSQLFunction("round") );
registerFunction( "trunc", new StandardSQLFunction("trunc") );
@ -118,7 +119,7 @@ public class PostgreSQL81Dialect extends Dialect {
registerFunction( "to_ascii", new StandardSQLFunction("to_ascii") );
registerFunction( "quote_ident", new StandardSQLFunction("quote_ident", StandardBasicTypes.STRING) );
registerFunction( "quote_literal", new StandardSQLFunction("quote_literal", StandardBasicTypes.STRING) );
registerFunction( "md5", new StandardSQLFunction("md5") );
registerFunction( "md5", new StandardSQLFunction("md5", StandardBasicTypes.STRING) );
registerFunction( "ascii", new StandardSQLFunction("ascii", StandardBasicTypes.INTEGER) );
registerFunction( "char_length", new StandardSQLFunction("char_length", StandardBasicTypes.LONG) );
registerFunction( "bit_length", new StandardSQLFunction("bit_length", StandardBasicTypes.LONG) );
@ -354,7 +355,7 @@ public class PostgreSQL81Dialect extends Dialect {
private static ViolatedConstraintNameExtracter EXTRACTER = new TemplatedViolatedConstraintNameExtracter() {
public String extractConstraintName(SQLException sqle) {
try {
int sqlState = Integer.valueOf( JdbcExceptionHelper.extractSqlState( sqle )).intValue();
int sqlState = Integer.valueOf( JdbcExceptionHelper.extractSqlState( sqle ) );
switch (sqlState) {
// CHECK VIOLATION
case 23514: return extractUsingTemplate("violates check constraint \"","\"", sqle.getMessage());

View File

@ -64,7 +64,6 @@ public class SQLServer2005Dialect extends SQLServerDialect {
registerColumnType( Types.BIGINT, "bigint" );
registerColumnType( Types.BIT, "bit" );
registerColumnType( Types.BOOLEAN, "bit" );
registerFunction( "row_number", new NoArgSQLFunction( "row_number", StandardBasicTypes.INTEGER, true ) );

View File

@ -47,6 +47,7 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
registerColumnType( Types.VARBINARY, 8000, "varbinary($l)" );
registerColumnType( Types.LONGVARBINARY, "image" );
registerColumnType( Types.LONGVARCHAR, "text" );
registerColumnType( Types.BOOLEAN, "bit" );
registerFunction( "second", new SQLFunctionTemplate( StandardBasicTypes.INTEGER, "datepart(second, ?1)" ) );
registerFunction( "minute", new SQLFunctionTemplate( StandardBasicTypes.INTEGER, "datepart(minute, ?1)" ) );
@ -133,6 +134,8 @@ public class SQLServerDialect extends AbstractTransactSQLDialect {
return tableName + " with (updlock, rowlock)";
case PESSIMISTIC_READ:
return tableName + " with (holdlock, rowlock)";
case UPGRADE_SKIPLOCKED:
return tableName + " with (updlock, rowlock, readpast)";
default:
return tableName;
}

View File

@ -26,6 +26,7 @@ package org.hibernate.dialect;
import java.sql.Types;
import org.hibernate.type.descriptor.sql.BlobTypeDescriptor;
import org.hibernate.type.descriptor.sql.ClobTypeDescriptor;
import org.hibernate.type.descriptor.sql.SqlTypeDescriptor;
@ -48,6 +49,19 @@ public class SybaseDialect extends AbstractTransactSQLDialect {
@Override
protected SqlTypeDescriptor getSqlTypeDescriptorOverride(int sqlCode) {
return sqlCode == Types.BLOB ? BlobTypeDescriptor.PRIMITIVE_ARRAY_BINDING : super.getSqlTypeDescriptorOverride( sqlCode );
switch (sqlCode) {
case Types.BLOB:
return BlobTypeDescriptor.PRIMITIVE_ARRAY_BINDING;
case Types.CLOB:
// Some Sybase drivers cannot support getClob. See HHH-7889
return ClobTypeDescriptor.STREAM_BINDING_EXTRACTING;
default:
return super.getSqlTypeDescriptorOverride( sqlCode );
}
}
@Override
public String getNullColumnString() {
return " null";
}
}

View File

@ -22,6 +22,7 @@
* Boston, MA 02110-1301 USA
*/
package org.hibernate.dialect.function;
import java.util.HashMap;
import java.util.Map;
@ -33,8 +34,7 @@ public class SQLFunctionRegistry {
public SQLFunctionRegistry(Dialect dialect, Map<String, SQLFunction> userFunctions) {
this.dialect = dialect;
this.userFunctions = new HashMap<String, SQLFunction>();
this.userFunctions.putAll( userFunctions );
this.userFunctions = new HashMap<String, SQLFunction>( userFunctions );
}
public SQLFunction findSQLFunction(String functionName) {
@ -42,7 +42,7 @@ public class SQLFunctionRegistry {
SQLFunction userFunction = userFunctions.get( name );
return userFunction != null
? userFunction
: (SQLFunction) dialect.getFunctions().get( name );
: dialect.getFunctions().get( name );
}
public boolean hasFunction(String functionName) {

View File

@ -84,7 +84,7 @@ public class TemplateRenderer {
chunks = chunkList.toArray( new String[chunkList.size()] );
paramIndexes = new int[paramList.size()];
for ( int i = 0; i < paramIndexes.length; ++i ) {
paramIndexes[i] = paramList.get( i ).intValue();
paramIndexes[i] = paramList.get( i );
}
}

View File

@ -54,11 +54,18 @@ public abstract class AbstractSelectLockingStrategy implements LockingStrategy {
protected abstract String generateLockString(int lockTimeout);
protected String determineSql(int timeout) {
return timeout == LockOptions.WAIT_FOREVER
? waitForeverSql
: timeout == LockOptions.NO_WAIT
? getNoWaitSql()
: generateLockString( timeout );
if ( timeout == LockOptions.WAIT_FOREVER) {
return waitForeverSql;
}
else if ( timeout == LockOptions.NO_WAIT) {
return getNoWaitSql();
}
else if ( timeout == LockOptions.SKIP_LOCKED) {
return getSkipLockedSql();
}
else {
return generateLockString( timeout );
}
}
private String noWaitSql;
@ -69,4 +76,13 @@ public abstract class AbstractSelectLockingStrategy implements LockingStrategy {
}
return noWaitSql;
}
// Lazily-built cache of the SKIP_LOCKED variant of the lock string.
private String skipLockedSql;

public String getSkipLockedSql() {
	// Lazy initialization; regeneration would produce the identical string, so no synchronization here.
	if ( skipLockedSql == null ) {
		skipLockedSql = generateLockString( LockOptions.SKIP_LOCKED );
	}
	return skipLockedSql;
}
}

View File

@ -85,7 +85,7 @@ public class PessimisticReadSelectLockingStrategy extends AbstractSelectLockingS
);
}
ResultSet rs = st.executeQuery();
ResultSet rs = session.getTransactionCoordinator().getJdbcCoordinator().getResultSetReturn().extract( st );
try {
if ( !rs.next() ) {
if ( factory.getStatistics().isStatisticsEnabled() ) {
@ -96,11 +96,11 @@ public class PessimisticReadSelectLockingStrategy extends AbstractSelectLockingS
}
}
finally {
rs.close();
session.getTransactionCoordinator().getJdbcCoordinator().release( rs );
}
}
finally {
st.close();
session.getTransactionCoordinator().getJdbcCoordinator().release( st );
}
}

View File

@ -104,7 +104,7 @@ public class PessimisticReadUpdateLockingStrategy implements LockingStrategy {
lockable.getVersionType().nullSafeSet( st, version, offset, session );
}
int affected = st.executeUpdate();
int affected = session.getTransactionCoordinator().getJdbcCoordinator().getResultSetReturn().executeUpdate( st );
if ( affected < 0 ) { // todo: should this instead check for exactly one row modified?
if (factory.getStatistics().isStatisticsEnabled()) {
factory.getStatisticsImplementor().optimisticFailure( lockable.getEntityName() );
@ -114,7 +114,7 @@ public class PessimisticReadUpdateLockingStrategy implements LockingStrategy {
}
finally {
st.close();
session.getTransactionCoordinator().getJdbcCoordinator().release( st );
}
}

View File

@ -84,7 +84,7 @@ public class PessimisticWriteSelectLockingStrategy extends AbstractSelectLocking
);
}
ResultSet rs = st.executeQuery();
ResultSet rs = session.getTransactionCoordinator().getJdbcCoordinator().getResultSetReturn().extract( st );
try {
if ( !rs.next() ) {
if ( factory.getStatistics().isStatisticsEnabled() ) {
@ -95,11 +95,11 @@ public class PessimisticWriteSelectLockingStrategy extends AbstractSelectLocking
}
}
finally {
rs.close();
session.getTransactionCoordinator().getJdbcCoordinator().release( rs );
}
}
finally {
st.close();
session.getTransactionCoordinator().getJdbcCoordinator().release( st );
}
}
catch ( SQLException e ) {

View File

@ -103,7 +103,7 @@ public class PessimisticWriteUpdateLockingStrategy implements LockingStrategy {
lockable.getVersionType().nullSafeSet( st, version, offset, session );
}
int affected = st.executeUpdate();
int affected = session.getTransactionCoordinator().getJdbcCoordinator().getResultSetReturn().executeUpdate( st );
if ( affected < 0 ) { // todo: should this instead check for exactly one row modified?
if (factory.getStatistics().isStatisticsEnabled()) {
factory.getStatisticsImplementor().optimisticFailure( lockable.getEntityName() );
@ -113,7 +113,7 @@ public class PessimisticWriteUpdateLockingStrategy implements LockingStrategy {
}
finally {
st.close();
session.getTransactionCoordinator().getJdbcCoordinator().release( st );
}
}
catch ( SQLException e ) {

View File

@ -85,7 +85,7 @@ public class SelectLockingStrategy extends AbstractSelectLockingStrategy {
);
}
ResultSet rs = st.executeQuery();
ResultSet rs = session.getTransactionCoordinator().getJdbcCoordinator().getResultSetReturn().extract( st );
try {
if ( !rs.next() ) {
if ( factory.getStatistics().isStatisticsEnabled() ) {
@ -96,11 +96,11 @@ public class SelectLockingStrategy extends AbstractSelectLockingStrategy {
}
}
finally {
rs.close();
session.getTransactionCoordinator().getJdbcCoordinator().release( rs );
}
}
finally {
st.close();
session.getTransactionCoordinator().getJdbcCoordinator().release( st );
}
}

View File

@ -106,7 +106,7 @@ public class UpdateLockingStrategy implements LockingStrategy {
lockable.getVersionType().nullSafeSet( st, version, offset, session );
}
int affected = st.executeUpdate();
int affected = session.getTransactionCoordinator().getJdbcCoordinator().getResultSetReturn().executeUpdate( st );
if ( affected < 0 ) {
if (factory.getStatistics().isStatisticsEnabled()) {
factory.getStatisticsImplementor().optimisticFailure( lockable.getEntityName() );
@ -116,7 +116,7 @@ public class UpdateLockingStrategy implements LockingStrategy {
}
finally {
st.close();
session.getTransactionCoordinator().getJdbcCoordinator().release( st );
}
}

View File

@ -121,10 +121,12 @@ public class SQLServer2005LimitHandler extends AbstractLimitHandler {
/**
* Adds missing aliases in provided SELECT clause and returns comma-separated list of them.
* If query takes advantage of expressions like {@literal *} or {@literal {table}.*} inside SELECT clause,
* method returns {@literal *}.
*
* @param sb SQL query.
*
* @return List of aliases separated with comas.
* @return List of aliases separated with comas or {@literal *}.
*/
protected String fillAliasInSelectClause(StringBuilder sb) {
final List<String> aliases = new LinkedList<String>();
@ -133,6 +135,7 @@ public class SQLServer2005LimitHandler extends AbstractLimitHandler {
int nextComa = startPos;
int prevComa = startPos;
int unique = 0;
boolean selectsMultipleColumns = false;
while ( nextComa != -1 ) {
prevComa = nextComa;
@ -142,30 +145,51 @@ public class SQLServer2005LimitHandler extends AbstractLimitHandler {
}
if ( nextComa != -1 ) {
String expression = sb.substring( prevComa, nextComa );
String alias = getAlias( expression );
if ( alias == null ) {
// Inserting alias. It is unlikely that we would have to add alias, but just in case.
alias = StringHelper.generateAlias( "page", unique );
sb.insert( nextComa, " as " + alias );
++unique;
nextComa += ( " as " + alias ).length();
if ( selectsMultipleColumns( expression ) ) {
selectsMultipleColumns = true;
}
else {
String alias = getAlias( expression );
if ( alias == null ) {
// Inserting alias. It is unlikely that we would have to add alias, but just in case.
alias = StringHelper.generateAlias( "page", unique );
sb.insert( nextComa, " as " + alias );
++unique;
nextComa += ( " as " + alias ).length();
}
aliases.add( alias );
}
aliases.add( alias );
++nextComa;
}
}
// Processing last column.
endPos = shallowIndexOfWord( sb, FROM, startPos ); // Refreshing end position, because we might have inserted new alias.
String expression = sb.substring( prevComa, endPos );
String alias = getAlias( expression );
if ( alias == null ) {
// Inserting alias. It is unlikely that we would have to add alias, but just in case.
alias = StringHelper.generateAlias( "page", unique );
sb.insert( endPos - 1, " as " + alias );
if ( selectsMultipleColumns( expression ) ) {
selectsMultipleColumns = true;
}
else {
String alias = getAlias( expression );
if ( alias == null ) {
// Inserting alias. It is unlikely that we would have to add alias, but just in case.
alias = StringHelper.generateAlias( "page", unique );
sb.insert( endPos - 1, " as " + alias );
}
aliases.add( alias );
}
aliases.add( alias );
return StringHelper.join( ", ", aliases.iterator() );
// In case of '*' or '{table}.*' expressions adding an alias breaks SQL syntax, returning '*'.
return selectsMultipleColumns ? "*" : StringHelper.join( ", ", aliases.iterator() );
}
/**
 * Determines whether the given select expression selects every column of a table,
 * i.e. whether it ends with {@literal *} or {@literal {table}.*}.
 *
 * @param expression Select expression.
 *
 * @return {@code true} when expression selects multiple columns, {@code false} otherwise.
 */
private boolean selectsMultipleColumns(String expression) {
	// Reduce the expression to its last whitespace-delimited token; the greedy (.)* consumes
	// everything up to the final whitespace separator.
	final String lastExpr = expression.trim().replaceFirst( "(?i)(.)*\\s", "" );
	if ( "*".equals( lastExpr ) ) {
		return true;
	}
	return lastExpr.endsWith( ".*" );
}
/**
@ -179,7 +203,9 @@ public class SQLServer2005LimitHandler extends AbstractLimitHandler {
private String getAlias(String expression) {
Matcher matcher = ALIAS_PATTERN.matcher( expression );
if ( matcher.find() ) {
return matcher.group( 0 ).replaceFirst( "(?i)\\sas\\s", "" ).trim();
// Taking advantage of Java regular expressions greedy behavior while extracting the last AS keyword.
// Note that AS keyword can appear in CAST operator, e.g. 'cast(tab1.col1 as varchar(255)) as col1'.
return matcher.group( 0 ).replaceFirst( "(?i)(.)*\\sas\\s", "" ).trim();
}
return null;
}

View File

@ -45,7 +45,7 @@ public class DB2UniqueDelegate extends DefaultUniqueDelegate {
if ( hasNullable( uniqueKey ) ) {
return org.hibernate.mapping.Index.buildSqlCreateIndexString(
dialect, uniqueKey.getName(), uniqueKey.getTable(),
uniqueKey.columnIterator(), true, defaultCatalog,
uniqueKey.columnIterator(), uniqueKey.getColumnOrderMap(), true, defaultCatalog,
defaultSchema );
} else {
return super.applyUniquesOnAlter(
@ -110,9 +110,9 @@ public class DB2UniqueDelegate extends DefaultUniqueDelegate {
}
private boolean hasNullable( org.hibernate.mapping.UniqueKey uniqueKey ) {
Iterator iter = uniqueKey.getColumnIterator();
Iterator<org.hibernate.mapping.Column> iter = uniqueKey.columnIterator();
while ( iter.hasNext() ) {
if ( ( ( org.hibernate.mapping.Column ) iter.next() ).isNullable() ) {
if ( iter.next().isNullable() ) {
return true;
}
}
@ -120,9 +120,8 @@ public class DB2UniqueDelegate extends DefaultUniqueDelegate {
}
private boolean hasNullable( UniqueKey uniqueKey ) {
Iterator iter = uniqueKey.getColumns().iterator();
while ( iter.hasNext() ) {
if ( ( ( Column ) iter.next() ).isNullable() ) {
for ( Column column : uniqueKey.getColumns() ) {
if ( column.isNullable() ) {
return true;
}
}

View File

@ -115,11 +115,14 @@ public class DefaultUniqueDelegate implements UniqueDelegate {
public String uniqueConstraintSql( org.hibernate.mapping.UniqueKey uniqueKey ) {
StringBuilder sb = new StringBuilder();
sb.append( " unique (" );
Iterator columnIterator = uniqueKey.getColumnIterator();
Iterator<org.hibernate.mapping.Column> columnIterator = uniqueKey.columnIterator();
while ( columnIterator.hasNext() ) {
org.hibernate.mapping.Column column
= (org.hibernate.mapping.Column) columnIterator.next();
= columnIterator.next();
sb.append( column.getQuotedName( dialect ) );
if ( uniqueKey.getColumnOrderMap().containsKey( column ) ) {
sb.append( " " ).append( uniqueKey.getColumnOrderMap().get( column ) );
}
if ( columnIterator.hasNext() ) {
sb.append( ", " );
}

View File

@ -255,13 +255,17 @@ public final class Cascade {
final EntityEntry valueEntry = eventSource
.getPersistenceContext().getEntry(
loadedValue );
final String entityName = valueEntry.getPersister().getEntityName();
if ( LOG.isTraceEnabled() ) {
final Serializable id = valueEntry.getPersister().getIdentifier( loadedValue, eventSource );
final String description = MessageHelper.infoString( entityName, id );
LOG.tracev( "Deleting orphaned entity instance: {0}", description );
// Need to check this in case the context has
// already been flushed. See HHH-7829.
if ( valueEntry != null ) {
final String entityName = valueEntry.getPersister().getEntityName();
if ( LOG.isTraceEnabled() ) {
final Serializable id = valueEntry.getPersister().getIdentifier( loadedValue, eventSource );
final String description = MessageHelper.infoString( entityName, id );
LOG.tracev( "Deleting orphaned entity instance: {0}", description );
}
eventSource.delete( entityName, loadedValue, false, new HashSet() );
}
eventSource.delete( entityName, loadedValue, false, new HashSet() );
}
}
}

View File

@ -0,0 +1,422 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2013, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.engine.internal;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.IdentityHashMap;
import java.util.Map;
import org.jboss.logging.Logger;
import org.hibernate.AssertionFailure;
import org.hibernate.LockMode;
import org.hibernate.engine.spi.EntityEntry;
import org.hibernate.engine.spi.ManagedEntity;
/**
 * Defines a context for maintaining the relation between an entity associated with the Session ultimately owning this
 * EntityEntryContext instance and that entity's corresponding EntityEntry. 2 approaches are supported:<ul>
 * <li>
 * the entity->EntityEntry association is maintained in a Map within this class
 * </li>
 * <li>
 * the EntityEntry is injected into the entity via it implementing the {@link org.hibernate.engine.spi.ManagedEntity} contract,
 * either directly or through bytecode enhancement.
 * </li>
 * </ul>
 * <p/>
 *
 * @author Steve Ebersole
 */
public class EntityEntryContext {
	private static final Logger log = Logger.getLogger( EntityEntryContext.class );

	// Head and tail of a doubly-linked list of ManagedEntity nodes; gives O(1) add/remove.
	private transient ManagedEntity head;
	private transient ManagedEntity tail;
	private transient int count = 0;

	// Identity-based cross-reference for entities that do NOT implement ManagedEntity themselves;
	// maps the raw entity instance to its ManagedEntityImpl wrapper.
	private transient IdentityHashMap<Object,ManagedEntity> nonEnhancedEntityXref;

	// Cached snapshot of (entity, EntityEntry) pairs handed out to iterating callers;
	// rebuilt lazily whenever `dirty` is set.
	@SuppressWarnings( {"unchecked"})
	private transient Map.Entry<Object,EntityEntry>[] reentrantSafeEntries = new Map.Entry[0];
	private transient boolean dirty = false;

	/**
	 * Constructs an empty context.
	 */
	public EntityEntryContext() {
	}

	/**
	 * Associates the given EntityEntry with the given entity, linking the entity into
	 * this context if it is not already tracked.
	 *
	 * @param entity The entity instance (enhanced or not).
	 * @param entityEntry The EntityEntry to associate with the entity.
	 */
	public void addEntityEntry(Object entity, EntityEntry entityEntry) {
		// IMPORTANT!!!!!
		// add is called more than once of some entities. In such cases the first
		// call is simply setting up a "marker" to avoid infinite looping from reentrancy
		//
		// any addition (even the double one described above) should invalidate the cross-ref array
		dirty = true;

		// determine the appropriate ManagedEntity instance to use based on whether the entity is enhanced or not.
		// also track whether the entity was already associated with the context
		final ManagedEntity managedEntity;
		final boolean alreadyAssociated;
		if ( ManagedEntity.class.isInstance( entity ) ) {
			// enhanced entity: it IS its own ManagedEntity node
			managedEntity = (ManagedEntity) entity;
			alreadyAssociated = managedEntity.$$_hibernate_getEntityEntry() != null;
		}
		else {
			// non-enhanced entity: look up (or create) a wrapper in the identity map
			ManagedEntity wrapper = null;
			if ( nonEnhancedEntityXref == null ) {
				nonEnhancedEntityXref = new IdentityHashMap<Object, ManagedEntity>();
			}
			else {
				wrapper = nonEnhancedEntityXref.get( entity );
			}

			if ( wrapper == null ) {
				wrapper = new ManagedEntityImpl( entity );
				nonEnhancedEntityXref.put( entity, wrapper );
				alreadyAssociated = false;
			}
			else {
				alreadyAssociated = true;
			}

			managedEntity = wrapper;
		}

		// associate the EntityEntry with the entity
		managedEntity.$$_hibernate_setEntityEntry( entityEntry );

		if ( alreadyAssociated ) {
			// if the entity was already associated with the context, skip the linking step.
			return;
		}

		// finally, set up linking and count
		if ( tail == null ) {
			// first node: list was empty
			assert head == null;
			head = managedEntity;
			tail = head;
			count = 1;
		}
		else {
			// append at the tail
			tail.$$_hibernate_setNextManagedEntity( managedEntity );
			managedEntity.$$_hibernate_setPreviousManagedEntity( tail );
			tail = managedEntity;
			count++;
		}
	}

	/**
	 * Does this context hold an EntityEntry for the given entity?
	 *
	 * @param entity The entity to check.
	 * @return {@code true} if an EntityEntry is associated with the entity.
	 */
	public boolean hasEntityEntry(Object entity) {
		return getEntityEntry( entity ) != null;
	}

	/**
	 * Retrieves the EntityEntry associated with the given entity, or {@code null} if none.
	 *
	 * @param entity The entity whose entry is sought.
	 * @return The associated EntityEntry, or {@code null}.
	 */
	public EntityEntry getEntityEntry(Object entity) {
		// resolve the ManagedEntity node for the entity, if any
		final ManagedEntity managedEntity;
		if ( ManagedEntity.class.isInstance( entity ) ) {
			managedEntity = (ManagedEntity) entity;
		}
		else if ( nonEnhancedEntityXref == null ) {
			managedEntity = null;
		}
		else {
			managedEntity = nonEnhancedEntityXref.get( entity );
		}

		return managedEntity == null
				? null
				: managedEntity.$$_hibernate_getEntityEntry();
	}

	/**
	 * Removes the given entity from this context, unlinking it from the internal list.
	 *
	 * @param entity The entity to remove.
	 * @return The EntityEntry that was associated with the entity, or {@code null} if it was not tracked.
	 */
	public EntityEntry removeEntityEntry(Object entity) {
		// any removal invalidates the cross-ref array
		dirty = true;

		final ManagedEntity managedEntity;
		if ( ManagedEntity.class.isInstance( entity ) ) {
			managedEntity = (ManagedEntity) entity;
		}
		else if ( nonEnhancedEntityXref == null ) {
			managedEntity = null;
		}
		else {
			managedEntity = nonEnhancedEntityXref.remove( entity );
		}

		if ( managedEntity == null ) {
			return null;
		}

		// prepare for re-linking...
		ManagedEntity previous = managedEntity.$$_hibernate_getPreviousManagedEntity();
		ManagedEntity next = managedEntity.$$_hibernate_getNextManagedEntity();
		managedEntity.$$_hibernate_setPreviousManagedEntity( null );
		managedEntity.$$_hibernate_setNextManagedEntity( null );

		count--;

		if ( count == 0 ) {
			// handle as a special case...
			head = null;
			tail = null;

			assert previous == null;
			assert next == null;
		}
		else {
			// otherwise, previous or next (or both) should be non-null
			if ( previous == null ) {
				// we are removing head
				assert managedEntity == head;
				head = next;
			}
			else {
				previous.$$_hibernate_setNextManagedEntity( next );
			}

			if ( next == null ) {
				// we are removing tail
				assert managedEntity == tail;
				tail = previous;
			}
			else {
				next.$$_hibernate_setPreviousManagedEntity( previous );
			}
		}

		// detach the entry from the node and hand it back
		EntityEntry theEntityEntry = managedEntity.$$_hibernate_getEntityEntry();
		managedEntity.$$_hibernate_setEntityEntry( null );
		return theEntityEntry;
	}

	/**
	 * Returns a snapshot array of (entity, EntityEntry) pairs that is safe to iterate even
	 * while entries are added/removed from this context (reentrancy). The snapshot is cached
	 * and only rebuilt when the context has changed since the last call.
	 *
	 * @return The snapshot array.
	 */
	public Map.Entry<Object, EntityEntry>[] reentrantSafeEntityEntries() {
		if ( dirty ) {
			// walk the linked list and rebuild the snapshot
			reentrantSafeEntries = new EntityEntryCrossRefImpl[count];
			int i = 0;
			ManagedEntity managedEntity = head;
			while ( managedEntity != null ) {
				reentrantSafeEntries[i++] = new EntityEntryCrossRefImpl(
						managedEntity.$$_hibernate_getEntityInstance(),
						managedEntity.$$_hibernate_getEntityEntry()
				);
				managedEntity = managedEntity.$$_hibernate_getNextManagedEntity();
			}
			dirty = false;
		}
		return reentrantSafeEntries;
	}

	/**
	 * Clears this context: detaches every tracked entity's EntityEntry, unlinks all nodes,
	 * and resets counters and caches.
	 */
	public void clear() {
		dirty = true;

		ManagedEntity node = head;
		while ( node != null ) {
			// capture next before unlinking this node
			final ManagedEntity nextNode = node.$$_hibernate_getNextManagedEntity();

			node.$$_hibernate_setEntityEntry( null );
			node.$$_hibernate_setPreviousManagedEntity( null );
			node.$$_hibernate_setNextManagedEntity( null );

			node = nextNode;
		}

		if ( nonEnhancedEntityXref != null ) {
			nonEnhancedEntityXref.clear();
		}

		head = null;
		tail = null;
		count = 0;

		reentrantSafeEntries = null;
	}

	/**
	 * Sets the lock mode of every tracked EntityEntry to {@link LockMode#NONE}.
	 */
	public void downgradeLocks() {
		if ( head == null ) {
			return;
		}

		ManagedEntity node = head;
		while ( node != null ) {
			node.$$_hibernate_getEntityEntry().setLockMode( LockMode.NONE );

			node = node.$$_hibernate_getNextManagedEntity();
		}
	}

	/**
	 * Writes this context to the given stream: the entry count followed by, for each node,
	 * an "is enhanced" flag, the entity instance, and its serialized EntityEntry.
	 *
	 * @param oos The output stream to write to.
	 *
	 * @throws IOException on stream errors.
	 */
	public void serialize(ObjectOutputStream oos) throws IOException {
		log.tracef( "Starting serialization of [%s] EntityEntry entries", count );
		oos.writeInt( count );
		if ( count == 0 ) {
			return;
		}

		ManagedEntity managedEntity = head;
		while ( managedEntity != null ) {
			// so we know whether or not to build a ManagedEntityImpl on deserialize
			oos.writeBoolean( managedEntity == managedEntity.$$_hibernate_getEntityInstance() );
			oos.writeObject( managedEntity.$$_hibernate_getEntityInstance() );
			managedEntity.$$_hibernate_getEntityEntry().serialize( oos );

			managedEntity = managedEntity.$$_hibernate_getNextManagedEntity();
		}
	}

	/**
	 * Reads a context previously written by {@link #serialize}, rebuilding the linked list
	 * (and the non-enhanced cross-reference map where needed).
	 *
	 * @param ois The input stream to read from.
	 * @param rtn The owning persistence context, passed through to EntityEntry deserialization.
	 * @return The reconstructed context.
	 *
	 * @throws IOException on stream errors.
	 * @throws ClassNotFoundException if a serialized entity class cannot be resolved.
	 */
	public static EntityEntryContext deserialize(ObjectInputStream ois, StatefulPersistenceContext rtn) throws IOException, ClassNotFoundException {
		final int count = ois.readInt();
		log.tracef( "Starting deserialization of [%s] EntityEntry entries", count );

		final EntityEntryContext context = new EntityEntryContext();
		context.count = count;
		// force a rebuild of the reentrant-safe snapshot on first use
		context.dirty = true;

		if ( count == 0 ) {
			return context;
		}

		ManagedEntity previous = null;

		for ( int i = 0; i < count; i++ ) {
			final boolean isEnhanced = ois.readBoolean();
			final Object entity = ois.readObject();
			final EntityEntry entry = EntityEntry.deserialize( ois, rtn );
			final ManagedEntity managedEntity;
			if ( isEnhanced ) {
				// enhanced entity: the instance is its own node
				managedEntity = (ManagedEntity) entity;
			}
			else {
				// non-enhanced entity: recreate the wrapper and the cross-reference
				managedEntity = new ManagedEntityImpl( entity );
				if ( context.nonEnhancedEntityXref == null ) {
					context.nonEnhancedEntityXref = new IdentityHashMap<Object, ManagedEntity>();
				}
				context.nonEnhancedEntityXref.put( entity, managedEntity );
			}
			managedEntity.$$_hibernate_setEntityEntry( entry );

			// re-link in read order
			if ( previous == null ) {
				context.head = managedEntity;
			}
			else {
				previous.$$_hibernate_setNextManagedEntity( managedEntity );
				managedEntity.$$_hibernate_setPreviousManagedEntity( previous );
			}

			previous = managedEntity;
		}

		context.tail = previous;

		return context;
	}

	/**
	 * @return The number of managed entities currently tracked by this context.
	 */
	public int getNumberOfManagedEntities() {
		return count;
	}

	/**
	 * ManagedEntity wrapper used for entity instances that do not implement
	 * {@link ManagedEntity} themselves (non-enhanced entities).
	 */
	private static class ManagedEntityImpl implements ManagedEntity {
		private final Object entityInstance;
		private EntityEntry entityEntry;
		private ManagedEntity previous;
		private ManagedEntity next;

		public ManagedEntityImpl(Object entityInstance) {
			this.entityInstance = entityInstance;
		}

		@Override
		public Object $$_hibernate_getEntityInstance() {
			return entityInstance;
		}

		@Override
		public EntityEntry $$_hibernate_getEntityEntry() {
			return entityEntry;
		}

		@Override
		public void $$_hibernate_setEntityEntry(EntityEntry entityEntry) {
			this.entityEntry = entityEntry;
		}

		@Override
		public ManagedEntity $$_hibernate_getNextManagedEntity() {
			return next;
		}

		@Override
		public void $$_hibernate_setNextManagedEntity(ManagedEntity next) {
			this.next = next;
		}

		@Override
		public ManagedEntity $$_hibernate_getPreviousManagedEntity() {
			return previous;
		}

		@Override
		public void $$_hibernate_setPreviousManagedEntity(ManagedEntity previous) {
			this.previous = previous;
		}
	}

	/**
	 * Immutable-entity pairing of an entity with its EntityEntry, exposed through the
	 * {@link java.util.Map.Entry} contract for the reentrant-safe snapshot array.
	 */
	private static class EntityEntryCrossRefImpl implements EntityEntryCrossRef {
		private final Object entity;
		private EntityEntry entityEntry;

		private EntityEntryCrossRefImpl(Object entity, EntityEntry entityEntry) {
			this.entity = entity;
			this.entityEntry = entityEntry;
		}

		@Override
		public Object getEntity() {
			return entity;
		}

		@Override
		public EntityEntry getEntityEntry() {
			return entityEntry;
		}

		@Override
		public Object getKey() {
			return getEntity();
		}

		@Override
		public EntityEntry getValue() {
			return getEntityEntry();
		}

		@Override
		public EntityEntry setValue(EntityEntry entityEntry) {
			// Map.Entry contract: replace the value, returning the old one.
			final EntityEntry old = this.entityEntry;
			this.entityEntry = entityEntry;
			return old;
		}
	}

	/**
	 * Pairing of an entity and its EntityEntry, viewable as a {@link java.util.Map.Entry}.
	 */
	public static interface EntityEntryCrossRef extends Map.Entry<Object,EntityEntry> {
		public Object getEntity();
		public EntityEntry getEntityEntry();
	}
}

Some files were not shown because too many files have changed in this diff Show More