Migrate User Guide from Docbook to Asciidoctor
This commit is contained in:
parent 446aa84429
commit c2ece0108e

@ -35,7 +35,7 @@ configurations {
    asciidoclet {
        description = 'Dependencies for Asciidoclet (the javadoc doclet tool for using Asciidoc)'
    }
    // asciidoctor
    //asciidoctor
}

if ( JavaVersion.current().isJava8Compatible() ) {

@ -231,6 +231,48 @@ task renderGettingStartedGuides(type: AsciidoctorTask, group: 'Documentation') {
    attributes icons: 'font', experimental: true, 'source-highlighter': 'prettify'
}

// Mapping Guides ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

task renderMappingGuide(type: AsciidoctorTask, group: 'Documentation') {
    description = 'Renders the Mapping Guides in HTML format using Asciidoctor.'
    sourceDir = file( 'src/main/asciidoc/mapping' )
    outputDir = new File("$buildDir/asciidoc/mapping/html")
    backends "html5"
    separateOutputDirs false
    options logDocuments: true
    //attributes icons: 'font', experimental: true, 'source-highlighter': 'prettify', linkcss: true, stylesheet: "css/hibernate.css"
    attributes icons: 'font', experimental: true, 'source-highlighter': 'prettify', linkcss: true
    resources {
        from('src/main/asciidoc/') {
            include 'images/**'
            include 'css/**'
        }
    }
}

// User Guides ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

task renderUserGuide(type: AsciidoctorTask, group: 'Documentation') {
    description = 'Renders the User Guides in HTML format using Asciidoctor.'
    sourceDir = file( 'src/main/asciidoc/userguide' )
    outputDir = new File("$buildDir/asciidoc/userguide/html")
    backends "html5"
    separateOutputDirs false
    options logDocuments: true
    //attributes icons: 'font', experimental: true, 'source-highlighter': 'prettify', linkcss: true, stylesheet: "css/hibernate.css"
    attributes icons: 'font', experimental: true, 'source-highlighter': 'prettify', linkcss: true
    resources {
        from('src/main/asciidoc/') {
            include 'images/**'
            include 'css/**'
        }
        from('src/main/asciidoc/userguide/') {
            include 'images/**'
        }
    }
}


buildDocs.dependsOn renderGettingStartedGuides

task buildTutorialZip(type: Zip) {

@ -31,7 +31,7 @@ initial revision as well as the updated revision. A revision refers to a histor


[[hibernate-gsg-tutorial-envers-test-api]]
.Using the org.hibernate.envers.AuditReader
.Using the `org.hibernate.envers.AuditReader`
====
[source, JAVA]
----

@ -145,7 +145,7 @@ start-up of an application and closed at the end of the application lifecycle.


[[hibernate-gsg-tutorial-basic-test-setUp]]
.Obtaining the org.hibernate.SessionFactory
.Obtaining the `org.hibernate.SessionFactory`
====
[source, JAVA]
----

@ -31,7 +31,7 @@ Configuration cfg = new Configuration()
    .addResource("Bid.hbm.xml")

    // calls addResource using "/org/hibernate/auction/User.hbm.xml"
    .addClass(org.hibernate.auction.User.class)
    .addClass(`org.hibernate.auction.User.class`)

    // parses Address class for mapping annotations
    .addAnnotatedClass( Address.class )

@ -611,4 +611,4 @@ StandardServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder()

== Conclusion

Blah, blah, blah...
TODO

@ -0,0 +1,6 @@
== References

[bibliography]
- [[[PoEAA]]] Martin Fowler. Patterns of Enterprise Application Architecture. Addison-Wesley Publishing Company. 2003.
- [[[JPwH]]] Christian Bauer & Gavin King. http://www.manning.com/bauer2[Java Persistence with Hibernate]. Manning Publications Co. 2007.

@ -0,0 +1,33 @@
= Hibernate User Guide
:toc:
:toclevels: 3

include::Preface.adoc[]

:numbered:

include::chapters/architecture/Architecture.adoc[]
include::chapters/domain/DomainModel.adoc[]
include::chapters/bootstrap/Bootstrap.adoc[]
include::chapters/pc/PersistenceContext.adoc[]
include::chapters/jdbc/Database_Access.adoc[]
include::chapters/transactions/Transactions.adoc[]
include::chapters/jndi/JNDI.adoc[]
include::chapters/locking/Locking.adoc[]
include::chapters/fetching/Fetching.adoc[]
include::chapters/batch/Batching.adoc[]
include::chapters/caching/Caching.adoc[]
include::chapters/events/Events.adoc[]
include::chapters/query-hql/HQL.adoc[]
include::chapters/query-criteria/Criteria.adoc[]
include::chapters/query-native/Native.adoc[]
include::chapters/multitenancy/Multi_Tenancy.adoc[]
include::chapters/osgi/OSGi.adoc[]
include::chapters/envers/Envers.adoc[]
include::chapters/portability/Portability.adoc[]

include::appendices/Legacy_Bootstrap.adoc[]
include::appendices/Legacy_Criteria.adoc[]

include::Bibliography.adoc[]

documentation/src/main/asciidoc/userguide/Preface.adoc (new file, 60 lines)

@ -0,0 +1,60 @@
[[preface]]
== Preface

Developing Object-Oriented software that deals with data from Relational
Databases can be cumbersome and resource-consuming. Development costs
are significantly higher due to a paradigm mismatch between how data is
represented in objects versus relational databases. Hibernate is an
Object/Relational Mapping (ORM) solution for Java environments. ORM
refers to the technique of mapping data between an object model
representation and a relational data model representation. See
http://en.wikipedia.org/wiki/Object-relational_mapping[Wikipedia] for a
good high-level discussion. Also, Martin Fowler's
http://martinfowler.com/bliki/OrmHate.html[OrmHate] article takes a look
at many of the mentioned mismatch problems.

Although having a strong background in SQL is not required to use
Hibernate, having a basic understanding of the concepts can help you
understand Hibernate more quickly and fully. An understanding of data
modeling principles is especially important. Both
http://www.agiledata.org/essays/dataModeling101.html and
http://en.wikipedia.org/wiki/Data_modeling are good starting points for
understanding these data modeling principles.

Understanding the basics of transactions and design patterns such as
"Unit of Work" <<Bibliography.adoc#PoEAA,PoEAA>> or "Application Transaction" are important as well.
These topics will be discussed in the documentation, but a prior
understanding will certainly help.

Hibernate not only takes care of the mapping from Java classes to
database tables (and from Java data types to SQL data types), but also
provides data query and retrieval facilities. It can significantly
reduce development time otherwise spent with manual data handling in SQL
and JDBC. Hibernate's design goal is to relieve the developer from 95%
of common data persistence-related programming tasks by eliminating the
need for manual, hand-crafted data processing using SQL and JDBC.
However, unlike many other persistence solutions, Hibernate does not
hide the power of SQL from you and guarantees that your investment in
relational technology and knowledge is as valid as always.

Hibernate may not be the best solution for data-centric applications
that only use stored procedures to implement the business logic in the
database; it is most useful with object-oriented domain models and
business logic in the Java-based middle-tier. However, Hibernate can
certainly help you to remove or encapsulate vendor-specific SQL code and
will help with the common task of result set translation from a tabular
representation to a graph of objects.

See http://hibernate.org/orm/contribute/ for information on getting
involved.


[TIP]
====
If you are just getting started with Hibernate, you may want to
start with the Hibernate Getting Started Guide available from the
http://hibernate.org/orm/documentation[documentation page]. It contains
quick-start style tutorials as well as lots of introductory information.
There is also a series of topical guides providing deep dives into
various topics.
====

@ -0,0 +1,78 @@
[[appendix-legacy-bootstrap]]
== Legacy Bootstrapping

The legacy way to bootstrap a `SessionFactory` is via the `org.hibernate.cfg.Configuration` object.
`Configuration` represents, essentially, a single point for specifying all aspects of building the `SessionFactory`: everything from settings, to mappings, to strategies, etc.
I like to think of `Configuration` as a big pot to which we add a bunch of stuff (mappings, settings, etc.) and from which we eventually get a `SessionFactory`.

[NOTE]
====
There are some significant drawbacks to this approach, which led to its deprecation and the development of the new approach, which is discussed in <<chapters/bootstrap/Bootstrap.adoc#bootstrap-native,Native Bootstrapping>>.
`Configuration` is semi-deprecated but still available for use, in a limited form that eliminates these drawbacks.
"Under the covers", `Configuration` uses the new bootstrapping code, so the things available there are also available here in terms of auto-discovery.
====

You can obtain the `Configuration` by instantiating it directly.
You then specify mapping metadata (XML mapping documents, annotated classes) that describes your application's object model and its mapping to a SQL database.

[source,java]
----
Configuration cfg = new Configuration()
    // addResource does a classpath resource lookup
    .addResource("Item.hbm.xml")
    .addResource("Bid.hbm.xml")

    // calls addResource using "/org/hibernate/auction/User.hbm.xml"
    .addClass(org.hibernate.auction.User.class)

    // parses Address class for mapping annotations
    .addAnnotatedClass( Address.class )

    // reads package-level (package-info.class) annotations in the named package
    .addPackage( "org.hibernate.auction" )

    .setProperty("hibernate.dialect", "org.hibernate.dialect.H2Dialect")
    .setProperty("hibernate.connection.datasource", "java:comp/env/jdbc/test")
    .setProperty("hibernate.order_updates", "true");
----

There are other ways to specify `Configuration` information, including the following (a brief sketch follows the list):

* Place a file named `hibernate.properties` in a root directory of the classpath
* Pass an instance of `java.util.Properties` to `Configuration#setProperties`
* Via a `hibernate.cfg.xml` file
* System properties using java `-Dproperty=value`
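
The following is an editor's sketch, not part of the commit: it combines the `hibernate.cfg.xml` and `java.util.Properties` options with the fluent style shown above. `Configuration#configure()` reads `hibernate.cfg.xml` from the classpath root; the property values are purely illustrative.

[source,java]
----
// Editor's sketch: hibernate.cfg.xml plus an explicit java.util.Properties instance.
Properties extra = new Properties();
extra.setProperty( "hibernate.show_sql", "true" );

SessionFactory sessionFactory = new Configuration()
    .configure()            // reads hibernate.cfg.xml from the classpath root
    .addProperties( extra ) // merges the java.util.Properties built above
    .buildSessionFactory();
----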

== Migration

Mapping `Configuration` methods to the corresponding methods in the new APIs:

|===
|`Configuration#addFile`|`Configuration#addFile`
|`Configuration#add(XmlDocument)`|`Configuration#add(XmlDocument)`
|`Configuration#addXML`|`Configuration#addXML`
|`Configuration#addCacheableFile`|`Configuration#addCacheableFile`
|`Configuration#addURL`|`Configuration#addURL`
|`Configuration#addInputStream`|`Configuration#addInputStream`
|`Configuration#addResource`|`Configuration#addResource`
|`Configuration#addClass`|`Configuration#addClass`
|`Configuration#addAnnotatedClass`|`Configuration#addAnnotatedClass`
|`Configuration#addPackage`|`Configuration#addPackage`
|`Configuration#addJar`|`Configuration#addJar`
|`Configuration#addDirectory`|`Configuration#addDirectory`
|`Configuration#registerTypeContributor`|`Configuration#registerTypeContributor`
|`Configuration#registerTypeOverride`|`Configuration#registerTypeOverride`
|`Configuration#setProperty`|`Configuration#setProperty`
|`Configuration#setProperties`|`Configuration#setProperties`
|`Configuration#addProperties`|`Configuration#addProperties`
|`Configuration#setNamingStrategy`|`Configuration#setNamingStrategy`
|`Configuration#setImplicitNamingStrategy`|`Configuration#setImplicitNamingStrategy`
|`Configuration#setPhysicalNamingStrategy`|`Configuration#setPhysicalNamingStrategy`
|`Configuration#configure`|`Configuration#configure`
|`Configuration#setInterceptor`|`Configuration#setInterceptor`
|`Configuration#setEntityNotFoundDelegate`|`Configuration#setEntityNotFoundDelegate`
|`Configuration#setSessionFactoryObserver`|`Configuration#setSessionFactoryObserver`
|`Configuration#setCurrentTenantIdentifierResolver`|`Configuration#setCurrentTenantIdentifierResolver`
|===

@ -0,0 +1,487 @@
[[appendix-legacy-criteria]]
== Legacy Hibernate Criteria Queries

[IMPORTANT]
====
This appendix covers the legacy Hibernate `org.hibernate.Criteria` API, which should be considered deprecated.

New development should focus on the JPA `javax.persistence.criteria.CriteriaQuery` API.
Eventually, Hibernate-specific criteria features will be ported as extensions to the JPA `javax.persistence.criteria.CriteriaQuery`.
For details on the JPA APIs, see <<chapters/query-criteria/Criteria.adoc#criteria, Criteria>>.
====

Hibernate features an intuitive, extensible criteria query API.

[[querycriteria-creating]]
=== Creating a `Criteria` instance

The interface `org.hibernate.Criteria` represents a query against a particular persistent class.
The `Session` is a factory for `Criteria` instances.

[source,java]
----
Criteria crit = sess.createCriteria(Cat.class);
crit.setMaxResults(50);
List cats = crit.list();
----

[[querycriteria-narrowing]]
=== Narrowing the result set

An individual query criterion is an instance of the interface `org.hibernate.criterion.Criterion`.
The class `org.hibernate.criterion.Restrictions` defines factory methods for obtaining certain built-in `Criterion` types.

[source,java]
----
List cats = sess.createCriteria(Cat.class)
    .add( Restrictions.like("name", "Fritz%") )
    .add( Restrictions.between("weight", minWeight, maxWeight) )
    .list();
----

Restrictions can be grouped logically.

[source,java]
----
List cats = sess.createCriteria(Cat.class)
    .add( Restrictions.like("name", "Fritz%") )
    .add( Restrictions.or(
        Restrictions.eq( "age", new Integer(0) ),
        Restrictions.isNull("age")
    ) )
    .list();
----

[source,java]
----
List cats = sess.createCriteria(Cat.class)
    .add( Restrictions.in( "name", new String[] { "Fritz", "Izi", "Pk" } ) )
    .add( Restrictions.disjunction()
        .add( Restrictions.isNull("age") )
        .add( Restrictions.eq("age", new Integer(0) ) )
        .add( Restrictions.eq("age", new Integer(1) ) )
        .add( Restrictions.eq("age", new Integer(2) ) )
    )
    .list();
----

There are a range of built-in criterion types (`Restrictions` subclasses).
One of the most useful allows you to specify SQL directly.

[source,java]
----
List cats = sess.createCriteria(Cat.class)
    .add( Restrictions.sqlRestriction("lower({alias}.name) like lower(?)", "Fritz%", Hibernate.STRING) )
    .list();
----

The `{alias}` placeholder will be replaced by the row alias of the queried entity.

You can also obtain a criterion from a `Property` instance.
You can create a `Property` by calling `Property.forName()`:

[source,java]
----
Property age = Property.forName("age");
List cats = sess.createCriteria(Cat.class)
    .add( Restrictions.disjunction()
        .add( age.isNull() )
        .add( age.eq( new Integer(0) ) )
        .add( age.eq( new Integer(1) ) )
        .add( age.eq( new Integer(2) ) )
    )
    .add( Property.forName("name").in( new String[] { "Fritz", "Izi", "Pk" } ) )
    .list();
----

[[querycriteria-ordering]]
=== Ordering the results

You can order the results using `org.hibernate.criterion.Order`.

[source,java]
----
List cats = sess.createCriteria(Cat.class)
    .add( Restrictions.like("name", "F%") )
    .addOrder( Order.asc("name").nulls(NullPrecedence.LAST) )
    .addOrder( Order.desc("age") )
    .setMaxResults(50)
    .list();
----

[source,java]
----
List cats = sess.createCriteria(Cat.class)
    .add( Property.forName("name").like("F%") )
    .addOrder( Property.forName("name").asc() )
    .addOrder( Property.forName("age").desc() )
    .setMaxResults(50)
    .list();
----

[[querycriteria-associations]]
=== Associations

By navigating associations using `createCriteria()` you can specify constraints upon related entities:

[source,java]
----
List cats = sess.createCriteria(Cat.class)
    .add( Restrictions.like("name", "F%") )
    .createCriteria("kittens")
        .add( Restrictions.like("name", "F%") )
    .list();
----

The second `createCriteria()` returns a new instance of `Criteria` that refers to the elements of the `kittens` collection.

There is also an alternate form that is useful in certain circumstances:

[source,java]
----
List cats = sess.createCriteria(Cat.class)
    .createAlias("kittens", "kt")
    .createAlias("mate", "mt")
    .add( Restrictions.eqProperty("kt.name", "mt.name") )
    .list();
----

(`createAlias()` does not create a new instance of `Criteria`.)

The kittens collections held by the `Cat` instances returned by the previous two queries are _not_ pre-filtered by the criteria.
If you want to retrieve just the kittens that match the criteria, you must use a `ResultTransformer`.

[source,java]
----
List cats = sess.createCriteria(Cat.class)
    .createCriteria("kittens", "kt")
        .add( Restrictions.eq("name", "F%") )
    .setResultTransformer(Criteria.ALIAS_TO_ENTITY_MAP)
    .list();
Iterator iter = cats.iterator();
while ( iter.hasNext() ) {
    Map map = (Map) iter.next();
    Cat cat = (Cat) map.get(Criteria.ROOT_ALIAS);
    Cat kitten = (Cat) map.get("kt");
}
----

Additionally, you may manipulate the result set using a left outer join:

[source]
----
List cats = session.createCriteria( Cat.class )
    .createAlias("mate", "mt", Criteria.LEFT_JOIN, Restrictions.like("mt.name", "good%") )
    .addOrder(Order.asc("mt.age"))
    .list();
----

This will return all of the `Cat`s with a mate whose name starts with "good" ordered by their mate's age, and all cats who do not have a mate.
This is useful when there is a need to order or limit in the database prior to returning complex/large result sets,
and removes many instances where multiple queries would have to be performed and the results unioned in memory by the Java application.

Without this feature, first all of the cats without a mate would need to be loaded in one query.

A second query would need to retrieve the cats with mates whose name started with "good", sorted by the mates' age.

Third, the lists would need to be joined manually in memory.

[[querycriteria-dynamicfetching]]
=== Dynamic association fetching

You can specify association fetching semantics at runtime using `setFetchMode()`.

[source,java]
----
List cats = sess.createCriteria(Cat.class)
    .add( Restrictions.like("name", "Fritz%") )
    .setFetchMode("mate", FetchMode.EAGER)
    .setFetchMode("kittens", FetchMode.EAGER)
    .list();
----

This query will fetch both `mate` and `kittens` by outer join.

[[querycriteria-components]]
=== Components

To add a restriction against a property of an embedded component, the component property name should be prepended to the property name when creating the `Restriction`.
The criteria object should be created on the owning entity, and cannot be created on the component itself.
For example, suppose the `Cat` has a component property `fullName` with sub-properties `firstName` and `lastName`:

[source]
----
List cats = session.createCriteria(Cat.class)
    .add(Restrictions.eq("fullName.lastName", "Cattington"))
    .list();
----

Note: this does not apply when querying collections of components; for that, see <<querycriteria-collections>> below.

[[querycriteria-collections]]
=== Collections

When using criteria against collections, there are two distinct cases.
One is if the collection contains entities (e.g. `<one-to-many/>` or `<many-to-many/>`) or components (`<composite-element/>`),
and the second is if the collection contains scalar values (`<element/>`).
In the first case, the syntax is as given above in the section <<querycriteria-associations>> where we restrict the `kittens` collection.
Essentially, we create a `Criteria` object against the collection property and restrict the entity or component properties using that instance.

For querying a collection of basic values, we still create the `Criteria` object against the collection,
but to reference the value, we use the special property "elements".
For an indexed collection, we can also reference the index property using the special property "indices".

[source]
----
List cats = session.createCriteria(Cat.class)
    .createCriteria("nickNames")
    .add(Restrictions.eq("elements", "BadBoy"))
    .list();
----

[[querycriteria-examples]]
=== Example queries

The class `org.hibernate.criterion.Example` allows you to construct a query criterion from a given instance.

[source,java]
----
Cat cat = new Cat();
cat.setSex('F');
cat.setColor(Color.BLACK);
List results = session.createCriteria(Cat.class)
    .add( Example.create(cat) )
    .list();
----

Version properties, identifiers and associations are ignored.
By default, null-valued properties are excluded.

You can adjust how the `Example` is applied.

[source,java]
----
Example example = Example.create(cat)
    .excludeZeroes()          //exclude zero valued properties
    .excludeProperty("color") //exclude the property named "color"
    .ignoreCase()             //perform case insensitive string comparisons
    .enableLike();            //use like for string comparisons
List results = session.createCriteria(Cat.class)
    .add(example)
    .list();
----

You can even use examples to place criteria upon associated objects.

[source,java]
----
List results = session.createCriteria(Cat.class)
    .add( Example.create(cat) )
    .createCriteria("mate")
        .add( Example.create( cat.getMate() ) )
    .list();
----

[[querycriteria-projection]]
=== Projections, aggregation and grouping

The class `org.hibernate.criterion.Projections` is a factory for `Projection` instances.
You can apply a projection to a query by calling `setProjection()`.

[source,java]
----
List results = session.createCriteria(Cat.class)
    .setProjection( Projections.rowCount() )
    .add( Restrictions.eq("color", Color.BLACK) )
    .list();
----

[source,java]
----
List results = session.createCriteria(Cat.class)
    .setProjection( Projections.projectionList()
        .add( Projections.rowCount() )
        .add( Projections.avg("weight") )
        .add( Projections.max("weight") )
        .add( Projections.groupProperty("color") )
    )
    .list();
----

There is no explicit "group by" necessary in a criteria query.
Certain projection types are defined to be __grouping projections__, which also appear in the SQL `group by` clause.

An alias can be assigned to a projection so that the projected value can be referred to in restrictions or orderings.
Here are two different ways to do this:

[source,java]
----
List results = session.createCriteria(Cat.class)
    .setProjection( Projections.alias( Projections.groupProperty("color"), "colr" ) )
    .addOrder( Order.asc("colr") )
    .list();
----

[source,java]
----
List results = session.createCriteria(Cat.class)
    .setProjection( Projections.groupProperty("color").as("colr") )
    .addOrder( Order.asc("colr") )
    .list();
----

The `alias()` and `as()` methods simply wrap a projection instance in another, aliased, instance of `Projection`.
As a shortcut, you can assign an alias when you add the projection to a projection list:

[source,java]
----
List results = session.createCriteria(Cat.class)
    .setProjection( Projections.projectionList()
        .add( Projections.rowCount(), "catCountByColor" )
        .add( Projections.avg("weight"), "avgWeight" )
        .add( Projections.max("weight"), "maxWeight" )
        .add( Projections.groupProperty("color"), "color" )
    )
    .addOrder( Order.desc("catCountByColor") )
    .addOrder( Order.desc("avgWeight") )
    .list();
----

[source,java]
----
List results = session.createCriteria(Domestic.class, "cat")
    .createAlias("kittens", "kit")
    .setProjection( Projections.projectionList()
        .add( Projections.property("cat.name"), "catName" )
        .add( Projections.property("kit.name"), "kitName" )
    )
    .addOrder( Order.asc("catName") )
    .addOrder( Order.asc("kitName") )
    .list();
----

You can also use `Property.forName()` to express projections:

[source,java]
----
List results = session.createCriteria(Cat.class)
    .setProjection( Property.forName("name") )
    .add( Property.forName("color").eq(Color.BLACK) )
    .list();
----

[source,java]
----
List results = session.createCriteria(Cat.class)
    .setProjection( Projections.projectionList()
        .add( Projections.rowCount().as("catCountByColor") )
        .add( Property.forName("weight").avg().as("avgWeight") )
        .add( Property.forName("weight").max().as("maxWeight") )
        .add( Property.forName("color").group().as("color") )
    )
    .addOrder( Order.desc("catCountByColor") )
    .addOrder( Order.desc("avgWeight") )
    .list();
----

[[querycriteria-detachedqueries]]
=== Detached queries and subqueries

The `DetachedCriteria` class allows you to create a query outside the scope of a session and then execute it using an arbitrary `Session`.

[source,java]
----
DetachedCriteria query = DetachedCriteria.forClass(Cat.class)
    .add( Property.forName("sex").eq('F') );

Session session = ....;
Transaction txn = session.beginTransaction();
List results = query.getExecutableCriteria(session).setMaxResults(100).list();
txn.commit();
session.close();
----

A `DetachedCriteria` can also be used to express a subquery.
`Criterion` instances involving subqueries can be obtained via `Subqueries` or `Property`.

[source,java]
----
DetachedCriteria avgWeight = DetachedCriteria.forClass(Cat.class)
    .setProjection( Property.forName("weight").avg() );
session.createCriteria(Cat.class)
    .add( Property.forName("weight").gt(avgWeight) )
    .list();
----

[source,java]
----
DetachedCriteria weights = DetachedCriteria.forClass(Cat.class)
    .setProjection( Property.forName("weight") );
session.createCriteria(Cat.class)
    .add( Subqueries.geAll("weight", weights) )
    .list();
----

Correlated subqueries are also possible:

[source,java]
----
DetachedCriteria avgWeightForSex = DetachedCriteria.forClass(Cat.class, "cat2")
    .setProjection( Property.forName("weight").avg() )
    .add( Property.forName("cat2.sex").eqProperty("cat.sex") );
session.createCriteria(Cat.class, "cat")
    .add( Property.forName("weight").gt(avgWeightForSex) )
    .list();
----

Example of multi-column restriction based on a subquery:

[source,java]
----
DetachedCriteria sizeQuery = DetachedCriteria.forClass( Man.class )
    .setProjection( Projections.projectionList().add( Projections.property( "weight" ) )
        .add( Projections.property( "height" ) ) )
    .add( Restrictions.eq( "name", "John" ) );
session.createCriteria( Woman.class )
    .add( Subqueries.propertiesEq( new String[] { "weight", "height" }, sizeQuery ) )
    .list();
----

[[query-criteria-naturalid]]
=== Queries by natural identifier

For most queries, including criteria queries, the query cache is not efficient because query cache invalidation occurs too frequently.
However, there is a special kind of query where you can optimize the cache invalidation algorithm: lookups by a constant natural key.
In some applications, this kind of query occurs frequently.
The Criteria API provides special provision for this use case.

First, map the natural key of your entity using `<natural-id>` and enable use of the second-level cache.

[source,xml]
----
<class name="User">
    <cache usage="read-write"/>
    <id name="id">
        <generator class="increment"/>
    </id>
    <natural-id>
        <property name="name"/>
        <property name="org"/>
    </natural-id>
    <property name="password"/>
</class>
----

This functionality is not intended for use with entities with _mutable_ natural keys.

Once you have enabled the Hibernate query cache, `Restrictions.naturalId()` allows you to make use of the more efficient cache algorithm.

[source,java]
----
session.createCriteria(User.class)
    .add( Restrictions.naturalId()
        .set("name", "gavin")
        .set("org", "hb")
    ).setCacheable(true)
    .uniqueResult();
----

@ -0,0 +1,31 @@
[[architecture]]
== Architecture

[[architecture-overview]]
=== Overview

image:images/architecture/data_access_layers.svg[Data Access Layers]

Hibernate, as an ORM solution, effectively "sits between" the Java application data access layer and the Relational Database, as can be seen in the diagram above.
The Java application makes use of the Hibernate APIs to load, store, query, etc. its domain data.
Here we will introduce the essential Hibernate APIs (a short usage sketch follows the list).
This will be a brief introduction; we will discuss these contracts in detail later.

As a JPA provider, Hibernate implements the Java Persistence API specifications, and the association between JPA interfaces and Hibernate-specific implementations can be visualized in the following diagram:

image:images/architecture/JPA_Hibernate.svg[image]

SessionFactory (`org.hibernate.SessionFactory`):: A thread-safe (and immutable) representation of the mapping of the application domain model to a database.
Acts as a factory for `org.hibernate.Session` instances. The `EntityManagerFactory` is the JPA equivalent of a `SessionFactory` and basically those two converge into the same `SessionFactory` implementation.
+
A `SessionFactory` is very expensive to create, so, for any given database, the application should have only one associated `SessionFactory`.
The `SessionFactory` maintains services that Hibernate uses across all `Session(s)` such as second level caches, connection pools, transaction system integrations, etc.

Session (`org.hibernate.Session`):: A single-threaded, short-lived object conceptually modeling a "Unit of Work" <<Bibliography.adoc#PoEAA,PoEAA>>.
In JPA nomenclature, the `Session` is represented by an `EntityManager`.
+
Behind the scenes, the Hibernate `Session` wraps a JDBC `java.sql.Connection` and acts as a factory for `org.hibernate.Transaction` instances.
It maintains a generally "repeatable read" persistence context (first level cache) of the application's domain model.

Transaction (`org.hibernate.Transaction`):: A single-threaded, short-lived object used by the application to demarcate individual physical transaction boundaries.
`EntityTransaction` is the JPA equivalent and both act as an abstraction API to isolate the application from the underlying transaction system in use (JDBC or JTA).
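
As an editor's sketch (not part of the commit), the three contracts typically appear together as follows; the `Event` entity and its constructor are assumed purely for illustration:

[source,java]
----
// Editor's sketch: one SessionFactory per database, short-lived Session and Transaction.
Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();
session.persist( new Event( "Hibernate User Guide published" ) ); // Event is a hypothetical entity
tx.commit();
session.close();
----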

@ -0,0 +1,233 @@
[[batch]]
== Batching
:sourcedir: extras

[[batch-jdbcbatch]]
=== JDBC batching

JDBC offers support for batching together SQL statements that can be represented as a single `PreparedStatement`.
Implementation-wise, this generally means that drivers will send the batched operation to the server in one call,
which can save on network calls to the database. Hibernate can leverage JDBC batching.
The following settings control this behavior (a configuration sketch follows the list).

`hibernate.jdbc.batch_size`:: Controls the maximum number of statements Hibernate will batch together before asking the driver to execute the batch.
Zero or a negative number disables this feature.

`hibernate.jdbc.batch_versioned_data`:: Some JDBC drivers return incorrect row counts when a batch is executed.
If your JDBC driver falls into this category, this setting should be set to `false`.
Otherwise, it is safe to enable this, which will allow Hibernate to still batch the DML for versioned entities and still use the returned row counts for optimistic lock checks.
Currently defaults to `false` to be safe.

`hibernate.jdbc.batch.builder`:: Names the implementation class used to manage batching capabilities.
It is almost never a good idea to switch from Hibernate's default implementation.
But if you wish to, this setting would name the `org.hibernate.engine.jdbc.batch.spi.BatchBuilder` implementation to use.

`hibernate.order_updates`:: Forces Hibernate to order SQL updates by the entity type and the primary key value of the items being updated.
This allows for more batching to be used. It will also result in fewer transaction deadlocks in highly concurrent systems.
Comes with a performance hit, so benchmark before and after to see if this actually helps or hurts your application.

`hibernate.order_inserts`:: Forces Hibernate to order inserts to allow for more batching to be used.
Comes with a performance hit, so benchmark before and after to see if this actually helps or hurts your application.
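
As an illustration only (an editor's sketch, not part of the commit), these settings are ordinary configuration properties; with the legacy `Configuration` API described in the appendix they could be applied like this, using arbitrary example values:

[source,java]
----
// Editor's sketch: enable JDBC batching and insert/update ordering.
// A batch size between 10 and 50 is the range suggested later in this chapter.
SessionFactory sessionFactory = new Configuration()
    .configure()
    .setProperty( "hibernate.jdbc.batch_size", "20" )
    .setProperty( "hibernate.order_inserts", "true" )
    .setProperty( "hibernate.order_updates", "true" )
    .buildSessionFactory();
----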

[[batch-session-batch]]
=== Session batching

The following example shows an anti-pattern for batch inserts.

.Naive way to insert 100000 lines with Hibernate
====
[source,java]
----
include::{sourcedir}/batch_insert.java[]
----
====

This fails with an `OutOfMemoryException` after around 50000 rows on most systems.
The reason is that Hibernate caches all the newly inserted `Customer` instances in the session-level cache.

There are several ways to avoid this problem.
Before batch processing, enable JDBC batching. To enable JDBC batching, set the property `hibernate.jdbc.batch_size` to an integer between 10 and 50.

[NOTE]
====
Hibernate disables insert batching at the JDBC level transparently if you use an identity identifier generator.
====

If the above approach is not appropriate, you can disable the second-level cache by setting `hibernate.cache.use_second_level_cache` to `false`.

==== Batch inserts

When you make new objects persistent, employ the methods `flush()` and `clear()` on the session regularly, to control the size of the first-level cache.

.Flushing and clearing the `Session`
====
[source,java]
----
include::{sourcedir}/flush_and_clear_session.java[]
----
====

==== Batch updates

When you retrieve and update data, `flush()` and `clear()` the session regularly.
In addition, use the method `scroll()` to take advantage of server-side cursors for queries that return many rows of data.

.Using `scroll()`
====
[source,java]
----
include::{sourcedir}/using_scroll.java[]
----
====

==== StatelessSession

`StatelessSession` is a command-oriented API provided by Hibernate.
Use it to stream data to and from the database in the form of detached objects.
A `StatelessSession` has no persistence context associated with it and does not provide many of the higher-level life cycle semantics.

Some of the things not provided by a `StatelessSession` include:

* a first-level cache
* interaction with any second-level or query cache
* transactional write-behind or automatic dirty checking

Limitations of `StatelessSession`:

* Operations performed using a stateless session never cascade to associated instances.
* Collections are ignored by a stateless session.
* Operations performed via a stateless session bypass Hibernate's event model and interceptors.
* Due to the lack of a first-level cache, stateless sessions are vulnerable to data aliasing effects.
* A stateless session is a lower-level abstraction that is much closer to the underlying JDBC.

.Using a `StatelessSession`
====
[source,java]
----
include::{sourcedir}/using_a_StatelessSession.java[]
----
====

The `Customer` instances returned by the query are immediately detached.
They are never associated with any persistence context.

The `insert()`, `update()`, and `delete()` operations defined by the `StatelessSession` interface operate directly on database rows.
They cause the corresponding SQL operations to be executed immediately.
They have different semantics from the `save()`, `saveOrUpdate()`, and `delete()` operations defined by the `Session` interface.
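
For instance (an editor's sketch, not part of the commit; the `Customer` constructor argument is assumed), a stateless `insert()` is pushed to the database immediately and the returned object remains detached:

[source,java]
----
// Editor's sketch: insert() executes the INSERT right away and returns the
// generated identifier; no persistence context tracks the Customer afterwards.
StatelessSession statelessSession = sessionFactory.openStatelessSession();
Transaction tx = statelessSession.beginTransaction();
Serializable id = statelessSession.insert( new Customer( "Acme" ) );
tx.commit();
statelessSession.close();
----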

[[batch-bulk-hql]]
=== Hibernate Query Language for DML

DML, or Data Manipulation Language, refers to SQL statements such as `INSERT`, `UPDATE`, and `DELETE`.
Hibernate provides methods for bulk SQL-style DML statement execution, in the form of Hibernate Query Language (HQL).

==== HQL for UPDATE and DELETE

.Pseudo-syntax for UPDATE and DELETE statements using HQL
====
[source,java]
----
UPDATE FROM EntityName e WHERE e.name = ?

DELETE FROM EntityName e WHERE e.name = ?
----
====

[NOTE]
====
The `FROM` and `WHERE` clauses are each optional.
====

The `FROM` clause can only refer to a single entity, which can be aliased.
If the entity name is aliased, any property references must be qualified using that alias.
If the entity name is not aliased, then it is illegal for any property references to be qualified.

Joins, either implicit or explicit, are prohibited in a bulk HQL query.
You can use sub-queries in the `WHERE` clause, and the sub-queries themselves can contain joins.
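
To illustrate (an editor's sketch, not part of the commit; the `Customer`/`Order` model and its `cancelled` attribute are assumed), the bulk statement itself stays join-free while the sub-query does the navigation:

[source,java]
----
// Editor's sketch: the outer DELETE has no join; the sub-query in the WHERE
// clause navigates the assumed Order -> customer association instead.
int deletedEntities = session.createQuery(
        "delete Customer c where c.id in (select o.customer.id from Order o where o.cancelled = true)" )
        .executeUpdate();
----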

.Executing an HQL UPDATE, using `Query.executeUpdate()`
====
[source,java]
----
include::{sourcedir}/executeUpdate.java[]
----
====

In keeping with the EJB3 specification, HQL UPDATE statements, by default, do not affect the version or the timestamp property values for the affected entities.
You can use a versioned update to force Hibernate to reset the version or timestamp property values, by adding the `VERSIONED` keyword after the `UPDATE` keyword.

.Updating the version or timestamp
====
[source,java]
----
include::{sourcedir}/updating_version.java[]
----
====

[NOTE]
====
If you use the `VERSIONED` statement, you cannot use custom version types, which use class `org.hibernate.usertype.UserVersionType`.
====

.An HQL `DELETE` statement
====
[source,java]
----
include::{sourcedir}/hql_delete.java[]
----
====

Method `Query.executeUpdate()` returns an `int` value, which indicates the number of entities affected by the operation.
This may or may not correlate to the number of rows affected in the database.
An HQL bulk operation might result in multiple SQL statements being executed, such as for joined-subclass.
In the example of joined-subclass, a `DELETE` against one of the subclasses may actually result in deletes in the tables underlying the join, or further down the inheritance hierarchy.

==== HQL syntax for INSERT

.Pseudo-syntax for INSERT statements
====
[source,java]
----
INSERT INTO EntityName properties_list SELECT properties_list FROM ...
----
====

Only the `INSERT INTO ... SELECT ...` form is supported.
You cannot specify explicit values to insert.

The `properties_list` is analogous to the column specification in the SQL `INSERT` statement.
For entities involved in mapped inheritance, you can only use properties directly defined on that given class-level in the `properties_list`.
Superclass properties are not allowed and subclass properties are irrelevant.
In other words, `INSERT` statements are inherently non-polymorphic.

The SELECT statement can be any valid HQL select query, but the return types must match the types expected by the INSERT.
Hibernate verifies the return types during query compilation, instead of expecting the database to check it.
Problems might result from Hibernate types which are equivalent, rather than equal.
One such example is a mismatch between a property defined as an `org.hibernate.type.DateType` and a property defined as an `org.hibernate.type.TimestampType`,
even though the database may not make a distinction, or may be capable of handling the conversion.

If the id property is not specified in the `properties_list`,
Hibernate generates a value automatically.
Automatic generation is only available if you use ID generators which operate on the database.
Otherwise, Hibernate throws an exception during parsing.
Available in-database generators are `org.hibernate.id.SequenceGenerator` and its subclasses, and objects which implement `org.hibernate.id.PostInsertIdentifierGenerator`.
The most notable exception is `org.hibernate.id.TableHiLoGenerator`, which does not expose a selectable way to get its values.

For properties mapped as either version or timestamp, the insert statement gives you two options.
You can either specify the property in the `properties_list`, in which case its value is taken from the corresponding select expressions, or omit it from the `properties_list`,
in which case the seed value defined by the `org.hibernate.type.VersionType` is used.

.HQL INSERT statement
====
[source,java]
----
include::{sourcedir}/hql_insert.java[]
----
====

This section is only a brief overview of HQL. For more information, see <<chapters/query-hql/HQL.adoc#hql,HQL>>.

[[batch-bulk-jpql]]
=== Java Persistence Query Language for DML

TODO

@ -0,0 +1,8 @@
Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();
for ( int i=0; i<100000; i++ ) {
    Customer customer = new Customer(.....);
    session.save(customer);
}
tx.commit();
session.close();

@ -0,0 +1,11 @@
Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();

String hqlUpdate = "update Customer c set c.name = :newName where c.name = :oldName";
// or String hqlUpdate = "update Customer set name = :newName where name = :oldName";
int updatedEntities = session.createQuery( hqlUpdate )
    .setString( "newName", newName )
    .setString( "oldName", oldName )
    .executeUpdate();
tx.commit();
session.close();

@ -0,0 +1,15 @@
Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();

for ( int i=0; i<100000; i++ ) {
    Customer customer = new Customer(.....);
    session.save(customer);
    if ( i % 20 == 0 ) { //20, same as the JDBC batch size
        //flush a batch of inserts and release memory:
        session.flush();
        session.clear();
    }
}

tx.commit();
session.close();

@ -0,0 +1,10 @@
Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();

String hqlDelete = "delete Customer c where c.name = :oldName";
// or String hqlDelete = "delete Customer where name = :oldName";
int deletedEntities = session.createQuery( hqlDelete )
    .setString( "oldName", oldName )
    .executeUpdate();
tx.commit();
session.close();

@ -0,0 +1,8 @@
Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();

String hqlInsert = "insert into DelinquentAccount (id, name) select c.id, c.name from Customer c where ...";
int createdEntities = session.createQuery( hqlInsert )
    .executeUpdate();
tx.commit();
session.close();

@ -0,0 +1,9 @@
Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();
String hqlVersionedUpdate = "update versioned Customer set name = :newName where name = :oldName";
int updatedEntities = session.createQuery( hqlVersionedUpdate )
    .setString( "newName", newName )
    .setString( "oldName", oldName )
    .executeUpdate();
tx.commit();
session.close();

@ -0,0 +1,13 @@
StatelessSession session = sessionFactory.openStatelessSession();
Transaction tx = session.beginTransaction();

ScrollableResults customers = session.getNamedQuery("GetCustomers")
    .scroll(ScrollMode.FORWARD_ONLY);
while ( customers.next() ) {
    Customer customer = (Customer) customers.get(0);
    customer.updateStuff(...);
    session.update(customer);
}

tx.commit();
session.close();

@ -0,0 +1,19 @@
Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();

ScrollableResults customers = session.getNamedQuery("GetCustomers")
    .setCacheMode(CacheMode.IGNORE)
    .scroll(ScrollMode.FORWARD_ONLY);
int count=0;
while ( customers.next() ) {
    Customer customer = (Customer) customers.get(0);
    customer.updateStuff(...);
    if ( ++count % 20 == 0 ) {
        //flush a batch of updates and release memory:
        session.flush();
        session.clear();
    }
}

tx.commit();
session.close();

@ -0,0 +1,226 @@
[[bootstrap]]
== Bootstrap
:sourcedir: extras

The term bootstrapping refers to initializing and starting a software component.
In Hibernate, we are specifically talking about the process of building a fully functional `SessionFactory` instance or, for JPA, an `EntityManagerFactory` instance.
The process is very different for each.

[NOTE]
====
This chapter will not focus on all the possibilities of bootstrapping.
Those will be covered in each specific, more relevant chapter later on.
Instead, we focus here on the API calls needed to perform the bootstrapping.
====

[[bootstrap-native]]
=== Native Bootstrapping

This section discusses the process of bootstrapping a Hibernate `SessionFactory`.
Specifically, it discusses the bootstrapping APIs as redesigned in 5.0.
For a discussion of the legacy bootstrapping API, see <<appendices/Legacy_Bootstrap.adoc#appendix-legacy-bootstrap,Legacy Bootstrapping>>.

[[bootstrap-native-registry]]
==== Building the ServiceRegistry

The first step in native bootstrapping is the building of a `ServiceRegistry` holding the services Hibernate will need during bootstrapping and at run time.

Actually, we are concerned with building two different ServiceRegistries.
First is the `org.hibernate.boot.registry.BootstrapServiceRegistry`.
The `BootstrapServiceRegistry` is intended to hold services that Hibernate needs at both bootstrap and run time.
This boils down to 3 services:

`org.hibernate.boot.registry.classloading.spi.ClassLoaderService`:: which controls how Hibernate interacts with `ClassLoader`s
`org.hibernate.integrator.spi.IntegratorService`:: which controls the management and discovery of `org.hibernate.integrator.spi.Integrator` instances.
`org.hibernate.boot.registry.selector.spi.StrategySelector`:: which controls how Hibernate resolves implementations of various strategy contracts.
This is a very powerful service, but a full discussion of it is beyond the scope of this guide.

[NOTE]
====
If you are ok with the default behavior of Hibernate in regards to these `BootstrapServiceRegistry` services
(which is quite often the case, especially in stand-alone environments), then building the `BootstrapServiceRegistry` can be skipped.
====

If you wish to alter how the `BootstrapServiceRegistry` is built, that is controlled through the `org.hibernate.boot.registry.BootstrapServiceRegistryBuilder`:

.Controlling BootstrapServiceRegistry building
====
[source,java]
----
include::{sourcedir}/native1.java[]
----
====

[NOTE]
====
The services of the `BootstrapServiceRegistry` cannot be extended (added to) nor overridden (replaced).
====

The second ServiceRegistry is the `org.hibernate.boot.registry.StandardServiceRegistry`.
You will almost always need to configure the `StandardServiceRegistry`, which is done through `org.hibernate.boot.registry.StandardServiceRegistryBuilder`:

.Building a StandardServiceRegistryBuilder
====
[source,java]
----
include::{sourcedir}/native2.java[]
----

[source,java]
----
include::{sourcedir}/native3.java[]
----
====

A `StandardServiceRegistry` is also highly configurable via the `StandardServiceRegistryBuilder` API.
See the `StandardServiceRegistryBuilder` https://docs.jboss.org/hibernate/stable/orm/javadocs/org/hibernate/boot/registry/StandardServiceRegistryBuilder.html[Javadocs] for more details.

Some specific methods of interest:

.Configuring a MetadataSources
====
[source,java]
----
include::{sourcedir}/native5.java[]
----
====

[[event-listener-registration]]
==== Event Listener registration

The main use cases for an `org.hibernate.integrator.spi.Integrator` right now are registering event listeners and providing services (see `org.hibernate.integrator.spi.ServiceContributingIntegrator`).
With 5.0 we plan on expanding that to allow altering the metamodel describing the mapping between object and relational models.

====
[source,java]
----
include::{sourcedir}/register-event-listeners-example.java[]
----
====

[[bootstrap-native-metadata]]
==== Building the Metadata

The second step in native bootstrapping is the building of an `org.hibernate.boot.Metadata` object containing the parsed representations of an application's domain model and its mapping to a database.
The first thing we obviously need to build a parsed representation is the source information to be parsed (annotated classes, `hbm.xml` files, `orm.xml` files).
This is the purpose of `org.hibernate.boot.MetadataSources`:
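
No example block appears at this point in the commit, so the following is an editor's sketch of typical `MetadataSources` usage; it assumes a `StandardServiceRegistry` named `standardRegistry` built as shown above, and the entity and mapping resource names are hypothetical:

[source,java]
----
// Editor's sketch: collect the mapping sources that will be parsed into Metadata.
MetadataSources sources = new MetadataSources( standardRegistry )
    .addAnnotatedClass( MyEntity.class )        // hypothetical annotated entity
    .addResource( "org/acme/Order.hbm.xml" )    // hypothetical hbm.xml mapping
    .addResource( "org/acme/Product.orm.xml" ); // hypothetical orm.xml mapping
----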
|
||||
|
||||
`MetadataSources` has many other methods as well, explore its API and https://docs.jboss.org/hibernate/stable/orm/javadocs/org/hibernate/boot/MetadataSources.html[Javadocs] for more information.
|
||||
Also, all methods on `MetadataSources` offer fluent-style call chaining::
|
||||
|
||||
.Configuring a MetadataSources with method chaining
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/native6.java[]
|
||||
----
|
||||
====
|
||||
|
||||
Once we have the sources of mapping information defined, we need to build the `Metadata` object.
|
||||
If you are ok with the default behavior in building the Metadata then you can simply call the https://docs.jboss.org/hibernate/stable/orm/javadocs/org/hibernate/boot/MetadataSources.html#buildMetadata--[1buildMetadata`] method of the `MetadataSources`.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
Notice that a `ServiceRegistry` can be passed at a number of points in this bootstrapping process.
|
||||
The suggested approach is to build a `StandardServiceRegistry` yourself and pass that along to the `MetadataSources` constructor.
|
||||
From there, `MetadataBuilder`, `Metadata`, `SessionFactoryBuilder` and `SessionFactory` will all pick up that same `StandardServiceRegistry`.
|
||||
====
|
||||
|
||||
However, if you wish to adjust the process of building `Metadata` from `MetadataSources`,
|
||||
you will need to use the `MetadataBuilder` as obtained via `MetadataSources#getMetadataBuilder`.
|
||||
`MetadataBuilder` allows a lot of control over the `Metadata` building process.
|
||||
See its https://docs.jboss.org/hibernate/stable/orm/javadocs/org/hibernate/boot/MetadataBuilder.html[Javadocs] for full details.
|
||||
|
||||
.Building Metadata via MetadataBuilder
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/native7.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[[bootstrap-native-sessionfactory]]
|
||||
==== Building the SessionFactory
|
||||
|
||||
The final step in native bootstrapping is to build the `SessionFactory` itself.
|
||||
Much like discussed above, if you are ok with the default behavior of building a `SessionFactory` from a `Metadata` reference, you can simply call the https://docs.jboss.org/hibernate/stable/orm/javadocs/org/hibernate/boot/Metadata.html#buildSessionFactory--[`buildSessionFactory`] method on the `Metadata` object.
|
||||
|
||||
However, if you would like to adjust that building process you will need to use `SessionFactoryBuilder` as obtained via `Metadata#getSessionFactoryBuilder`. Again, see its https://docs.jboss.org/hibernate/stable/orm/javadocs/org/hibernate/boot/Metadata.html#getSessionFactoryBuilder--[Javadocs] for more details.
|
||||
|
||||
.Native Bootstrapping - Putting it all together
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/native9.java[]
|
||||
----
|
||||
====
|
||||
|
||||
The bootstrapping API is quite flexible, but in most cases it makes the most sense to think of it as a 3 step process:
|
||||
|
||||
1. Build the `StandardServiceRegistry`
|
||||
2. Build the `Metadata`
|
||||
3. Use those 2 to build the `SessionFactory`
|
||||
|
||||
.Building `SessionFactory` via `SessionFactoryBuilder`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/native8.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[[bootstrap-jpa]]
|
||||
=== JPA Bootstrapping
|
||||
|
||||
Bootstrapping Hibernate as a JPA provider can be done in a JPA-spec compliant manner or using a proprietary bootstrapping approach.
|
||||
The standardized approach has some limitations in certain environments, but aside from those, it is *highly* recommended that you use JPA-standardized bootstrapping.
|
||||
|
||||
[[bootstrap-jpa-compliant]]
|
||||
==== JPA-compliant bootstrapping
|
||||
|
||||
In JPA, we are ultimately interested in bootstrapping a `javax.persistence.EntityManagerFactory` instance.
|
||||
The JPA specification defines 2 primary standardized bootstrap approaches depending on how the application intends to access the `javax.persistence.EntityManager` instances from an `EntityManagerFactory`.
|
||||
It uses the terms _EE_ and _SE_ for these two approaches, but those terms are very misleading in this context.
|
||||
What the JPA spec calls EE bootstrapping implies the existence of a container (EE, OSGi, etc.), which will manage and inject the persistence context on behalf of the application.
|
||||
What it calls SE bootstrapping is everything else. We will use the terms container-bootstrapping and application-bootstrapping in this guide.
|
||||
|
||||
If you would like additional details on accessing and using `EntityManager` instances, sections 7.6 and 7.7 of the JPA 2.1 specification cover container-managed and application-managed `EntityManagers`, respectively.
|
||||
|
||||
For compliant container-bootstrapping, the container will build an `EntityManagerFactory` for each persistence unit defined in the `META-INF/persistence.xml` configuration file
|
||||
and make that available to the application for injection via the `javax.persistence.PersistenceUnit` annotation or via JNDI lookup.
|
||||
|
||||
.Injecting an EntityManagerFactory
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/jpa1.java[]
|
||||
----
|
||||
====
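Where injection is not available, the same container-managed `EntityManagerFactory` can be obtained via JNDI lookup instead. The following is only a sketch; the JNDI name shown is hypothetical and depends entirely on how the container (or the deployment descriptor) binds the persistence unit.

[source,java]
----
// hypothetical JNDI name; the actual binding is container- and deployment-specific
EntityManagerFactory emf = (EntityManagerFactory) new InitialContext()
		.lookup( "java:comp/env/persistence/CRM" );
----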
|
||||
|
||||
For compliant application-bootstrapping, rather than the container building the `EntityManagerFactory` for the application, the application builds the `EntityManagerFactory` itself using the `javax.persistence.Persistence` bootstrap class.
|
||||
The application creates an `EntityManagerFactory` by calling the `createEntityManagerFactory` method:
|
||||
|
||||
.Application bootstrapped EntityManagerFactory
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/jpa2.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[[bootstrap-jpa-hibernate]]
|
||||
==== Proprietary JPA bootstrapping
|
||||
|
||||
Hibernate defines a proprietary https://docs.jboss.org/hibernate/stable/orm/javadocs/org/hibernate/jpa/boot/internal/EntityManagerFactoryBuilderImpl.html[`EntityManagerFactoryBuilderImpl`]
|
||||
utility, which allows bootstrapping the JPA environment even in the absence of a `persistence.xml` configuration file.
|
||||
To substitute the `persistence.xml` file, Hibernate offers the `PersistenceUnitInfoDescriptor` utility, which wraps a standard `PersistenceUnitInfo` carrying the configuration that would otherwise be read from the XML configuration file.
|
||||
|
||||
.Proprietary bootstrapped `EntityManagerFactory`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/jpa3.java[]
|
||||
----
|
||||
====
|
||||
|
||||
The `integrationSettings` map allows the application developer to customize the bootstrapping process by specifying different `hibernate.integrator_provider` or `hibernate.strategy_registration_provider` integration providers.
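For illustration only, the following sketch shows how such a provider could be supplied, assuming the `org.hibernate.jpa.boot.spi.IntegratorProvider` contract and reusing the `MyIntegrator` class from the event listener registration example:

[source,java]
----
Map<String, Object> integrationSettings = new HashMap<>();

// supply a custom Integrator without a persistence.xml entry
integrationSettings.put(
		"hibernate.integrator_provider",
		(IntegratorProvider) () -> Collections.singletonList( new MyIntegrator() )
);
----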
|
@ -0,0 +1,2 @@
|
||||
@PersistenceUnit
|
||||
EntityManagerFactory emf;
|
@ -0,0 +1,2 @@
|
||||
// Create an EMF for our CRM persistence-unit.
|
||||
EntityManagerFactory emf = Persistence.createEntityManagerFactory( "CRM" );
|
@ -0,0 +1,18 @@
|
||||
String persistenceUnitName = ...
|
||||
List<String> entityClassNames = ...
|
||||
Properties properties = ...
|
||||
|
||||
PersistenceUnitInfo persistenceUnitInfo = new PersistenceUnitInfoImpl(
|
||||
persistenceUnitName,
|
||||
entityClassNames,
|
||||
properties
|
||||
);
|
||||
|
||||
Map<String, Object> integrationSettings = new HashMap<>();
|
||||
integrationSettings.put(AvailableSettings.INTERCEPTOR, interceptor());
|
||||
|
||||
EntityManagerFactoryBuilderImpl entityManagerFactoryBuilder = new EntityManagerFactoryBuilderImpl(
|
||||
new PersistenceUnitInfoDescriptor(persistenceUnitInfo),
|
||||
integrationSettings
|
||||
);
|
||||
EntityManagerFactory emf = entityManagerFactoryBuilder.build();
|
@ -0,0 +1,9 @@
|
||||
BootstrapServiceRegistryBuilder bootstrapRegistryBuilder = new BootstrapServiceRegistryBuilder();
|
||||
|
||||
// add a special ClassLoader
|
||||
bootstrapRegistryBuilder.applyClassLoader( mySpecialClassLoader );
|
||||
// manually add an Integrator
|
||||
bootstrapRegistryBuilder.applyIntegrator( mySpecialIntegrator );
|
||||
...
|
||||
|
||||
BootstrapServiceRegistry bootstrapRegistry = bootstrapRegistryBuilder.build();
|
@ -0,0 +1,2 @@
|
||||
// An example using an implicitly built BootstrapServiceRegistry
|
||||
StandardServiceRegistryBuilder standardRegistryBuilder = new StandardServiceRegistryBuilder();
|
@ -0,0 +1,4 @@
|
||||
// An example using an explicitly built BootstrapServiceRegistry
|
||||
BootstrapServiceRegistry bootstrapRegistry = ...;
|
||||
|
||||
StandardServiceRegistryBuilder standardRegistryBuilder = new StandardServiceRegistryBuilder( bootstrapRegistry );
|
@ -0,0 +1,19 @@
|
||||
StandardServiceRegistryBuilder standardRegistryBuilder = ...;
|
||||
|
||||
// load some properties via resource lookup
|
||||
standardRegistryBuilder.loadProperties( "org/hibernate/example/MyProperties.properties" );
|
||||
|
||||
// configure the registry from a resource lookup for a cfg.xml config file
|
||||
standardRegistryBuilder.configure( "org/hibernate/example/my.cfg.xml" );
|
||||
|
||||
// apply a random setting
|
||||
standardRegistryBuilder.applySetting( "myProp","some value" );
|
||||
|
||||
// apply a service initiator
|
||||
standardRegistryBuilder.addInitiator( new CustomServiceInitiator() );
|
||||
|
||||
// apply a service impl
|
||||
standardRegistryBuilder.addService( SomeCustomService.class,new SomeCustomServiceImpl() );
|
||||
|
||||
// and finally build the StandardServiceRegistry
|
||||
StandardServiceRegistry standardRegistry = standardRegistryBuilder.build();
|
@ -0,0 +1,23 @@
|
||||
MetadataSources sources = new MetadataSources( standardRegistry );
|
||||
|
||||
// alternatively, we can build the MetadataSources without passing
|
||||
// a service registry, in which case it will build a default
|
||||
// BootstrapServiceRegistry to use. But the approach shown
|
||||
// above is preferred
|
||||
// MetadataSources sources = new MetadataSources();
|
||||
|
||||
// add a class using JPA/Hibernate annotations for mapping
|
||||
sources.addAnnotatedClass( MyEntity.class );
|
||||
|
||||
// add the name of a class using JPA/Hibernate annotations for mapping.
|
||||
// differs from above in that accessing the Class is deferred which is
|
||||
// important if using runtime bytecode-enhancement
|
||||
sources.addAnnotatedClassName( "org.hibernate.example.Customer" );
|
||||
|
||||
// Adds the named hbm.xml resource as a source: which performs the
|
||||
// classpath lookup and parses the XML
|
||||
sources.addResource( "org/hibernate/example/Order.hbm.xml" );
|
||||
|
||||
// Adds the named JPA orm.xml resource as a source: which performs the
|
||||
// classpath lookup and parses the XML
|
||||
sources.addResource( "org/hibernate/example/Product.orm.xml" );
|
@ -0,0 +1,5 @@
|
||||
MetadataSources sources = new MetadataSources( standardRegistry )
|
||||
.addAnnotatedClass( MyEntity.class )
|
||||
.addAnnotatedClassName( "org.hibernate.example.Customer" )
|
||||
.addResource( "org/hibernate/example/Order.hbm.xml" )
|
||||
.addResource( "org/hibernate/example/Product.orm.xml" );
|
@ -0,0 +1,9 @@
|
||||
MetadataBuilder metadataBuilder = sources.getMetadataBuilder();
|
||||
|
||||
// Use the JPA-compliant implicit naming strategy
|
||||
metadataBuilder.applyImplicitNamingStrategy( ImplicitNamingStrategyJpaCompliantImpl.INSTANCE );
|
||||
|
||||
// specify the schema name to use for tables, etc when none is explicitly specified
|
||||
metadataBuilder.applyImplicitSchemaName( "my_default_schema" );
|
||||
|
||||
Metadata metadata = metadataBuilder.build();
|
@ -0,0 +1,12 @@
|
||||
SessionFactoryBuilder sessionFactoryBuilder = metadata.getSessionFactoryBuilder();
|
||||
|
||||
// Supply an SessionFactory-level Interceptor
|
||||
sessionFactoryBuilder.applyInterceptor( new MySessionFactoryInterceptor() );
|
||||
|
||||
// Add a custom observer
|
||||
sessionFactoryBuilder.addSessionFactoryObservers( new MySessionFactoryObserver() );
|
||||
|
||||
// Apply a CDI BeanManager ( for JPA event listeners )
|
||||
sessionFactoryBuilder.applyBeanManager( getBeanManagerFromSomewhere() );
|
||||
|
||||
SessionFactory sessionFactory = sessionFactoryBuilder.build();
|
@ -0,0 +1,16 @@
|
||||
StandardServiceRegistry standardRegistry = new StandardServiceRegistryBuilder()
|
||||
.configure( "org/hibernate/example/MyCfg.xml" )
|
||||
.build();
|
||||
|
||||
Metadata metadata = new MetadataSources( standardRegistry )
|
||||
.addAnnotatedClass( MyEntity.class )
|
||||
.addAnnotatedClassName( "org.hibernate.example.Customer" )
|
||||
.addResource( "org/hibernate/example/Order.hbm.xml" )
|
||||
.addResource( "org/hibernate/example/Product.orm.xml" )
|
||||
.getMetadataBuilder()
|
||||
.applyImplicitNamingStrategy( ImplicitNamingStrategyJpaCompliantImpl.INSTANCE )
|
||||
.build();
|
||||
|
||||
SessionFactory sessionFactory = metadata.getSessionFactoryBuilder()
|
||||
.applyBeanManager( getBeanManagerFromSomewhere() )
|
||||
.build();
|
@ -0,0 +1,23 @@
|
||||
public class MyIntegrator implements org.hibernate.integrator.spi.Integrator {
|
||||
|
||||
public void integrate(
|
||||
Configuration configuration,
|
||||
SessionFactoryImplementor sessionFactory,
|
||||
SessionFactoryServiceRegistry serviceRegistry) {
|
||||
// As you might expect, an EventListenerRegistry is the thing with which event listeners are registered. It is a
|
||||
// service so we look it up using the service registry
|
||||
final EventListenerRegistry eventListenerRegistry = serviceRegistry.getService( EventListenerRegistry.class );
|
||||
|
||||
// If you wish to have custom determination and handling of "duplicate" listeners, you would have to add an
|
||||
// implementation of the org.hibernate.event.service.spi.DuplicationStrategy contract like this
|
||||
eventListenerRegistry.addDuplicationStrategy( myDuplicationStrategy );
|
||||
|
||||
// EventListenerRegistry defines 3 ways to register listeners:
|
||||
// 1) This form overrides any existing registrations with the specified listener(s)
|
||||
eventListenerRegistry.setListeners( EventType.AUTO_FLUSH, myCompleteSetOfListeners );
|
||||
// 2) This form adds the specified listener(s) to the beginning of the listener chain
|
||||
eventListenerRegistry.prependListeners( EventType.AUTO_FLUSH, myListenersToBeCalledFirst );
|
||||
// 3) This form adds the specified listener(s) to the end of the listener chain
|
||||
eventListenerRegistry.appendListeners( EventType.AUTO_FLUSH, myListenersToBeCalledLast );
|
||||
}
|
||||
}
|
@ -0,0 +1,81 @@
|
||||
[[caching]]
|
||||
== Caching
|
||||
|
||||
[[caching-config]]
|
||||
=== Configuring second-level caching
|
||||
|
||||
Hibernate defines the ability to integrate with pluggable providers for the purpose of caching data outside the context of a particular `Session`.
|
||||
This section defines the settings which control that behavior.
|
||||
|
||||
[[caching-config-provider]]
|
||||
=== RegionFactory
|
||||
|
||||
`org.hibernate.cache.spi.RegionFactory` defines the integration between Hibernate and a pluggable caching provider.
|
||||
`hibernate.cache.region.factory_class` is used to declare the provider to use. Hibernate comes with support for 2 popular caching libraries: Ehcache and Infinispan.
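As an illustration, the region factory can also be selected programmatically when building the `StandardServiceRegistry`. This is only a sketch and assumes the hibernate-ehcache module is on the classpath:

[source,java]
----
StandardServiceRegistry registry = new StandardServiceRegistryBuilder()
		.applySetting( "hibernate.cache.use_second_level_cache", "true" )
		.applySetting( "hibernate.cache.region.factory_class",
				"org.hibernate.cache.ehcache.EhCacheRegionFactory" )
		.build();
----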
|
||||
|
||||
[[caching-config-provider-ehcache]]
|
||||
=== Ehcache
|
||||
|
||||
[IMPORTANT]
|
||||
====
|
||||
Use of the built-in integration for Ehcache requires that the hibernate-ehcache module jar (and all of its dependencies) are on the classpath.
|
||||
====
|
||||
|
||||
The hibernate-ehcache module defines 2 specific region factories: `EhCacheRegionFactory` and `SingletonEhCacheRegionFactory`.
|
||||
|
||||
[[caching-config-provider-ehcache-region-factory]]
|
||||
==== `EhCacheRegionFactory`
|
||||
|
||||
TODO
|
||||
|
||||
[[caching-config-provider-ehcache-singleton-region-factory]]
|
||||
==== `SingletonEhCacheRegionFactory`
|
||||
|
||||
TODO
|
||||
|
||||
[[caching-config-provider-infinispan]]
|
||||
=== Infinispan
|
||||
|
||||
[IMPORTANT]
|
||||
====
|
||||
Use of the built-in integration for Infinispan requires that the hibernate-infinispan module jar (and all of its dependencies) are on the classpath.
|
||||
====
|
||||
|
||||
The hibernate-infinispan module defines 2 specific providers: `infinispan` and `infinispan-jndi`.
|
||||
|
||||
TODO
|
||||
|
||||
[[caching-config-behavior]]
|
||||
=== Caching behavior
|
||||
|
||||
Besides specific provider configuration, there are a number of configuration options on the Hibernate side of the integration that control various aspects of caching behavior:
|
||||
|
||||
`hibernate.cache.use_second_level_cache`:: Enable or disable second level caching overall. Default is true, although the default region factory is `NoCachingRegionFactory`.
|
||||
`hibernate.cache.use_query_cache`:: Enable or disable second level caching of query results. Default is false.
|
||||
`hibernate.cache.query_cache_factory`:: Names an implementation of `org.hibernate.cache.spi.QueryCacheFactory`.
Query result caching is handled by a special contract that deals with staleness-based invalidation of the results.
The default implementation does not allow stale results at all; use this setting for applications that would like to relax that.
|
||||
`hibernate.cache.use_minimal_puts`:: Optimizes second-level cache operations to minimize writes, at the cost of more frequent reads. Providers typically set this appropriately.
|
||||
`hibernate.cache.region_prefix`:: Defines a name to be used as a prefix to all second-level cache region names.
|
||||
`hibernate.cache.default_cache_concurrency_strategy`:: In Hibernate second-level caching, all regions can be configured differently, including the concurrency strategy to use when accessing a particular region.
This setting allows you to define a default strategy to be used when none is explicitly specified (a per-entity sketch follows this list).
This setting is very rarely required as the pluggable providers do specify the default strategy to use.
Valid values include:
* `read-only`
* `read-write`
* `nonstrict-read-write`
* `transactional`
|
||||
`hibernate.cache.use_structured_entries`:: If `true`, forces Hibernate to store data in the second-level cache in a more human-friendly format.
Can be useful if you'd like to be able to "browse" the data directly in your cache, but does have a performance impact.
`hibernate.cache.auto_evict_collection_cache`:: Enables or disables the automatic eviction of a bidirectional association's collection cache entry when the association is changed just from the owning side.
This is disabled by default, as tracking this state has a performance impact.
However, if your application does not manage both sides of a bidirectional association where the collection side is cached, the alternative is to have stale data in that collection cache.
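As a point of reference for the concurrency strategies above, caching is typically enabled per entity. The following is only a sketch using a hypothetical `Event` entity, JPA's `@Cacheable` and Hibernate's `@Cache` annotation:

[source,java]
----
@Entity
@Cacheable
@org.hibernate.annotations.Cache( usage = CacheConcurrencyStrategy.READ_WRITE )
public class Event {

	@Id
	private Long id;

	private String name;
}
----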
|
||||
|
||||
[[caching-management]]
|
||||
=== Managing the Cached Data
|
||||
|
||||
At runtime Hibernate handles moving data into and out of the second-level cache in response to the operations performed by the `Session`.
|
||||
|
||||
The `org.hibernate.Cache` interface (or the `javax.persistence.Cache` interface if using JPA) allows clearing data from the second-level cache.
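A minimal sketch of that API (the `Person` class is just a hypothetical cached entity):

[source,java]
----
// native API: evict all cached Person data
sessionFactory.getCache().evictEntityRegion( Person.class );

// JPA API: the equivalent through javax.persistence.Cache
entityManagerFactory.getCache().evict( Person.class );
----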
|
||||
|
||||
TODO
|
@ -0,0 +1,30 @@
|
||||
[[domain-model]]
|
||||
== Domain Model
|
||||
:sourcedir: extras
|
||||
|
||||
The term https://en.wikipedia.org/wiki/Domain_model[domain model] comes from the realm of data modeling.
|
||||
It is the model that ultimately describes the https://en.wikipedia.org/wiki/Problem_domain[problem domain] you are working in.
|
||||
Sometimes you will also hear the term _persistent classes_.
|
||||
|
||||
Ultimately the application's domain model is the central character in an ORM.
|
||||
It is made up of the classes you wish to map. Hibernate works best if these classes follow the Plain Old Java Object (POJO) / JavaBean programming model.
|
||||
However, none of these rules are hard requirements.
|
||||
Indeed, Hibernate assumes very little about the nature of your persistent objects. You can express a domain model in other ways (using trees of `java.util.Map` instances, for example).
|
||||
|
||||
Historically applications using Hibernate would have used its proprietary XML mapping file format for this purpose.
|
||||
With the coming of JPA, most of this information is now defined in a way that is portable across ORM/JPA providers using annotations (and/or standardized XML format).
|
||||
This chapter will focus on JPA mapping where possible.
|
||||
For Hibernate mapping features not supported by JPA we will prefer Hibernate extension annotations.
|
||||
|
||||
include::mapping_types.adoc[]
|
||||
include::naming.adoc[]
|
||||
include::basic_types.adoc[]
|
||||
include::embeddables.adoc[]
|
||||
include::entity.adoc[]
|
||||
include::access.adoc[]
|
||||
include::identifiers.adoc[]
|
||||
include::associations.adoc[]
|
||||
include::collections.adoc[]
|
||||
include::natural_id.adoc[]
|
||||
include::dynamic_model.adoc[]
|
||||
include::inheritance.adoc[]
|
@ -0,0 +1,95 @@
|
||||
[[access]]
|
||||
==== Access strategies
|
||||
:sourcedir: extras
|
||||
|
||||
As a JPA provider, Hibernate can introspect both the entity attributes (instance fields) and the accessors (instance properties).
The placement of the `@Id` annotation determines the default access strategy.
When placed on a field, Hibernate will assume field-based access.
When placed on the identifier getter, Hibernate will use property-based access.
|
||||
|
||||
Embeddable types inherit the access strategy from their parent entities.
|
||||
|
||||
[[field-based-access]]
|
||||
===== Field-based access
|
||||
|
||||
.Field-based access
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/access/SimpleEntityFieldAccess.java[]
|
||||
----
|
||||
====
|
||||
|
||||
When using field-based access, adding other entity-level methods is much more flexible because Hibernate won't consider those part of the persistence state.
|
||||
To exclude a field from being part of the entity persistent state, the field must be marked with the `@Transient` annotation.
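For example, in the following sketch (a hypothetical entity, not taken from the guide's example sources) only `id` and `balanceInCents` are persistent; the `@Transient` field and the helper method are ignored:

[source,java]
----
@Entity
public class Account {

	@Id
	private Long id;

	private long balanceInCents;

	// excluded from the persistent state
	@Transient
	private long cachedInterestInCents;

	// entity-level helper methods are not considered persistent state either
	public long interestFor(int basisPoints) {
		return balanceInCents * basisPoints / 10_000;
	}
}
----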
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
Another advantage of using field-based access is that some entity attributes can be hidden from outside the entity.
|
||||
An example of such attribute is the entity `@Version` field, which must not be manipulated by the data access layer.
|
||||
With field-based access, we can simply omit the getter and the setter for this version field, and Hibernate can still leverage the optimistic concurrency control mechanism.
|
||||
====
|
||||
|
||||
[[property-based-access]]
|
||||
===== Property-based access
|
||||
|
||||
.Property-based access
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/access/SimpleEntityPropertyAccess.java[]
|
||||
----
|
||||
====
|
||||
|
||||
When using property-based access, Hibernate uses the accessors for both reading and writing the entity state.
|
||||
Every other method that will be added to the entity (e.g. helper methods for synchronizing both ends of a bidirectional one-to-many association) will have to be marked with the `@Transient` annotation.
|
||||
|
||||
===== Overriding the default access strategy
|
||||
|
||||
The default access strategy mechanism can be overridden with the JPA `@Access` annotation.
|
||||
In the following example, the `@Version` attribute is accessed by its field and not by its getter, like the rest of the entity attributes.
|
||||
|
||||
.Overriding access strategy
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/access/SimpleEntityPropertyAccessOverride.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[[access-embeddable-types]]
|
||||
===== Embeddable types and access strategy
|
||||
|
||||
Because embeddable types are managed by their owning entities, their access strategy is inherited from the entity as well.
This applies both to simple embeddable types and to collections of embeddables.

The embeddable types can override the default implicit access strategy (inherited from the owning entity).
In the following example, the embeddable uses property-based access, no matter what access strategy the owning entity uses:
|
||||
|
||||
.Embeddable with exclusive access strategy
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/access/EmbeddableAccessType.java[]
|
||||
----
|
||||
====
|
||||
|
||||
The owning entity can use field-based access, while the embeddable uses the property-based access it has explicitly chosen:
|
||||
|
||||
.Entity including a single embeddable type
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/access/EmbeddedAccessType.java[]
|
||||
----
|
||||
====
|
||||
|
||||
This also works for collections of embeddable types:
|
||||
|
||||
.Entity including a collection of embeddable types
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/access/ElementCollectionAccessType.java[]
|
||||
----
|
||||
====
|
@ -0,0 +1,371 @@
|
||||
[[associations]]
|
||||
=== Associations
|
||||
:sourcedir: extras
|
||||
|
||||
Associations describe how two or more entities form a relationship based on database joining semantics.
|
||||
|
||||
[[associations-many-to-one]]
|
||||
==== `@ManyToOne`
|
||||
|
||||
`@ManyToOne` is the most common association, having a direct equivalent in the relational database as well (e.g. foreign key),
|
||||
and so it establishes a relationship between a child entity and a parent.
|
||||
|
||||
.`@ManyToOne` association
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/ManyToOne.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/ManyToOne.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
Each entity has a lifecycle of its own. Once the `@ManyToOne` association is set, Hibernate will set the associated database foreign key column.
|
||||
|
||||
.`@ManyToOne` association lifecycle
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/ManyToOneLifecycle.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/ManyToOneLifecycle.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[[associations-one-to-many]]
|
||||
==== `@OneToMany`
|
||||
|
||||
The `@OneToMany` association links a parent entity with one or more child entities.
|
||||
If the `@OneToMany` doesn't have a mirroring `@ManyToOne` association on the child side, the `@OneToMany` association is unidirectional.
|
||||
If there is a `@ManyToOne` association on the child side, the `@OneToMany` association is bidirectional and the application developer can navigate this relationship from both ends.
|
||||
|
||||
[[associations-one-to-many-unidirectional]]
|
||||
===== Unidirectional `@OneToMany`
|
||||
|
||||
When using a unidirectional `@OneToMany` association, Hibernate resorts to using a link table between the two joining entities.
|
||||
|
||||
.Unidirectional `@OneToMany` association
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/UnidirectionalOneToMany.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/UnidirectionalOneToMany.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
The `@OneToMany` association is by definition a parent association, even if it's a unidirectional or a bidirectional one.
|
||||
It only makes sense for the parent side of an association to cascade its entity state transitions to children.
|
||||
====
|
||||
|
||||
.Cascading `@OneToMany` association
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/UnidirectionalOneToManyLifecycle.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/UnidirectionalOneToManyLifecycle.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
When persisting the `Person` entity, the cascade will propagate the persist operation to the underlying `Phone` children as well.
|
||||
Upon removing a `Phone` from the phones collection, the association row is deleted from the link table, and the `orphanRemoval` attribute will trigger a `Phone` removal as well.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
The unidirectional associations are not very efficient when it comes to removing child entities.
|
||||
In this particular example, upon flushing the persistence context, Hibernate deletes all database child entries and reinserts the ones that are still found in the in-memory persistence context.
|
||||
|
||||
On the other hand, a bidirectional `@OneToMany` association is much more efficient because the child entity controls the association.
|
||||
====
|
||||
|
||||
[[associations-one-to-many-bidirectional]]
|
||||
===== Bidirectional `@OneToMany`
|
||||
|
||||
The bidirectional `@OneToMany` association also requires a `@ManyToOne` association on the child side.
|
||||
Although the Domain Model exposes two sides to navigate this association, behind the scenes, the relational database has only one foreign key for this relationship.
|
||||
|
||||
Every bidirectional association must have one owning side only (the child side), the other one being referred to as the _inverse_ (or the `mappedBy`) side.
|
||||
|
||||
.`@OneToMany` association mappedBy the `@ManyToOne` side
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalOneToMany.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalOneToMany.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[IMPORTANT]
|
||||
====
|
||||
Whenever a bidirectional association is formed, the application developer must make sure both sides are in-sync at all times.
|
||||
The `addPhone()` and `removePhone()` are utility methods that synchronize both ends whenever a child element is added or removed (a sketch of such helpers follows this note).
|
||||
====
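Such helper methods typically look along these lines (a sketch only, assuming `Person` holds a `List<Phone> phones` and `Phone` exposes a `setPerson()` method):

[source,java]
----
public void addPhone(Phone phone) {
	phones.add( phone );
	phone.setPerson( this );
}

public void removePhone(Phone phone) {
	phones.remove( phone );
	phone.setPerson( null );
}
----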
|
||||
|
||||
Because the `Phone` class has a `@NaturalId` column (the phone number being unique),
|
||||
the `equals()` and the `hashCode()` can make use of this property, and so the `removePhone()` logic is reduced to the `remove()` Java `Collection` method.
|
||||
|
||||
.Bidirectional `@OneToMany` with an owner `@ManyToOne` side lifecycle
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalOneToManyLifecycle.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalOneToManyLifecycle.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
Unlike the unidirectional `@OneToMany`, the bidirectional association is much more efficient when managing the collection persistence state.
|
||||
Every element removal only requires a single update (in which the foreign key column is set to `NULL`), and,
|
||||
if the child entity lifecycle is bound to its owning parent so that the child cannot exist without its parent,
|
||||
then we can annotate the association with the `orphanRemoval` attribute and disassociating the child will trigger a delete statement on the actual child table row as well.
|
||||
|
||||
[[associations-one-to-one]]
|
||||
==== `@OneToOne`
|
||||
|
||||
The `@OneToOne` association can either be unidirectional or bidirectional.
|
||||
A unidirectional association follows the relational database foreign key semantics, the client-side owning the relationship.
|
||||
A bidirectional association features a `mappedBy` `@OneToOne` parent side too.
|
||||
|
||||
[[associations-one-to-one-unidirectional]]
|
||||
===== Unidirectional `@OneToOne`
|
||||
|
||||
.Unidirectional `@OneToOne`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/UnidirectionalOneToOne.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/UnidirectionalOneToOne.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
From a relational database point of view, the underlying schema is identical to the unidirectional `@ManyToOne` association,
|
||||
as the client-side controls the relationship based on the foreign key column.
|
||||
|
||||
However, it's unusual to consider the `Phone` as the client-side and the `PhoneDetails` as the parent-side because the details cannot exist without an actual phone.
|
||||
A much more natural mapping would be if the `Phone` was the parent-side, therefore pushing the foreign key into the `PhoneDetails` table.
|
||||
This mapping requires a bidirectional `@OneToOne` association as you can see in the following example:
|
||||
|
||||
[[associations-one-to-one-bidirectional]]
|
||||
===== Bidirectional `@OneToOne`
|
||||
|
||||
.Bidirectional `@OneToOne`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalOneToOne.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalOneToOne.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
This time, the `PhoneDetails` owns the association, and, like any bidirectional association, the parent-side can propagate its lifecycle to the child-side through cascading.
|
||||
|
||||
.Bidirectional `@OneToOne` lifecycle
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalOneToOneLifecycle.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalOneToOneLifecycle.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
When using a bidirectional `@OneToOne` association, Hibernate enforces the unique constraint upon fetching the child-side.
|
||||
If more than one child is associated with the same parent, Hibernate will throw a constraint violation exception.
|
||||
|
||||
.Bidirectional `@OneToOne` unique constraint
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalOneToOneConstraint.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[[associations-many-to-many]]
|
||||
==== `@ManyToMany`
|
||||
|
||||
The `@ManyToMany` association requires a link table that joins two entities.
|
||||
Like the `@OneToMany` association, `@ManyToMany` can be either unidirectional or bidirectional.
|
||||
|
||||
[[associations-many-to-many-unidirectional]]
|
||||
===== Unidirectional `@ManyToMany`
|
||||
|
||||
.Unidirectional `@ManyToMany`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/UnidirectionalManyToMany.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/UnidirectionalManyToMany.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
Just like with unidirectional `@OneToMany` associations, the link table is controlled by the owning side.
|
||||
|
||||
When an entity is removed from the `@ManyToMany` collection, Hibernate simply deletes the joining record in the link table.
|
||||
Unfortunately, this operation requires removing all entries associated with a given parent and recreating the ones that are listed in the currently running persistence context.
|
||||
|
||||
.Unidirectional `@ManyToMany` lifecycle
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/UnidirectionalManyToManyLifecycle.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/UnidirectionalManyToManyLifecycle.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
For `@ManyToMany` associations, cascading the `REMOVE` entity state transition doesn't make sense because it would propagate beyond the link table.
Since the other side might be referenced by other entities on the parent-side, the automatic removal might end up in a `ConstraintViolationException`.
|
||||
|
||||
For example, if `@ManyToMany(cascade = CascadeType.ALL)` was defined and the first person would be deleted,
|
||||
Hibernate would throw an exception because another person is still associated to the address that's being deleted.
|
||||
|
||||
[source,java]
|
||||
----
|
||||
Person person1 = entityManager.find(Person.class, personId);
|
||||
entityManager.remove(person1);
|
||||
|
||||
Caused by: javax.persistence.PersistenceException: org.hibernate.exception.ConstraintViolationException: could not execute statement
|
||||
Caused by: org.hibernate.exception.ConstraintViolationException: could not execute statement
|
||||
Caused by: java.sql.SQLIntegrityConstraintViolationException: integrity constraint violation: foreign key no action; FKM7J0BNABH2YR0PE99IL1D066U table: PERSON_ADDRESS
|
||||
----
|
||||
====
|
||||
|
||||
By simply removing the parent-side, Hibernate can safely remove the associated link records as you can see in the following example:
|
||||
|
||||
.Unidirectional `@ManyToMany` entity removal
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/UnidirectionalManyToManyRemove.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/UnidirectionalManyToManyRemove.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[[associations-many-to-many-bidirectional]]
|
||||
===== Bidirectional `@ManyToMany`
|
||||
|
||||
A bidirectional `@ManyToMany` association has an owning and a `mappedBy` side.
|
||||
To preserve synchronicity between both sides, it's good practice to provide helper methods for adding or removing child entities.
|
||||
|
||||
.Bidirectional `@ManyToMany`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalManyToMany.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalManyToMany.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
With the helper methods in place, the synchronicity management can be simplified, as you can see in the following example:
|
||||
|
||||
.Bidirectional `@ManyToMany` lifecycle
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalManyToManyLifecycle.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalManyToManyLifecycle.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
While a bidirectional `@OneToMany` association performs better when removing or changing the order of child elements,
the `@ManyToMany` relationship cannot benefit from such an optimization because the foreign key side is not in control.
To overcome this limitation, the link table must be directly exposed and the `@ManyToMany` association split into two bidirectional `@OneToMany` relationships.
|
||||
|
||||
[[associations-many-to-many-bidirectional-with-link-entity]]
|
||||
===== Bidirectional many-to-many with a link entity
|
||||
|
||||
The most natural `@ManyToMany` association follows the same logic employed by the database schema,
|
||||
and the link table has an associated entity which controls the relationship for both sides that need to be joined.
|
||||
|
||||
.Bidirectional many-to-many with link entity
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalManyToManyWithLinkEntity.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalManyToManyWithLinkEntity.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
Both the `Person` and the `Address` have a `mappedBy` `@OneToMany` side, while the `PersonAddress` owns the `person` and the `address` `@ManyToOne` associations.
|
||||
Because this mapping is formed out of two bidirectional associations, the helper methods are even more relevant.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
The aforementioned example uses a Hibernate-specific mapping for the link entity since JPA doesn't allow building a composite identifier out of multiple `@ManyToOne` associations.
|
||||
For more details, see the <<chapters/domain/identifiers.adoc#identifiers-composite-associations,Composite identifiers - associations>> section.
|
||||
====
|
||||
|
||||
The entity state transitions are better managed than in the previous bidirectional `@ManyToMany` case.
|
||||
|
||||
.Bidirectional many-to-many with link entity lifecycle
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalManyToManyWithLinkEntityLifecycle.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/associations/BidirectionalManyToManyWithLinkEntityLifecycle.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
There is only one delete statement executed because, this time, the association is controlled by the `@ManyToOne` side which only has to monitor the state of the underlying foreign key relationship to trigger the right DML statement.
|
@ -0,0 +1,897 @@
|
||||
[[basic]]
|
||||
=== Basic Types
|
||||
:sourcedir: extras
|
||||
|
||||
Basic value types usually map a single database column to a single, non-aggregated Java type.
|
||||
Hibernate provides a number of built-in basic types, which follow the natural mappings recommended by the JDBC specifications.
|
||||
|
||||
Internally Hibernate uses a registry of basic types when it needs to resolve a specific `org.hibernate.type.Type`.
|
||||
|
||||
[[basic-provided]]
|
||||
==== Hibernate-provided BasicTypes
|
||||
|
||||
.Standard BasicTypes
|
||||
[cols=",,,",options="header",]
|
||||
|=======================================================================================================================================================================================================================================================================================
|
||||
|Hibernate type (org.hibernate.type package) |JDBC type |Java type |BasicTypeRegistry key(s)
|
||||
|StringType |VARCHAR |java.lang.String |string, java.lang.String
|
||||
|MaterializedClobType |CLOB |java.lang.String |materialized_clob
|
||||
|TextType |LONGVARCHAR |java.lang.String |text
|
||||
|CharacterType |CHAR |char, java.lang.Character |char, java.lang.Character
|
||||
|BooleanType |BIT |boolean, java.lang.Boolean |boolean, java.lang.Boolean
|
||||
|NumericBooleanType |INTEGER, 0 is false, 1 is true |boolean, java.lang.Boolean |numeric_boolean
|
||||
|YesNoType |CHAR, 'N'/'n' is false, 'Y'/'y' is true. The uppercase value is written to the database. |boolean, java.lang.Boolean |yes_no
|
||||
|TrueFalseType |CHAR, 'F'/'f' is false, 'T'/'t' is true. The uppercase value is written to the database. |boolean, java.lang.Boolean |true_false
|
||||
|ByteType |TINYINT |byte, java.lang.Byte |byte, java.lang.Byte
|
||||
|ShortType |SMALLINT |short, java.lang.Short |short, java.lang.Short
|
||||
|IntegerType |INTEGER |int, java.lang.Integer |int, java.lang.Integer
|
||||
|LongType |BIGINT |long, java.lang.Long |long, java.lang.Long
|
||||
|FloatType |FLOAT |float, java.lang.Float |float, java.lang.Float
|
||||
|DoubleType |DOUBLE |double, java.lang.Double |double, java.lang.Double
|
||||
|BigIntegerType |NUMERIC |java.math.BigInteger |big_integer, java.math.BigInteger
|
||||
|BigDecimalType |NUMERIC |java.math.BigDecimal |big_decimal, java.math.BigDecimal
|
||||
|TimestampType |TIMESTAMP |java.sql.Timestamp |timestamp, java.sql.Timestamp
|
||||
|TimeType |TIME |java.sql.Time |time, java.sql.Time
|
||||
|DateType |DATE |java.sql.Date |date, java.sql.Date
|
||||
|CalendarType |TIMESTAMP |java.util.Calendar |calendar, java.util.Calendar
|
||||
|CalendarDateType |DATE |java.util.Calendar |calendar_date
|
||||
|CalendarTimeType |TIME |java.util.Calendar |calendar_time
|
||||
|CurrencyType |VARCHAR |java.util.Currency |currency, java.util.Currency
|
||||
|LocaleType |VARCHAR |java.util.Locale |locale, java.util.Locale
|
||||
|TimeZoneType |VARCHAR, using the TimeZone ID |java.util.TimeZone |timezone, java.util.TimeZone
|
||||
|UrlType |VARCHAR |java.net.URL |url, java.net.URL
|
||||
|ClassType |VARCHAR (class FQN) |java.lang.Class |class, java.lang.Class
|
||||
|BlobType |BLOB |java.sql.Blob |blob, java.sql.Blob
|
||||
|ClobType |CLOB |java.sql.Clob |clob, java.sql.Clob
|
||||
|BinaryType |VARBINARY |byte[] |binary, byte[]
|
||||
|MaterializedBlobType |BLOB |byte[] |materialized_blob
|
||||
|ImageType |LONGVARBINARY |byte[] |image
|
||||
|WrapperBinaryType |VARBINARY |java.lang.Byte[] |wrapper-binary, Byte[], java.lang.Byte[]
|
||||
|CharArrayType |VARCHAR |char[] |characters, char[]
|
||||
|CharacterArrayType |VARCHAR |java.lang.Character[] |wrapper-characters, Character[], java.lang.Character[]
|
||||
|UUIDBinaryType |BINARY |java.util.UUID |uuid-binary, java.util.UUID
|
||||
|UUIDCharType |CHAR, can also read VARCHAR |java.util.UUID |uuid-char
|
||||
|PostgresUUIDType |PostgreSQL UUID, through Types#OTHER, which complies to the PostgreSQL JDBC driver definition |java.util.UUID |pg-uuid
|
||||
|SerializableType |VARBINARY |implementors of java.io.Serializable |Unlike the other value types, multiple instances of this type are registered. It is registered once under java.io.Serializable, and registered under the specific java.io.Serializable implementation class names.
|
||||
|StringNVarcharType |NVARCHAR |java.lang.String |nstring
|
||||
|NTextType |LONGNVARCHAR |java.lang.String |ntext
|
||||
|NClobType |NCLOB |java.sql.NClob |nclob, java.sql.NClob
|
||||
|MaterializedNClobType |NCLOB |java.lang.String |materialized_nclob
|
||||
|PrimitiveCharacterArrayNClobType |NCHAR |char[] |N/A
|
||||
|CharacterNCharType |NCHAR |java.lang.Character |ncharacter
|
||||
|CharacterArrayNClobType |NCLOB |java.lang.Character[] |N/A
|
||||
|=======================================================================================================================================================================================================================================================================================
|
||||
|
||||
.BasicTypes added by hibernate-java8
|
||||
[cols=",,,",options="header",]
|
||||
|=================================================================================================
|
||||
|Hibernate type (org.hibernate.type package) |JDBC type |Java type |BasicTypeRegistry key(s)
|
||||
|DurationType |BIGINT |java.time.Duration |Duration, java.time.Duration
|
||||
|InstantType |TIMESTAMP |java.time.Instant |Instant, java.time.Instant
|
||||
|LocalDateTimeType |TIMESTAMP |java.time.LocalDateTime |LocalDateTime, java.time.LocalDateTime
|
||||
|LocalDateType |DATE |java.time.LocalDate |LocalDate, java.time.LocalDate
|
||||
|LocalTimeType |TIME |java.time.LocalTime |LocalTime, java.time.LocalTime
|
||||
|OffsetDateTimeType |TIMESTAMP |java.time.OffsetDateTime |OffsetDateTime, java.time.OffsetDateTime
|
||||
|OffsetTimeType |TIME |java.time.OffsetTime |OffsetTime, java.time.OffsetTime
|
||||
|ZonedDateTimeType |TIMESTAMP |java.time.ZonedDateTime |ZonedDateTime, java.time.ZonedDateTime
|
||||
|=================================================================================================
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
To use these hibernate-java8 types just add the `hibernate-java8` dependency to your classpath and Hibernate will take care of the rest.
|
||||
See <<basic-datetime>> for more about Java 8 Date/Time types.
|
||||
====
|
||||
|
||||
These mappings are managed by a service inside Hibernate called the `org.hibernate.type.BasicTypeRegistry`, which essentially maintains a map of `org.hibernate.type.BasicType` (a `org.hibernate.type.Type` specialization) instances keyed by a name.
|
||||
That is the purpose of the "BasicTypeRegistry key(s)" column in the previous tables.
|
||||
|
||||
[[basic-annotation]]
|
||||
==== The `@Basic` annotation
|
||||
|
||||
Strictly speaking, a basic type is denoted with the `javax.persistence.Basic` annotation.
|
||||
Generally speaking the `@Basic` annotation can be ignored, as it is assumed by default.
|
||||
Both of the following examples are ultimately the same.
|
||||
|
||||
.With `@Basic`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/ex1.java[]
|
||||
----
|
||||
====
|
||||
|
||||
.Without `@Basic`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/ex2.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[TIP]
|
||||
====
|
||||
The JPA specification strictly limits the Java types that can be marked as basic to the following listing:
|
||||
|
||||
* Java primitive types (`boolean`, `int`, etc)
|
||||
* wrappers for the primitive types (`java.lang.Boolean`, `java.lang.Integer`, etc)
|
||||
* `java.lang.String`
|
||||
* `java.math.BigInteger`
|
||||
* `java.math.BigDecimal`
|
||||
* `java.util.Date`
|
||||
* `java.util.Calendar`
|
||||
* `java.sql.Date`
|
||||
* `java.sql.Time`
|
||||
* `java.sql.Timestamp`
|
||||
* `byte[]` or `Byte[]`
|
||||
* `char[]` or `Character[]`
|
||||
* `enums`
|
||||
* any other type that implements `Serializable` (JPA's "support" for `Serializable` types is to directly serialize their state to the database).
|
||||
|
||||
If provider portability is a concern, you should stick to just these basic types.
|
||||
Note that JPA 2.1 did add the notion of an `javax.persistence.AttributeConverter` to help alleviate some of these concerns; see <<basic-jpa-convert>> for more on this topic.
|
||||
====
|
||||
|
||||
The `@Basic` annotation defines 2 attributes.
|
||||
|
||||
`optional` - boolean (defaults to true):: Defines whether this attribute allows nulls.
JPA defines this as "a hint", which essentially means that its effect is not strictly required.
As long as the type is not primitive, Hibernate takes this to mean that the underlying column should be `NULLABLE`.
`fetch` - FetchType (defaults to EAGER):: Defines whether this attribute should be fetched eagerly or lazily.
JPA says that EAGER is a requirement to the provider (Hibernate) that the value should be fetched when the owner is fetched, while LAZY is merely a hint that the value be fetched when the attribute is accessed.
Hibernate ignores this setting for basic types unless you are using bytecode enhancement.
See the <<chapters/pc/BytecodeEnhancement.adoc#BytecodeEnhancement,BytecodeEnhancement>> section for additional information on fetching and on bytecode enhancement. A minimal usage sketch follows.
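The sketch below uses a hypothetical `Product` entity (not taken from the guide's example sources):

[source,java]
----
@Entity
public class Product {

	@Id
	private Integer id;

	// nullable column; LAZY is only a hint for basic types unless bytecode enhancement is used
	@Basic( optional = true, fetch = FetchType.LAZY )
	private String description;
}
----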
|
||||
|
||||
==== The `@Column` annotation
|
||||
|
||||
JPA defines rules for implicitly determining the name of tables and columns.
|
||||
For a detailed discussion of implicit naming see <<naming.adoc#naming,Naming>>.
|
||||
|
||||
For basic type attributes, the implicit naming rule is that the column name is the same as the attribute name.
|
||||
If that implicit naming rule does not meet your requirements, you can explicitly tell Hibernate (and other providers) the column name to use.
|
||||
|
||||
.Explicit column naming
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/ExplicitColumnNaming.java[]
|
||||
----
|
||||
====
|
||||
|
||||
Here we use `@Column` to explicitly map the `description` attribute to the `NOTES` column, as opposed to the implicit column name `description`.
|
||||
|
||||
The `@Column` annotation defines other mapping information as well. See its javadocs for details.
|
||||
|
||||
[[basic-registry]]
|
||||
==== BasicTypeRegistry
|
||||
|
||||
We said before that a Hibernate type is not a Java type, nor a SQL type, but that it understands both and performs the marshalling between them.
|
||||
But looking at the basic type mappings from the previous examples,
|
||||
how did Hibernate know to use its `org.hibernate.type.StringType` for mapping for `java.lang.String` attributes,
|
||||
or its `org.hibernate.type.IntegerType` for mapping `java.lang.Integer` attributes?
|
||||
|
||||
The answer lies in a service inside Hibernate called the `org.hibernate.type.BasicTypeRegistry`, which essentially maintains a map of `org.hibernate.type.BasicType` (a `org.hibernate.type.Type` specialization) instances keyed by a name.
|
||||
|
||||
We will see later, in the <<basic-explicit>> section, that we can explicitly tell Hibernate which BasicType to use for a particular attribute.
|
||||
But first let's explore how implicit resolution works and how applications can adjust implicit resolution.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
A thorough discussion of the `BasicTypeRegistry` and all the different ways to contribute types to it is beyond the scope of this documentation.
|
||||
Please see Integrations Guide for complete details.
|
||||
====
|
||||
|
||||
As an example, take a String attribute such as the one we saw before with `Product#sku`.
|
||||
Since there was no explicit type mapping, Hibernate looks to the `BasicTypeRegistry` to find the registered mapping for `java.lang.String`.
|
||||
This goes back to the "BasicTypeRegistry key(s)" column we saw in the tables at the start of this chapter.
|
||||
|
||||
As a baseline within `BasicTypeRegistry`, Hibernate follows the recommended mappings of JDBC for Java types.
|
||||
JDBC recommends mapping Strings to VARCHAR, which is the exact mapping that `StringType` handles.
|
||||
So that is the baseline mapping within `BasicTypeRegistry` for Strings.
|
||||
|
||||
Applications can also extend (add new `BasicType` registrations) or override (replace an existing `BasicType` registration) using one of the
|
||||
`MetadataBuilder#applyBasicType` methods or the `MetadataBuilder#applyTypes` method during bootstrap.
|
||||
For more details, see the <<basic-custom>> section.
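A sketch of such a registration during bootstrap, assuming a hypothetical `FizzywigType` that implements `org.hibernate.type.BasicType` (in the spirit of the custom type examples below):

[source,java]
----
Metadata metadata = sources.getMetadataBuilder()
		// register (or override) the BasicType under the given registry key(s)
		.applyBasicType( new FizzywigType(), Fizzywig.class.getName() )
		.build();
----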
|
||||
|
||||
[[basic-explicit]]
|
||||
==== Explicit BasicTypes
|
||||
|
||||
Sometimes you want a particular attribute to be handled differently.
|
||||
Occasionally Hibernate will implicitly pick a `BasicType` that you do not want (and for some reason you do not want to adjust the `BasicTypeRegistry`).
|
||||
|
||||
In these cases you must explicitly tell Hibernate the `BasicType` to use, via the `org.hibernate.annotations.Type` annotation.
|
||||
|
||||
.Using `@org.hibernate.annotations.Type`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/explicitType.java[]
|
||||
----
|
||||
====
|
||||
|
||||
This tells Hibernate to store the Strings as nationalized data.
|
||||
This is just for illustration purposes; for better ways to indicate nationalized character data see the <<basic-nationalized>> section.

Additionally, the description is to be handled as a LOB. Again, for better ways to indicate LOBs see the <<basic-lob>> section.
|
||||
|
||||
The `org.hibernate.annotations.Type#type` attribute can name any of the following:
|
||||
|
||||
* Fully qualified name of any `org.hibernate.type.Type` implementation
|
||||
* Any key registered with `BasicTypeRegistry`
|
||||
* The name of any known "type definitions"
|
||||
|
||||
[[basic-custom]]
|
||||
==== Custom BasicTypes
|
||||
|
||||
Hibernate makes it relatively easy for developers to create their own basic type mappings.
For example, you might want to persist properties of type `java.math.BigInteger` to `VARCHAR` columns, or support completely new types.
|
||||
|
||||
There are two approaches to developing a custom BasicType.
|
||||
As a means of illustrating the different approaches, let's consider a use case where we need to support a class called `Fizzywig` from a third party library.
|
||||
Let's assume that Fizzywig naturally stores as a VARCHAR.
|
||||
|
||||
The first approach is to directly implement the BasicType interface.
|
||||
|
||||
.Custom BasicType implementation
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/FizzywigType1.java[]
|
||||
----
|
||||
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/FizzywigType1_reg.java[]
|
||||
----
|
||||
====
|
||||
|
||||
The second approach is to implement the UserType interface.
|
||||
|
||||
.Custom UserType implementation
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/FizzywigType2.java[]
|
||||
----
|
||||
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/FizzywigType2_reg.java[]
|
||||
----
|
||||
====
|
||||
|
||||
For additional information on developing and registering custom types, see the Hibernate Integration Guide.
|
||||
|
||||
[[basic-enums]]
|
||||
==== Mapping enums
|
||||
|
||||
Hibernate supports the mapping of Java enums as basic value types in a number of different ways.
|
||||
|
||||
===== `@Enumerated`
|
||||
|
||||
The original JPA-compliant way to map enums was via the `@Enumerated` annotation (plus `@MapKeyEnumerated` for map keys), which works on the principle that the enum values are stored according to one of 2 strategies indicated by `javax.persistence.EnumType`:
|
||||
|
||||
* `ORDINAL` - stored according to the enum value's ordinal position within the enum class, as indicated by `java.lang.Enum#ordinal`
* `STRING` - stored according to the enum value's name, as indicated by `java.lang.Enum#name`
|
||||
|
||||
.`@Enumerated(ORDINAL)` example
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/EnumeratedOrdinal.java[]
|
||||
----
|
||||
====
|
||||
|
||||
In the ORDINAL example, the gender column is defined as a (nullable) INTEGER type and would hold:
|
||||
|
||||
* `NULL` - null
|
||||
* `0` - MALE
|
||||
* `1` - FEMALE
|
||||
|
||||
.`@Enumerated(STRING)` example
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/EnumeratedString.java[]
|
||||
----
|
||||
====
|
||||
|
||||
In the STRING example, the gender column is defined as a (nullable) VARCHAR type and would hold:
|
||||
|
||||
* `NULL` - null
|
||||
* `MALE` - MALE
|
||||
* `FEMALE` - FEMALE
|
||||
|
||||
[[basic-attribute-converter]]
|
||||
===== AttributeConverter
|
||||
|
||||
You can also map enums in a JPA-compliant way using a JPA 2.1 `AttributeConverter`.
|
||||
Let's revisit the Gender enum example, but instead we want to store the more standardized `'M'` and `'F'` codes.
|
||||
|
||||
.Enum mapping with AttributeConverter example
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/EnumAttributeConverter.java[]
|
||||
----
|
||||
====
|
||||
|
||||
Here, the gender column is defined as a CHAR type and would hold:
|
||||
|
||||
* `NULL` - null
|
||||
* `'M'` - MALE
|
||||
* `'F'` - FEMALE
|
||||
|
||||
For additional details on using AttributeConverters, see <<basic-jpa-convert>> section.
|
||||
|
||||
Note that JPA explicitly disallows the use of an AttributeConverter with an attribute marked as `@Enumerated`.
|
||||
So if using the AttributeConverter approach, be sure to not mark the attribute as `@Enumerated`.
|
||||
|
||||
===== Custom type
|
||||
|
||||
You can also map enums using a Hibernate custom type mapping.
|
||||
Let's again revisit the Gender enum example, this time using a custom Type to store the more standardized `'M'` and `'F'` codes.
|
||||
|
||||
.Enum mapping with custom Type example
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/EnumCustomType.java[]
|
||||
----
|
||||
====
|
||||
|
||||
Again, the gender column is defined as a CHAR type and would hold:
|
||||
|
||||
* `NULL` - null
|
||||
* `'M'` - MALE
|
||||
* `'F'` - FEMALE
|
||||
|
||||
For additional details on using custom types, see the <<basic-custom>> section.
|
||||
|
||||
[[basic-lob]]
|
||||
==== Mapping LOBs
|
||||
|
||||
Mapping LOBs (database Large Objects) comes in two forms: those using the JDBC locator types and those materializing the LOB data.
|
||||
|
||||
JDBC LOB locators exist to allow efficient access to the LOB data.
|
||||
They allow the JDBC driver to stream parts of the LOB data as needed, potentially freeing up memory space.
|
||||
However they can be unnatural to deal with and have certain limitations.
|
||||
For example, a LOB locator is only portably valid during the duration of the transaction in which it was obtained.
|
||||
|
||||
The idea of materialized LOBs is to trade the potential efficiency (not all drivers handle LOB data efficiently) for a more natural programming paradigm, using familiar Java types such as `String` or `byte[]` for these LOBs.
|
||||
|
||||
The materialized form deals with the entire LOB contents in memory, whereas LOB locators (in theory) allow streaming parts of the LOB contents into memory as needed.
|
||||
|
||||
The JDBC LOB locator types include:
|
||||
|
||||
* `java.sql.Blob`
|
||||
* `java.sql.Clob`
|
||||
* `java.sql.NClob`
|
||||
|
||||
Mapping materialized forms of these LOB values would use more familiar Java types such as String, char[], byte[], etc.
|
||||
The trade off for "more familiar" is usually performance.
|
||||
|
||||
For a first look, let's assume we have a CLOB column that we would like to map (NCLOB character LOB data will be covered in the <<basic-nationalized>> section).
|
||||
|
||||
.CLOB - SQL
|
||||
====
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/basic/Clob.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
Let's first map this using the JDBC locator.
|
||||
|
||||
.CLOB - locator mapping
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/ClobLocator.java[]
|
||||
----
|
||||
====
|
||||
|
||||
We could also map a materialized form.
|
||||
|
||||
.CLOB - materialized mapping
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/ClobMaterialized.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
How JDBC deals with LOB data varies from driver to driver.
|
||||
Hibernate tries to handle all these variances for you.
|
||||
However, some drivers do not allow Hibernate to always do that in an automatic fashion (looking directly at you, PostgreSQL JDBC drivers).
|
||||
In such cases you may have to do some extra work to get LOBs working. Such discussions are, however, beyond the scope of this guide.
|
||||
====
|
||||
|
||||
We might even want the materialized data as a char array (for some crazy reason).
|
||||
|
||||
.CLOB - materialized char[] mapping
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/ClobMaterializedCharArray.java[]
|
||||
----
|
||||
====
|
||||
|
||||
We'd map BLOB data in a similar fashion.
|
||||
|
||||
.BLOB - SQL
|
||||
====
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/basic/Blob.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
Let's first map this using the JDBC locator.
|
||||
|
||||
.BLOB - locator mapping
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/BlobLocator.java[]
|
||||
----
|
||||
====
|
||||
|
||||
We could also map a materialized BLOB form.
|
||||
|
||||
.BLOB - materialized mapping
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/BlobMaterialized.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[[basic-nationalized]]
|
||||
==== Mapping Nationalized Character Data
|
||||
|
||||
JDBC 4 added the ability to explicitly handle nationalized character data.
|
||||
To this end it added specific nationalized character data types.
|
||||
|
||||
* `NCHAR`
|
||||
* `NVARCHAR`
|
||||
* `LONGNVARCHAR`
|
||||
* `NCLOB`
|
||||
|
||||
To map a specific attribute to a nationalized variant data type, Hibernate defines the `@Nationalized` annotation.
|
||||
|
||||
.NVARCHAR mapping
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/NVARCHAR.java[]
|
||||
----
|
||||
====
|
||||
|
||||
.NCLOB (locator) mapping
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/NCLOB_locator.java[]
|
||||
----
|
||||
====
|
||||
|
||||
.NCLOB (materialized) mapping
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/NCLOB_materialized.java[]
|
||||
----
|
||||
====
|
||||
|
||||
If your application and database are entirely nationalized, you may instead want to enable nationalized character data as the default.
|
||||
You can do this via the `hibernate.use_nationalized_character_data` setting or by calling `MetadataBuilder#enableGlobalNationalizedCharacterDataSupport` during bootstrap.
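
For instance, a minimal native-bootstrap sketch (the configuration resource and the `Product` entity are placeholders) might enable the setting like this:

.Enabling nationalized character data during bootstrap
====
[source,java]
----
StandardServiceRegistry standardRegistry = new StandardServiceRegistryBuilder()
    .configure( "hibernate.cfg.xml" )
    .build();

Metadata metadata = new MetadataSources( standardRegistry )
    .addAnnotatedClass( Product.class )
    .getMetadataBuilder()
    // treat character data as nationalized (NCHAR/NVARCHAR/NCLOB) by default
    .enableGlobalNationalizedCharacterDataSupport( true )
    .build();
----
====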
|
||||
|
||||
[[basic-uuid]]
|
||||
==== Mapping UUID Values
|
||||
|
||||
Hibernate also allows you to map UUID values, again in a number of ways.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
The default UUID mapping is binary because it offers more efficient storage.
|
||||
However many applications prefer the readability of character storage.
|
||||
To switch the default mapping, simply call `MetadataBuilder.applyBasicType( UUIDCharType.INSTANCE, UUID.class.getName() )`, as shown in the sketch below.
|
||||
====
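
As a minimal sketch (the `standardRegistry` and the `Event` entity are placeholders), the call fits into the native bootstrap like this:

.Switching the default UUID mapping to character-based storage
====
[source,java]
----
Metadata metadata = new MetadataSources( standardRegistry )
    .addAnnotatedClass( Event.class )
    .getMetadataBuilder()
    // store java.util.UUID values as CHAR/VARCHAR instead of the binary default
    .applyBasicType( UUIDCharType.INSTANCE, UUID.class.getName() )
    .build();
----
====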
|
||||
|
||||
==== UUID as binary
|
||||
|
||||
As mentioned, this is the default mapping for UUID attributes.
|
||||
Maps the UUID to a `byte[]` using `java.util.UUID#getMostSignificantBits` and `java.util.UUID#getLeastSignificantBits` and stores that as BINARY data.
|
||||
|
||||
It was chosen as the default simply because it is generally more efficient from a storage perspective.
|
||||
|
||||
==== UUID as (var)char
|
||||
|
||||
Maps the UUID to a String using `java.util.UUID#toString` and `java.util.UUID#fromString` and stores that as CHAR or VARCHAR data.
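
For illustration, here is a minimal sketch (the `Account` entity and its attributes are hypothetical); `uuid-char` is the registered name of `UUIDCharType`:

.UUID attribute mappings
====
[source,java]
----
import java.util.UUID;

import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.annotations.Type;

@Entity
public class Account {

    @Id
    private Integer id;

    // default mapping: stored as BINARY data
    private UUID binaryToken;

    // explicit character-based storage for this attribute only
    @Type( type = "uuid-char" )
    private UUID charToken;
}
----
====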
|
||||
|
||||
==== PostgreSQL-specific UUID
|
||||
|
||||
[IMPORTANT]
|
||||
====
|
||||
When using one of the PostgreSQL Dialects, this becomes the default UUID mapping.
|
||||
====
|
||||
|
||||
Maps the UUID using PostgreSQL's specific UUID data type.
|
||||
The PostgreSQL JDBC driver chooses to map its UUID type to the `OTHER` code.
|
||||
Note that this can cause difficulty as the driver chooses to map many different data types to OTHER.
|
||||
|
||||
==== UUID as identifier
|
||||
|
||||
Hibernate supports using UUID values as identifiers, and they can even be generated on the user's behalf.
|
||||
For details, see the discussion of generators in <<chapters/domain/identifiers.adoc#identifiers,_Identifier generators_>>.
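
As one possible configuration (a hedged sketch, the `Ticket` entity is hypothetical), the Hibernate-provided `uuid2` strategy generates such identifiers:

.UUID identifier
====
[source,java]
----
import java.util.UUID;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;

import org.hibernate.annotations.GenericGenerator;

@Entity
public class Ticket {

    @Id
    @GeneratedValue( generator = "uuid" )
    @GenericGenerator( name = "uuid", strategy = "uuid2" )
    private UUID id;
}
----
====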
|
||||
|
||||
[[basic-datetime]]
|
||||
==== Mapping Date/Time Values
|
||||
|
||||
Hibernate allows various Java Date/Time classes to be mapped as persistent domain model entity properties.
|
||||
The SQL standard defines three Date/Time types:
|
||||
|
||||
DATE:: Represents a calendar date by storing years, months and days. The JDBC equivalent is `java.sql.Date`
|
||||
TIME:: Represents the time of a day and it stores hours, minutes and seconds. The JDBC equivalent is `java.sql.Time`
|
||||
TIMESTAMP:: It stores both a DATE and a TIME plus nanoseconds. The JDBC equivalent is `java.sql.Timestamp`
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
To avoid dependencies on the `java.sql` package, it's common to use the `java.util` or `java.time` Date/Time classes instead.
|
||||
====
|
||||
|
||||
While the `java.sql` classes define a direct association to the SQL Date/Time data types,
|
||||
the `java.util` properties need to explicitly mark the SQL type correlation with the `@Temporal` annotation.
|
||||
This way, a `java.util.Date` or a `java.util.Calendar` can be mapped to either an SQL `DATE`, `TIME` or `TIMESTAMP` type.
|
||||
|
||||
Considering the following entity:
|
||||
|
||||
.`java.util.Date` mapping
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/DateTemporal.java[]
|
||||
----
|
||||
====
|
||||
|
||||
When persisting such an entity:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
DateEvent dateEvent = new DateEvent(new Date());
|
||||
entityManager.persist(dateEvent);
|
||||
----
|
||||
====
|
||||
|
||||
Hibernate generates the following INSERT statement:
|
||||
|
||||
====
|
||||
[source,sql]
|
||||
----
|
||||
INSERT INTO DateEvent
|
||||
( timestamp, id )
|
||||
VALUES ( '2015-12-29', 1 )
|
||||
----
|
||||
====
|
||||
|
||||
Only the year, month, and day fields were saved into the database.
|
||||
|
||||
If we change the `@Temporal` type to TIME:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Temporal(TemporalType.TIME)
|
||||
private Date timestamp;
|
||||
----
|
||||
====
|
||||
|
||||
Hibernate will issue an INSERT statement containing only the hours, minutes, and seconds.
|
||||
|
||||
====
|
||||
[source,sql]
|
||||
----
|
||||
INSERT INTO DateEvent
|
||||
( timestamp, id )
|
||||
VALUES ( '16:51:58', 1 )
|
||||
----
|
||||
====
|
||||
|
||||
When the `@Temporal` type is set to TIMESTAMP:
|
||||
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Temporal(TemporalType.TIMESTAMP)
|
||||
private Date timestamp;
|
||||
----
|
||||
====
|
||||
|
||||
Hibernate will include the DATE, the TIME, and the nanoseconds in the INSERT statement:
|
||||
|
||||
====
|
||||
[source,sql]
|
||||
----
|
||||
INSERT INTO DateEvent
|
||||
( timestamp, id )
|
||||
VALUES ( '2015-12-29 16:54:04.544', 1 )
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
Just like `java.util.Date`, the `java.util.Calendar` requires the `@Temporal` annotation in order to know which JDBC data type should be chosen: DATE, TIME or TIMESTAMP.
|
||||
While a `java.util.Date` simply marks a point in time, a `java.util.Calendar` also takes the default time zone into consideration.
|
||||
====
|
||||
|
||||
[[basic-datetime-java8]]
|
||||
===== Mapping Java 8 Date/Time Values
|
||||
|
||||
Java 8 came with a new Date/Time API, offering support for instant dates, intervals, local and zoned Date/Time immutable instances, bundled in the `java.time` package.
|
||||
Hibernate added support for the new Date/Time API in a new module, which must be included with the following Maven dependency:
|
||||
|
||||
.`hibernate-java8` Maven dependency
|
||||
====
|
||||
[source,xml]
|
||||
----
|
||||
<dependency>
|
||||
<groupId>org.hibernate</groupId>
|
||||
<artifactId>hibernate-java8</artifactId>
|
||||
<version>${hibernate.version}</version>
|
||||
</dependency>
|
||||
----
|
||||
====
|
||||
|
||||
The mapping between the standard SQL Date/Time types and the supported Java 8 Date/Time class types looks as follows (a short mapping sketch is shown after the list):
|
||||
|
||||
DATE:: `java.time.LocalDate`
|
||||
TIME:: `java.time.LocalTime`, `java.time.OffsetTime`
|
||||
TIMESTAMP:: `java.time.Instant`, `java.time.LocalDateTime`, `java.time.OffsetDateTime` and `java.time.ZonedDateTime`
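
For illustration, here is a minimal entity sketch using these mappings (the `Appointment` entity and its attributes are hypothetical):

.Java 8 Date/Time mapping
====
[source,java]
----
import java.time.LocalDate;
import java.time.LocalDateTime;

import javax.persistence.Entity;
import javax.persistence.Id;

@Entity
public class Appointment {

    @Id
    private Long id;

    // mapped to a DATE column
    private LocalDate day;

    // mapped to a TIMESTAMP column
    private LocalDateTime startsAt;
}
----
====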
|
||||
|
||||
[IMPORTANT]
|
||||
====
|
||||
Because the mapping between Java 8 Date/Time classes and the SQL types is implicit, there is no need to specify the `@Temporal` annotation.
|
||||
Setting it on the `java.time` classes throws the following exception:
|
||||
|
||||
----
|
||||
org.hibernate.AnnotationException: @Temporal should only be set on a java.util.Date or java.util.Calendar property
|
||||
----
|
||||
====
|
||||
|
||||
[[basic-jpa-convert]]
|
||||
==== JPA 2.1 AttributeConverters
|
||||
|
||||
Although Hibernate has long been offering <<basic-custom,custom types>>, as a JPA 2.1 provider,
|
||||
it also supports `AttributeConverter`s.
|
||||
|
||||
With a custom `AttributeConverter`, the application developer can map a given JDBC type to an entity basic type.
|
||||
|
||||
In the following example, a `java.time.Period` is going to be mapped to a `VARCHAR` database column.
|
||||
|
||||
.`java.time.Period` custom `AttributeConverter`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/PeriodStringConverter.java[]
|
||||
----
|
||||
====
|
||||
|
||||
To make use of this custom converter, the `@Convert` annotation must decorate the entity attribute.
|
||||
|
||||
.Entity using the custom `AttributeConverter`
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/basic/PeriodStringConvert.java[]
|
||||
----
|
||||
====
|
||||
|
||||
When persisting such an entity, Hibernate will do the type conversion based on the `AttributeConverter` logic:
|
||||
|
||||
.Persisting entity using the custom `AttributeConverter`
|
||||
====
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/basic/PeriodStringConvert.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[[mapping-quoted-identifiers]]
|
||||
==== SQL quoted identifiers
|
||||
|
||||
You can force Hibernate to quote an identifier in the generated SQL by enclosing the table or column name in backticks in the mapping document.
|
||||
Hibernate will use the correct quotation style for the SQL `Dialect`.
|
||||
This is usually double quotes, but SQL Server uses brackets and MySQL uses backticks.
|
||||
|
||||
.Quoting column names
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Entity @Table(name="`Line Item`")
|
||||
class LineItem {
|
||||
|
||||
@Id
|
||||
@Column(name="`Item Id`")
|
||||
private Integer id;
|
||||
|
||||
@Column(name="`Item #`")
|
||||
private Integer itemNumber;
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
[[mapping-generated]]
|
||||
==== Generated properties
|
||||
|
||||
Generated properties are properties that have their values generated by the database.
|
||||
Typically, Hibernate applications needed to `refresh` objects that contain any properties for which the database was generating values.
|
||||
Marking properties as generated, however, lets the application delegate this responsibility to Hibernate.
|
||||
When Hibernate issues an SQL INSERT or UPDATE for an entity that has defined generated properties, it immediately issues a select afterwards to retrieve the generated values.
|
||||
|
||||
Properties marked as generated must additionally be _non-insertable_ and _non-updateable_.
|
||||
Only `@Version` and `@Basic` types can be marked as generated.
|
||||
|
||||
`never` (the default):: the given property value is not generated within the database.
|
||||
`insert`:: the given property value is generated on insert, but is not regenerated on subsequent updates. Properties like _creationTimestamp_ fall into this category.
|
||||
`always`:: the property value is generated both on insert and on update.
|
||||
|
||||
To mark a property as generated, use the Hibernate-specific `@Generated` annotation.
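
As a minimal sketch, assuming the database populates a `created_on` column itself (for example via a DEFAULT clause or a trigger):

.`@Generated` mapping usage
====
[source,java]
----
import java.util.Date;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.annotations.Generated;
import org.hibernate.annotations.GenerationTime;

@Entity
public class Document {

    @Id
    private Integer id;

    // generated by the database on insert, so the column is excluded from
    // INSERT/UPDATE statements and re-read by Hibernate afterwards
    @Generated( GenerationTime.INSERT )
    @Column( name = "created_on", insertable = false, updatable = false )
    private Date createdOn;
}
----
====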
|
||||
|
||||
[[mapping-column-read-and-write]]
|
||||
==== Column transformers: read and write expressions
|
||||
|
||||
Hibernate allows you to customize the SQL it uses to read and write the values of columns mapped to `@Basic` types.
|
||||
For example, if your database provides a set of data encryption functions, you can invoke them for individual columns like this:
|
||||
|
||||
.`@ColumnTransformer` example
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Entity
|
||||
class CreditCard {
|
||||
|
||||
@Id
|
||||
private Integer id;
|
||||
|
||||
@Column(name="credit_card_num")
|
||||
@ColumnTransformer(
|
||||
read="decrypt(credit_card_num)",
|
||||
write="encrypt(?)"
|
||||
)
|
||||
private String creditCardNumber;
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
You can use the plural form `@ColumnTransformers` if more than one column needs to define either of these rules.
|
||||
====
|
||||
|
||||
If a property uses more than one column, you must use the `forColumn` attribute to specify which column the expressions are targeting.
|
||||
|
||||
.`@ColumnTransformer` `forColumn` attribute usage
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Entity
|
||||
class User {
|
||||
|
||||
@Id
|
||||
private Integer id;
|
||||
|
||||
@Type(type="com.acme.type.CreditCardType")
|
||||
@Columns( {
|
||||
@Column(name="credit_card_num"),
|
||||
@Column(name="exp_date")
|
||||
})
|
||||
@ColumnTransformer(
|
||||
forColumn="credit_card_num",
|
||||
read="decrypt(credit_card_num)",
|
||||
write="encrypt(?)"
|
||||
)
|
||||
private CreditCard creditCard;
|
||||
}
|
||||
----
|
||||
====
|
||||
|
||||
Hibernate applies the custom expressions automatically whenever the property is referenced in a query.
|
||||
This functionality is similar to a derived-property <<mapping-column-formula>> with two differences:
|
||||
|
||||
* The property is backed by one or more columns that are exported as part of automatic schema generation.
|
||||
* The property is read-write, not read-only.
|
||||
|
||||
The `write` expression, if specified, must contain exactly one '?' placeholder for the value.
|
||||
|
||||
[[mapping-column-formula]]
|
||||
==== Formula
|
||||
|
||||
Sometimes, you want the database to do some computation for you rather than doing it in the JVM; you might also want to create some kind of virtual column.
|
||||
You can use a SQL fragment (aka formula) instead of mapping a property to a column. This kind of property is read-only (its value is calculated by your formula fragment).
|
||||
|
||||
.`@Formula` mapping usage
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Formula("obj_length * obj_height * obj_width")
|
||||
private long objectVolume;
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
The SQL fragment can be as complex as you want and can even include subselects, as illustrated by the sketch after this note.
|
||||
====
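
For illustration, a hedged sketch of such a subselect formula (the `bid` table and its `item_id` column are hypothetical):

.`@Formula` with a subselect
====
[source,java]
----
// read-only property computed by the database using a correlated subselect
@Formula("(select count(b.id) from bid b where b.item_id = id)")
private long bidCount;
----
====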
|
||||
|
||||
[[mapping-column-any]]
|
||||
==== Any
|
||||
|
||||
There is one more type of property mapping.
|
||||
The `@Any` mapping defines a polymorphic association to classes from multiple tables.
|
||||
This type of mapping requires more than one column.
|
||||
The first column contains the type of the associated entity.
|
||||
The remaining columns contain the identifier.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
It is impossible to specify a foreign key constraint for this kind of association.
|
||||
This is not the usual way of mapping polymorphic associations and you should use this only in special cases (e.g. audit logs, user session data, etc).
|
||||
====
|
||||
|
||||
The `@Any` annotation describes the column holding the metadata information.
|
||||
To link the value of the metadata information and an actual entity type, the `@AnyMetaDef` and `@AnyMetaDefs` annotations are used.
|
||||
The `metaType` attribute allows the application to specify a custom type that maps database column values to persistent classes that have identifier properties of the type specified by `idType`.
|
||||
You must specify the mapping from values of the metaType to class names.
|
||||
|
||||
.`@Any` mapping usage
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
@Any(
|
||||
metaColumn = @Column( name = "property_type" ),
|
||||
fetch=FetchType.EAGER
|
||||
)
|
||||
@AnyMetaDef(
|
||||
idType = "integer",
|
||||
metaType = "string",
|
||||
metaValues = {
|
||||
@MetaValue( value = "S", targetEntity = StringProperty.class ),
|
||||
@MetaValue( value = "I", targetEntity = IntegerProperty.class )
|
||||
}
|
||||
)
|
||||
@JoinColumn( name = "property_id" )
|
||||
private Property mainProperty;
|
||||
----
|
||||
====
|
||||
|
||||
Note that `@AnyMetaDef` can be shared and reused. It is recommended to place it as package metadata in this case.
|
||||
|
||||
.`@AnyMetaDef` mapping usage
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
//on a package
|
||||
@AnyMetaDef( name = "property",
|
||||
idType = "integer",
|
||||
metaType = "string",
|
||||
metaValues = {
|
||||
@MetaValue( value = "S", targetEntity = StringProperty.class ),
|
||||
@MetaValue( value = "I", targetEntity = IntegerProperty.class )
|
||||
}
|
||||
)
|
||||
package org.hibernate.test.annotations.any;
|
||||
|
||||
//in a class
|
||||
@Any(
|
||||
metaDef="property",
|
||||
metaColumn = @Column( name = "property_type" ),
|
||||
fetch=FetchType.EAGER
|
||||
)
|
||||
@JoinColumn( name = "property_id" )
|
||||
private Property mainProperty;
|
||||
----
|
||||
====
|
||||
|
@ -0,0 +1,582 @@
|
||||
[[collections]]
|
||||
=== Collections
|
||||
:sourcedir: extras
|
||||
|
||||
Naturally, Hibernate also allows you to persist collections.
|
||||
These persistent collections can contain almost any other Hibernate type, including: basic types, custom types, components and references to other entities.
|
||||
In this context, the distinction between value and reference semantics is very important.
|
||||
An object in a collection might be handled with _value_ semantics (its life cycle fully depending on the collection owner),
|
||||
or it might be a reference to another entity with its own life cycle.
|
||||
In the latter case, only the _link_ between the two objects is considered to be a state held by the collection.
|
||||
|
||||
The owner of the collection is always an entity, even if the collection is defined by an embeddable type.
|
||||
Collections form one/many-to-many associations between types, so there can be:
|
||||
|
||||
- value type collections
|
||||
- embeddable type collections
|
||||
- entity collections
|
||||
|
||||
Hibernate uses its own collection implementations which are enriched with lazy-loading, caching or state change detection semantics.
|
||||
For this reason, persistent collections must be declared as an interface type.
|
||||
The actual interface might be `java.util.Collection`, `java.util.List`, `java.util.Set`, `java.util.Map`, `java.util.SortedSet`, `java.util.SortedMap` or even other object types (meaning you will have to write an implementation of `org.hibernate.usertype.UserCollectionType`).
|
||||
|
||||
As the following example demonstrates, it's important to use the interface type and not the collection implementation, as declared in the entity mapping.
|
||||
|
||||
.Hibernate uses its own collection implementations
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/CollectionProxy.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
It is important that collections be defined using the appropriate Java Collections Framework interface rather than a specific implementation.
|
||||
From a theoretical perspective, this just follows good design principles.
|
||||
From a practical perspective, Hibernate (like other persistence providers) will use its own collection implementations, which conform to the Java Collections Framework interfaces.
|
||||
====
|
||||
|
||||
The persistent collections injected by Hibernate behave like `ArrayList`, `HashSet`, `TreeSet`, `HashMap` or `TreeMap`, depending on the interface type.
|
||||
|
||||
[[collections-synopsis]]
|
||||
==== Collections as a value type
|
||||
|
||||
Value and embeddable type collections behave similarly to simple value types because they are automatically persisted when referenced by a persistent object and automatically deleted when unreferenced.
|
||||
If a collection is passed from one persistent object to another, its elements might be moved from one table to another.
|
||||
|
||||
[IMPORTANT]
|
||||
====
|
||||
Two entities cannot share a reference to the same collection instance.
|
||||
Collection-valued properties do not support null value semantics because Hibernate does not distinguish between a null collection reference and an empty collection.
|
||||
====
|
||||
|
||||
[[collections-value]]
|
||||
==== Collections of value types
|
||||
|
||||
Collections of value type include basic and embeddable types.
|
||||
Collections cannot be nested, and, when used in collections, embeddable types are not allowed to define other collections.
|
||||
|
||||
For collections of value types, JPA 2.0 defines the `@ElementCollection` annotation.
|
||||
The lifecycle of the value-type collection is entirely controlled by its owning entity.
|
||||
|
||||
Considering the previous example mapping, when clearing the phone collection, Hibernate deletes all the associated phones.
|
||||
When adding a new element to the value type collection, Hibernate issues a new insert statement.
|
||||
|
||||
.Value type collection lifecycle
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/ElementCollectionLifecycle.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/ElementCollectionLifecycle.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
While removing all elements or adding new ones is rather straightforward, removing a certain entry actually requires reconstructing the whole collection from scratch.
|
||||
|
||||
.Removing collection elements
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/ElementCollectionLifecycleRemove.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/ElementCollectionLifecycleRemove.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
Depending on the number of elements, this behavior might not be efficient if many elements need to be deleted and reinserted into the database table.
|
||||
A workaround is to use an `@OrderColumn`, which, although not as efficient as when using the actual link table primary key, might improve the efficiency of the remove operations.
|
||||
|
||||
.Removing collection elements using the order column
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/ElementCollectionOrderColumnLifecycleRemove.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/ElementCollectionOrderColumnLifecycleRemove.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
The `@OrderColumn` column works best when removing from the tail of the collection, as it only requires a single delete statement.
|
||||
Removing from the head or the middle of the collection requires deleting the extra elements and updating the remaining ones to preserve element order.
|
||||
====
|
||||
|
||||
Embeddable type collections behave the same way as value type collections.
|
||||
Adding embeddables to the collection triggers the associated insert statements and removing elements from the collection will generate delete statements.
|
||||
|
||||
.Embeddable type collections
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/EmbeddableElementCollectionLifecycle.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/EmbeddableElementCollectionLifecycle.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[[collections-entity]]
|
||||
==== Collections of entities
|
||||
|
||||
While value type collections can only form a one-to-many association between an owner entity and multiple basic or embeddable types,
|
||||
entity collections can represent both <<chapters/domain/associations.adoc#associations-one-to-many,@OneToMany>> and <<chapters/domain/associations.adoc#associations-many-to-many,@ManyToMany>> associations.
|
||||
|
||||
From a relational database perspective, associations are defined by the foreign key side (the child-side).
|
||||
With value type collections, only the entity can control the association (the parent-side), but for a collection of entities, both sides of the association are managed by the persistence context.
|
||||
|
||||
For this reason, entity collections can be divided into two main categories: unidirectional and bidirectional associations.
|
||||
Unidirectional associations are very similar to value type collections, since only the parent side controls this relationship.
|
||||
Bidirectional associations are trickier since, even if both sides need to be in sync at all times, only one side is responsible for managing the association.
|
||||
A bidirectional association has an _owning_ side and an _inverse (mappedBy)_ side.
|
||||
|
||||
Another way of categorizing entity collections is by the underlying collection type, and so we can have:
|
||||
|
||||
* bags
|
||||
* indexed lists
|
||||
* sets
|
||||
* sorted sets
|
||||
* maps
|
||||
* sorted maps
|
||||
* arrays
|
||||
|
||||
In the following sections, we will go through all these collection types and discuss both unidirectional and bidirectional associations.
|
||||
|
||||
[[collections-bag]]
|
||||
==== Bags
|
||||
|
||||
Bags are unordered lists and we can have unidirectional bags or bidirectional ones.
|
||||
|
||||
[[collections-unidirectional-bag]]
|
||||
===== Unidirectional bags
|
||||
|
||||
The unidirectional bag is mapped using a single `@OneToMany` annotation on the parent side of the association.
|
||||
Behind the scenes, Hibernate requires an association table to manage the parent-child relationship, as we can see in the following example:
|
||||
|
||||
.Unidirectional bag
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalBag.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalBag.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
Because both the parent and the child sides are entities, the persistence context manages each entity separately.
|
||||
Cascades can propagate an entity state transition from a parent entity to its children.
|
||||
====
|
||||
|
||||
By marking the parent side with the `CascadeType.ALL` attribute, the unidirectional association lifecycle becomes very similar to that of a value type collection.
|
||||
|
||||
.Unidirectional bag lifecycle
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalBagLifecyclePersist.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalBagLifecyclePersist.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
In the example above, once the parent entity is persisted, the child entities are going to be persisted as well.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
Just like value type collections, unidirectional bags are not as efficient when it comes to modifying the collection structure (removing or reshuffling elements).
|
||||
Because the parent-side cannot uniquely identify each individual child, Hibernate might delete all child table rows associated with the parent entity and re-add them according to the current collection state.
|
||||
====
|
||||
|
||||
[[collections-bidirectional-bag]]
|
||||
===== Bidirectional bags
|
||||
|
||||
The bidirectional bag is the most common type of entity collection.
|
||||
The `@ManyToOne` side is the owning side of the bidirectional bag association, while the `@OneToMany` is the _inverse_ side, being marked with the `mappedBy` attribute.
|
||||
|
||||
.Bidirectional bag
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/BidirectionalBag.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/BidirectionalBag.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
.Bidirectional bag lifecycle
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/BidirectionalBagLifecycle.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/BidirectionalBagLifecycle.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
.Bidirectional bag with orphan removal
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/BidirectionalBagOrphanRemoval.java[]
|
||||
----
|
||||
====
|
||||
|
||||
When rerunning the previous example, the child will get removed because the parent-side propagates the removal upon disassociating the child entity reference.
|
||||
|
||||
[[collections-list]]
|
||||
==== Ordered Lists
|
||||
|
||||
Although they use the `List` interface on the Java side, bags don't retain element order.
|
||||
To preserve the collection element order, there are two possibilities:
|
||||
|
||||
`@OrderBy`:: the collection is ordered upon retrieval using a child entity property
|
||||
`@OrderColumn`:: the collection uses a dedicated order column in the collection link table
|
||||
|
||||
[[collections-unidirectional-ordered-list]]
|
||||
===== Unidirectional ordered lists
|
||||
|
||||
When using the `@OrderBy` annotation, the mapping looks as follows:
|
||||
|
||||
.Unidirectional `@OrderBy` list
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalOrderByList.java[]
|
||||
----
|
||||
====
|
||||
|
||||
The database mapping is the same as with the <<collections-unidirectional-bag>> example, so it won't be repeated.
|
||||
Upon fetching the collection, Hibernate generates the following select statement:
|
||||
|
||||
.Unidirectional `@OrderBy` list select statement
|
||||
====
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalOrderByListSelect.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
The child table column is used to order the list elements.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
The `@OrderBy` annotation can take multiple entity properties, and each property can take an ordering direction too (e.g. `@OrderBy("name ASC, type DESC")`).
|
||||
|
||||
If no property is specified (e.g. `@OrderBy`), the primary key of the child entity table is used for ordering.
|
||||
====
|
||||
|
||||
Another ordering option is to use the `@OrderColumn` annotation:
|
||||
|
||||
.Unidirectional `@OrderColumn` list
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalOrderColumnList.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalOrderColumnList.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
This time, the link table takes the `order_id` column and uses it to materialize the collection element order.
|
||||
When fetching the list, the following select query is executed:
|
||||
|
||||
.Unidirectional `@OrderColumn` list select statement
|
||||
====
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalOrderColumnListSelect.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
With the `order_id` column in place, Hibernate can order the list in memory after it is fetched from the database.
|
||||
|
||||
[[collections-bidirectional-ordered-list]]
|
||||
===== Bidirectional ordered lists
|
||||
|
||||
The mapping is similar to the <<collections-bidirectional-bag>> example, except that the parent side is going to be annotated with either `@OrderBy` or `@OrderColumn`.
|
||||
|
||||
.Bidirectional `@OrderBy` list
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/BidirectionalOrderByList.java[]
|
||||
----
|
||||
====
|
||||
|
||||
Just like with the unidirectional `@OrderBy` list, the `number` column is used to order the elements at the SQL level.
|
||||
|
||||
When using the `@OrderColumn` annotation, the `order_id` column is going to be embedded in the child table:
|
||||
|
||||
.Bidirectional `@OrderColumn` list
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/BidirectionalOrderColumnList.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/BidirectionalOrderColumnList.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
When fetching the collection, Hibernate will use the fetched order column values to sort the elements according to the `@OrderColumn` mapping.
|
||||
|
||||
[[collections-set]]
|
||||
==== Sets
|
||||
|
||||
Sets are collections that don't allow duplicate entries and Hibernate supports both the unordered `Set` and the natural-ordering `SortedSet`.
|
||||
|
||||
[[collections-unidirectional-set]]
|
||||
===== Unidirectional sets
|
||||
|
||||
The unidirectional set uses a link table to hold the parent-child associations and the entity mapping looks as follows:
|
||||
|
||||
.Unidirectional set
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalSet.java[]
|
||||
----
|
||||
====
|
||||
|
||||
The unidirectional set lifecycle is similar to that of the <<collections-unidirectional-bag>>, so it can be omitted.
|
||||
The only difference is that `Set` doesn't allow duplicates, but this constraint is enforced by the Java object contract rather than the database mapping.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
When using sets, it's very important to supply proper equals/hashCode implementations for child entities.
|
||||
In the absence of a custom equals/hashCode implementation logic, Hibernate will use the default Java reference-based object equality, which might yield unexpected results when mixing detached and managed object instances (a minimal sketch follows this note).
|
||||
====
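
As a minimal sketch of such an implementation, assuming a `Phone` child entity whose `number` attribute acts as a natural key:

.Natural-key based equals/hashCode
====
[source,java]
----
import java.util.Objects;

import javax.persistence.Entity;
import javax.persistence.Id;

@Entity
public class Phone {

    @Id
    private Long id;

    private String number;

    // equality is based on the natural key rather than the database identifier,
    // so detached and managed instances representing the same phone are equal
    @Override
    public boolean equals(Object o) {
        if ( this == o ) {
            return true;
        }
        if ( !( o instanceof Phone ) ) {
            return false;
        }
        Phone phone = (Phone) o;
        return Objects.equals( number, phone.number );
    }

    @Override
    public int hashCode() {
        return Objects.hash( number );
    }
}
----
====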
|
||||
|
||||
[[collections-bidirectional-set]]
|
||||
===== Bidirectional sets
|
||||
|
||||
Just like bidirectional bags, the bidirectional set doesn't use a link table, and the child table has a foreign key referencing the parent table primary key.
|
||||
The lifecycle is just like with bidirectional bags, except that duplicates are filtered out.
|
||||
|
||||
.Bidirectional set
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/BidirectionalSet.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[[collections-sorted-set]]
|
||||
==== Sorted sets
|
||||
|
||||
For sorted sets, the entity mapping must use the `SortedSet` interface instead.
|
||||
According to the `SortedSet` contract, all elements must implement the `Comparable` interface and therefore provide the sorting logic.
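
For illustration, a minimal sketch of a child entity providing its natural ordering (the `Phone` entity and its `number` attribute are assumptions):

.Child entity implementing `Comparable`
====
[source,java]
----
import javax.persistence.Entity;
import javax.persistence.Id;

@Entity
public class Phone implements Comparable<Phone> {

    @Id
    private Long id;

    private String number;

    // natural ordering used by the SortedSet when @SortNatural is applied
    @Override
    public int compareTo(Phone other) {
        return number.compareTo( other.number );
    }
}
----
====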
|
||||
|
||||
[[collections-unidirectional-sorted-set]]
|
||||
===== Unidirectional sorted sets
|
||||
|
||||
A `SortedSet` that relies on the natural sorting order given by the child element `Comparable` implementation logic must be annotated with the `@SortNatural` Hibernate annotation.
|
||||
|
||||
.Unidirectional natural sorted set
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalNaturalSortedSet.java[]
|
||||
----
|
||||
====
|
||||
|
||||
The lifecycle and the database mapping are identical to the <<collections-unidirectional-bag>>, so they are intentionally omitted.
|
||||
|
||||
To provide a custom sorting logic, Hibernate also provides a `@SortComparator` annotation:
|
||||
|
||||
.Unidirectional custom comparator sorted set
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalCustomSortedSet.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[[collections-bidirectional-sorted-set]]
|
||||
===== Bidirectional sorted sets
|
||||
|
||||
The `@SortNatural` and `@SortComparator` work the same for bidirectional sorted sets too:
|
||||
|
||||
.Bidirectional natural sorted set
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/BidirectionalSortedSet.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[[collections-map]]
|
||||
==== Maps
|
||||
|
||||
A `java.util.Map` is a ternary association because it requires a parent entity, a map key, and a value.
|
||||
An entity can either be a map key or a map value, depending on the mapping.
|
||||
Hibernate allows using the following map keys:
|
||||
|
||||
`MapKeyColumn`:: for value type maps, the map key is a column in the link table that defines the grouping logic
|
||||
`MapKey`:: the map key is either the primary key or another property of the entity stored as a map entry value
|
||||
`MapKeyEnumerated`:: the map key is an `Enum` of the target child entity
|
||||
`MapKeyTemporal`:: the map key is a `Date` or a `Calendar` of the target child entity
|
||||
`MapKeyJoinColumn`:: the map key is an entity mapped as an association in the child entity that's stored as a map entry key
|
||||
|
||||
[[collections-map-value-type]]
|
||||
===== Value type maps
|
||||
|
||||
A map of value type must use the `@ElementCollection` annotation, just like value type lists, bags or sets.
|
||||
|
||||
.Value type map with an entity as a map key
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/ElementCollectionMap.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/ElementCollectionMap.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
Adding entries to the map generates the following SQL statements:
|
||||
|
||||
.Adding value type map entries
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/ElementCollectionMapPersist.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/ElementCollectionMapPersist.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[[collections-map-unidirectional]]
|
||||
===== Unidirectional maps
|
||||
|
||||
A unidirectional map exposes a parent-child association from the parent-side only.
|
||||
The following example shows a unidirectional map which also uses a `@MapKeyTemporal` annotation.
|
||||
The map key is a timestamp and it's taken from the child entity table.
|
||||
|
||||
.Unidirectional Map
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalMap.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/UnidirectionalMap.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[[collections-map-bidirectional]]
|
||||
===== Bidirectional maps
|
||||
|
||||
Like most bidirectional associations, this relationship is owned by the child-side while the parent is the inverse side and can propagate its own state transitions to the child entities.
|
||||
In the following example, you can see that `@MapKeyEnumerated` was used so that the `Phone` enumeration becomes the map key.
|
||||
|
||||
.Bidirectional Map
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/BidirectionalMap.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/BidirectionalMap.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[[collections-array]]
|
||||
==== Arrays
|
||||
|
||||
When it comes to arrays, there is quite a difference between Java arrays and relational database array types (e.g. VARRAY, ARRAY).
|
||||
First, not all database systems implement the SQL-99 ARRAY type, and, for this reason, Hibernate doesn't support native database array types.
|
||||
Second, Java arrays are relevant for basic types only, since storing multiple embeddables or entities should always be done using the Java Collection API.
|
||||
|
||||
[[collections-array-binary]]
|
||||
==== Arrays as binary
|
||||
|
||||
By default, Hibernate will choose a BINARY type, as supported by the current `Dialect`.
|
||||
|
||||
.Binary arrays
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/BaseTypeBinaryArray.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/BaseTypeBinaryArray.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
[[collections-as-basic]]
|
||||
==== Collections as basic value type
|
||||
|
||||
Notice how all the previous examples explicitly mark the collection attribute as either `@ElementCollection`, `@OneToMany` or `@ManyToMany`.
|
||||
Collections not marked as such require a custom Hibernate `Type` and the collection elements must be stored in a single database column.
|
||||
|
||||
This is sometimes beneficial. Consider a use-case such as a `VARCHAR` column that represents a delimited list/set of Strings.
|
||||
|
||||
.Comma delimited collection
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/CommaDelimitedStringCollection.java[]
|
||||
----
|
||||
====
|
||||
|
||||
The developer can use the comma-delimited collection like any other collection we've discussed so far and Hibernate will take care of the type transformation part.
|
||||
The collection itself behaves like any other basic value type, as its lifecycle is bound to its owner entity.
|
||||
|
||||
.Comma delimited collection lifecycle
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/collections/CommaDelimitedStringCollectionLifecycle.java[]
|
||||
----
|
||||
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/collections/CommaDelimitedStringCollectionLifecycle.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
See the Hibernate Integration Guide for more details on developing custom value type mappings.
|
@ -0,0 +1,44 @@
|
||||
[[dynamic-model]]
|
||||
=== Dynamic Model
|
||||
:sourcedir: extras
|
||||
|
||||
[IMPORTANT]
|
||||
====
|
||||
JPA only acknowledges the entity model mapping, so if you are concerned about JPA provider portability it's best to stick to the strict POJO model.
|
||||
On the other hand, Hibernate can work with both POJO entities as well as with dynamic entity models.
|
||||
====
|
||||
|
||||
[[mapping-model-dynamic]]
|
||||
==== Dynamic mapping models
|
||||
|
||||
Persistent entities do not necessarily have to be represented as POJO/JavaBean classes.
|
||||
Hibernate also supports dynamic models (using `Map`s of `Map`s at runtime).
|
||||
With this approach, you do not write persistent classes, only mapping files.
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
The mapping of dynamic models is beyond the scope of this chapter.
|
||||
We will discuss using such models with Hibernate, in the <<mapping, next chapter>>.
|
||||
====
|
||||
|
||||
A given entity has just one entity mode within a given SessionFactory.
|
||||
This is a change from previous versions, which allowed defining multiple entity modes for an entity and selecting which one to load.
|
||||
Entity modes can now be mixed within a domain model; a dynamic entity might reference a POJO entity, and vice versa.
|
||||
|
||||
.Working with Dynamic Domain Models
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/dynamic/listing10.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
The main advantage of dynamic models is quick turnaround time for prototyping without the need for entity class implementation.
|
||||
The main downside is that you lose compile-time type checking and will likely deal with many exceptions at runtime.
|
||||
However, as a result of the Hibernate mapping, the database schema can easily be normalized and sound, allowing you to add a proper domain model implementation on top later on.
|
||||
|
||||
It is also interesting to note that dynamic models are great for certain integration use cases.
|
||||
Envers, for example, makes extensive use of dynamic models to represent the historical data.
|
||||
====
|
@ -0,0 +1,164 @@
|
||||
[[embeddables]]
|
||||
=== Embeddable types
|
||||
:sourcedir: extras
|
||||
|
||||
Historically Hibernate called these components.
|
||||
JPA calls them embeddables.
|
||||
Either way the concept is the same: a composition of values.
|
||||
For example we might have a Name class that is a composition of first-name and last-name, or an Address class that is a composition of street, city, postal code, etc.
|
||||
|
||||
.Usage of the word _embeddable_
|
||||
[NOTE]
|
||||
====
|
||||
To avoid any confusion with the annotation that marks a given embeddable type, the annotation will be further referred to as `@Embeddable`.
|
||||
|
||||
Throughout this chapter and thereafter, for brevity's sake, embeddable types may also be referred to as _embeddable_.
|
||||
====
|
||||
|
||||
.Simple embeddable type example
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/embeddable/Name.java[]
|
||||
----
|
||||
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/embeddable/Address.java[]
|
||||
----
|
||||
====
|
||||
|
||||
An embeddable type is another form of value type, and its lifecycle is bound to a parent entity type, therefore inheriting the attribute access from its parent (for details on attribute access, see <<chapters/domain/entity.adoc#access-embeddable-types,Access strategies>>).
|
||||
|
||||
Embeddable types can be made up of basic values as well as associations, with the caveat that, when used as collection elements, they cannot define collections themselves.
|
||||
|
||||
==== Component / Embedded
|
||||
|
||||
Most often, embeddable types are used to group multiple basic type mappings and reuse them across several entities.
|
||||
|
||||
.Simple Embeddable
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/embeddable/Person.java[]
|
||||
----
|
||||
====
|
||||
|
||||
[NOTE]
|
||||
====
|
||||
JPA defines two terms for working with an embeddable type: `@Embeddable` and `@Embedded`.
|
||||
`@Embeddable` is used to describe the mapping type itself (e.g. `Name`).
|
||||
`@Embedded` is for referencing a given embeddable type (e.g. `person.name`).
|
||||
====
|
||||
|
||||
So, the embeddable type is represented by the `Name` class and the parent makes use of it through the `person.name` object composition.
|
||||
|
||||
.Person table
|
||||
====
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/embeddable/Person1.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
The composed values are mapped to the same table as the parent table.
|
||||
Composition is part of good OO data modeling (idiomatic Java).
|
||||
In fact, that table could also be mapped by the following entity type instead.
|
||||
|
||||
.Alternative to embeddable type composition
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/embeddable/Person_alt.java[]
|
||||
----
|
||||
====
|
||||
|
||||
The composition form is certainly more Object-oriented, and that becomes more evident as we work with multiple embeddable types.
|
||||
|
||||
[[embeddable-multiple]]
|
||||
==== Multiple embeddable types
|
||||
|
||||
.Multiple embeddable types
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/embeddable/Contact.java[]
|
||||
----
|
||||
====
|
||||
|
||||
Although from an object-oriented perspective, it's much more convenient to work with embeddable types, this example doesn't work as-is.
|
||||
When the same embeddable type is included multiple times in the same parent entity type, the JPA specification demands setting the associated column names explicitly.
|
||||
|
||||
This requirement is due to how object properties are mapped to database columns.
|
||||
By default, JPA expects a database column to have the same name as its associated object property.
|
||||
When including multiple embeddables, the implicit name-based mapping rule doesn't work anymore because multiple object properties could end up being mapped to the same database column.
|
||||
|
||||
We have a few options to handle this issue.
|
||||
|
||||
[[embeddable-multiple-jpa]]
|
||||
==== JPA's AttributeOverride
|
||||
|
||||
JPA defines the `@AttributeOverride` annotation to handle this scenario.
|
||||
|
||||
.JPA's AttributeOverride
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/embeddable/Contact-AttributeOverride.java[]
|
||||
----
|
||||
====
|
||||
|
||||
This way, the mapping conflict is resolved by setting up explicit name-based property-column type mappings.
|
||||
|
||||
[[embeddable-multiple-namingstrategy]]
|
||||
==== ImplicitNamingStrategy
|
||||
|
||||
[IMPORTANT]
|
||||
====
|
||||
This is a Hibernate specific feature.
|
||||
Users concerned with JPA provider portability should instead prefer explicit column naming with <<embeddable-multiple-jpa,`@AttributeOverride`>>.
|
||||
====
|
||||
|
||||
Hibernate naming strategies are covered in detail in <<naming.adoc#naming,Naming>>.
|
||||
However, for the purposes of this discussion, Hibernate has the capability to interpret implicit column names in a way that is safe for use with multiple embeddable types.
|
||||
|
||||
.Enabling embeddable type safe implicit naming
|
||||
====
|
||||
[source,java]
|
||||
----
|
||||
include::{sourcedir}/embeddable/component-safe-implicit-naming.java[]
|
||||
----
|
||||
====
|
||||
|
||||
====
|
||||
[source,sql]
|
||||
----
|
||||
include::{sourcedir}/embeddable/Contact-ImplicitNamingStrategy.sql[]
|
||||
----
|
||||
====
|
||||
|
||||
Now the "path" to attributes are used in the implicit column naming.
|
||||
You could even develop your own `ImplicitNamingStrategy` to do special implicit naming.
|
||||
|
||||
[[embeddable-collections]]
|
||||
==== Collections of embeddable types
|
||||
|
||||
Collections of embeddable types are specifically value collections (as embeddable types are a value type).
|
||||
Value collections are covered in detail in <<collection.adoc#collections-value,Collections of value types>>.
|
||||
|
||||
[[embeddable-mapkey]]
|
||||
==== Embeddable types as Map key
|
||||
|
||||
Embeddable types can also be used as `Map` keys.
|
||||
This topic is covered in detail in <<collection.adoc#collections-map,Map - key>>.
|
||||
|
||||
[[embeddable-identifier]]
|
||||
==== Embeddable types as identifiers
|
||||
|
||||
Embeddable types can also be used as entity type identifiers.
|
||||
This usage is covered in detail in <<chapters/domain/identifiers.adoc#identifiers-composite,Composite identifiers>>.
|
||||
|
||||
[IMPORTANT]
|
||||
====
|
||||
Embeddable types that are used as collection entries, map keys or entity type identifiers cannot include their own collection mappings.
|
||||
====
|
@ -0,0 +1,303 @@
|
||||
[[entity]]
|
||||
=== Entity types
|
||||
:sourcedir: extras
|
||||
|
||||
.Usage of the word _entity_
|
||||
[NOTE]
|
||||
====
|
||||
The entity type describes the mapping between the actual persistable domain model object and a database table row.
|
||||
To avoid any confusion with the annotation that marks a given entity type, the annotation will be further referred to as `@Entity`.
|
||||
|
||||
Throughout this chapter and thereafter, entity types will simply be referred to as _entity_.
|
||||
====
|
||||
|
||||
[[entity-pojo]]
|
||||
==== POJO Models
|
||||
|
||||
Section _2.1 The Entity Class_ of the _JPA 2.1 specification_ defines its requirements for an entity class.
|
||||
Applications that wish to remain portable across JPA providers should adhere to these requirements.
|
||||
|
||||
* The entity class must be annotated with the `javax.persistence.Entity` annotation (or be denoted as such in XML mapping)
|
||||
* The entity class must have a public or protected no-argument constructor. It may define additional constructors as well.
|
||||
* The entity class must be a top-level class.
|
||||
* An enum or interface may not be designated as an entity.
|
||||
* The entity class must not be final. No methods or persistent instance variables of the entity class may be final.
|
||||
* If an entity instance is to be used remotely as a detached object, the entity class must implement the `Serializable` interface.
|
||||
* Both abstract and concrete classes can be entities. Entities may extend non-entity classes as well as entity classes, and non-entity classes may extend entity classes.
|
||||
* The persistent state of an entity is represented by instance variables, which may correspond to JavaBean-style properties.
|
||||
An instance variable must be directly accessed only from within the methods of the entity by the entity instance itself.
|
||||
The state of the entity is available to clients only through the entity’s accessor methods (getter/setter methods) or other business methods.
|
||||
|
||||
Hibernate, however, is not as strict in its requirements. The differences from the list above include:
|
||||
|
||||
* The entity class must have a no-argument constructor, which may be public, protected or package visibility. It may define additional constructors as well.
|
||||
* The entity class _need not_ be a top-level class.
|
||||
* Technically Hibernate can persist final classes or classes with final persistent state accessor (getter/setter) methods.
|
||||
However, it is generally not a good idea as doing so will stop Hibernate from being able to generate proxies for lazy-loading the entity.
|
||||
* Hibernate does not restrict the application developer from exposing instance variables and referencing them from outside the entity class itself.
|
||||
The validity of such a paradigm, however, is debatable at best.
|
||||
|
||||
Let's look at each requirement in detail.
|
||||
|
||||
[[entity-pojo-final]]
|
||||
==== Prefer non-final classes
|
||||
|
||||
A central feature of Hibernate is the ability to lazily load certain entity instance variables (attributes) via runtime proxies.
|
||||
This feature depends upon the entity class being non-final or else implementing an interface that declares all the attribute getters/setters.
|
||||
You can still persist final classes that do not implement such an interface with Hibernate,
|
||||
but you will not be able to use proxies for fetching lazy associations, therefore limiting your options for performance tuning.
|
||||
For the very same reason, you should also avoid declaring persistent attribute getters and setters as final.

[NOTE]
====
Starting in 5.0, Hibernate offers a more robust version of bytecode enhancement as another means for handling lazy loading.
Hibernate had some bytecode re-writing capabilities prior to 5.0, but they were very rudimentary.
See the <<chapters/pc/BytecodeEnhancement.adoc#BytecodeEnhancement,BytecodeEnhancement>> section for additional information on fetching and on bytecode enhancement.
====

[[entity-pojo-constructor]]
==== Implement a no-argument constructor

The entity class should have a no-argument constructor. Both Hibernate and JPA require this.

JPA requires that this constructor be defined as public or protected.
Hibernate, for the most part, does not care about the constructor visibility, as long as the system SecurityManager allows overriding the visibility setting.
That said, the constructor should be defined with at least package visibility if you wish to leverage runtime proxy generation.
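
For example, a minimal sketch (the `Book` entity is illustrative, not one of this guide's example listings) could keep the no-argument constructor at protected visibility for the persistence provider while exposing a convenience constructor for application code:

.A no-argument constructor alongside a convenience constructor
====
[source,java]
----
@Entity
public class Book {

	@Id
	private Long id;

	private String title;

	// Required by JPA/Hibernate; protected keeps it out of the public API
	// while remaining portable across JPA providers.
	protected Book() {
	}

	public Book(String title) {
		this.title = title;
	}
}
----
====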

[[entity-pojo-accessors]]
==== Declare getters and setters for persistent attributes

The JPA specification requires this, otherwise the model would prevent accessing the entity persistent state fields directly from outside the entity itself.

Although Hibernate does not require it, it is recommended to follow the JavaBean conventions and define getters and setters for entity persistent attributes.
Nevertheless, you can still tell Hibernate to directly access the entity fields.

Attributes (whether fields or getters/setters) need not be declared public.
Hibernate can deal with attributes declared with public, protected, package or private visibility.
Again, if you want to use runtime proxy generation for lazy loading, the getters and setters should have at least package visibility.
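
As an illustration (the `Customer` entity is hypothetical, not one of this guide's example listings), the accessors can follow the JavaBean conventions while keeping mutators that should not be part of the public API at reduced visibility:

.JavaBean-style accessors with a non-public setter
====
[source,java]
----
@Entity
public class Customer {

	@Id
	private Long id;

	private String name;

	public Long getId() {
		return id;
	}

	// Hibernate can invoke a non-public setter when hydrating the entity.
	protected void setId(Long id) {
		this.id = id;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}
}
----
====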

[[entity-pojo-identifier]]
==== Provide identifier attribute(s)

[IMPORTANT]
====
Historically, this was considered optional.
However, not defining identifier attribute(s) on the entity should be considered a deprecated feature that will be removed in an upcoming release.
====

The identifier attribute does not necessarily need to be mapped to the column(s) that physically define the primary key.
However, it should map to column(s) that can uniquely identify each row.

[NOTE]
====
We recommend that you declare consistently-named identifier attributes on persistent classes and that you use a nullable (i.e., non-primitive) type.
====

The placement of the `@Id` annotation marks the <<chapters/domain/access.adoc#access,persistence state access strategy>>.

.Identifier
====
[source,java]
----
include::{sourcedir}/entity/Identifier.java[]
----
====

Hibernate offers multiple identifier generation strategies; see the <<chapters/domain/identifiers.adoc#identifiers,Identifier Generators>> chapter for more about this topic.

[[entity-pojo-mapping]]
==== Mapping the entity

The main piece in mapping the entity is the `javax.persistence.Entity` annotation.
The `@Entity` annotation defines a single attribute, `name`, which is used to give the entity a specific name for use in JPQL queries.
By default, the entity name is the unqualified name of the entity class itself.

.Simple `@Entity`
====
[source,java]
----
include::{sourcedir}/entity/SimpleEntity.java[]
----
====

An entity models a database table.
The identifier uniquely identifies each row in that table.
By default, the name of the table is assumed to be the same as the name of the entity.
To explicitly give the name of the table or to specify other information about the table, we would use the `javax.persistence.Table` annotation.

.Simple `@Entity` with `@Table`
====
[source,java]
----
include::{sourcedir}/entity/SimpleEntityWithTable.java[]
----
====

[[mapping-model-pojo-equalshashcode]]
==== Implementing `equals()` and `hashCode()`

[NOTE]
====
Much of the discussion in this section deals with the relation of an entity to a Hibernate Session, whether the entity is managed, transient or detached.
If you are unfamiliar with these topics, they are explained in the <<chapters/pc/PersistenceContext.adoc#pc,Persistence Context>> chapter.
====

Whether to implement `equals()` and `hashCode()` methods in your domain model, let alone how to implement them, is a surprisingly tricky discussion when it comes to ORM.

There is really just one absolute case: a class that acts as an identifier must implement equals/hashCode based on the id value(s).
Generally, this is pertinent for user-defined classes used as composite identifiers.
Beyond this one very specific use case and a few others we will discuss below, you may want to consider not implementing equals/hashCode at all.

So what's all the fuss? Normally, most Java objects provide a built-in `equals()` and `hashCode()` based on the object's identity, so each new object will be different from all others.
This is generally what you want in ordinary Java programming.
Conceptually, however, this starts to break down once you consider the possibility of multiple instances of a class representing the same data.

This is, in fact, exactly the case when dealing with data coming from a database.
Every time we load a specific `Person` from the database, we would naturally get a unique instance.
Hibernate, however, works hard to make sure that does not happen within a given `Session`.
In fact, Hibernate guarantees equivalence of persistent identity (database row) and Java identity inside a particular session scope.
So if we ask a Hibernate `Session` to load that specific `Person` multiple times, we will actually get back the same __instance__:

.Scope of identity
====
[source,java]
----
include::{sourcedir}/entity/listing1.java[]
----
====

Consider another example using a persistent `java.util.Set`:

.Set usage with Session-scoped identity
====
[source,java]
----
include::{sourcedir}/entity/listing3.java[]
----
====

However, the semantics change when we mix instances loaded from different Sessions:

.Mixed Sessions
====
[source,java]
----
include::{sourcedir}/entity/listing2.java[]
----

[source,java]
----
include::{sourcedir}/entity/listing4.java[]
----
====

Specifically, the outcome in this last example will depend on whether the `Person` class implemented equals/hashCode, and, if so, how.

Consider yet another case:

.Sets with transient entities
====
[source,java]
----
include::{sourcedir}/entity/listing5.java[]
----
====

In cases where you will be dealing with entities outside of a Session (whether they be transient or detached), especially in cases where you will be using them in Java collections,
you should consider implementing equals/hashCode.

A common initial approach is to use the entity's identifier attribute as the basis for equals/hashCode calculations:

.Naive equals/hashCode implementation
====
[source,java]
----
include::{sourcedir}/entity/listing6.java[]
----
====

It turns out that this still breaks when adding a transient instance of `Person` to a set, as we saw in the last example:

.Still trouble
====
[source,java]
----
include::{sourcedir}/entity/listing7.java[]
----
====

The issue here is a conflict between _the use of a generated identifier_, _the contract of `Set`_ and _the equals/hashCode implementations_.
`Set` says that the equals/hashCode value for an object should not change while the object is part of the `Set`.
But that is exactly what happened here, because the equals/hashCode are based on the (generated) id, which was not set until the `session.getTransaction().commit()` call.

Note that this is only a concern when using generated identifiers.
If you are using assigned identifiers, this will not be a problem, assuming the identifier value is assigned prior to adding the entity to the `Set`.
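
A minimal sketch of that safe situation (the `Book` entity and its ISBN-based identifier are illustrative, not one of this guide's example listings, and `java.util.Objects` is assumed to be imported) bases equals/hashCode on an assigned identifier that is known before the instance is ever added to a `Set`:

.Assigned identifier as a stable equals/hashCode basis
====
[source,java]
----
@Entity
public class Book {

	// Assigned identifier: the application sets it before persisting,
	// so it never changes while the instance sits in a Set.
	@Id
	private String isbn;

	private String title;

	public Book() {}

	public Book(String isbn, String title) {
		this.isbn = isbn;
		this.title = title;
	}

	@Override
	public boolean equals(Object o) {
		if (this == o) return true;
		if (o == null || getClass() != o.getClass()) return false;
		return Objects.equals(isbn, ((Book) o).isbn);
	}

	@Override
	public int hashCode() {
		return Objects.hash(isbn);
	}
}
----
====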

Another option is to force the identifier to be generated and set prior to adding the entity to the `Set`:

.Forcing identifier generation
====
[source,java]
----
include::{sourcedir}/entity/listing8.java[]
----
====

But this is often not feasible.

The final approach is to use a "better" equals/hashCode implementation, making use of a natural-id or business-key.

.Better equals/hashCode with natural-id
====
[source,java]
----
include::{sourcedir}/entity/listing9.java[]
----
====

As you can see, the question of equals/hashCode is not trivial, nor is there a one-size-fits-all solution.

For details on mapping the identifier, see the <<chapters/domain/identifiers.adoc#identifiers,Identifiers>> chapter.

[[entity-pojo-optlock]]
==== Mapping optimistic locking

JPA defines support for optimistic locking based on either a version (sequential numeric) or timestamp strategy.
To enable this style of optimistic locking, simply add the `javax.persistence.Version` annotation to the persistent attribute that defines the optimistic locking value.
According to JPA, the valid types for these attributes are limited to:

* `int` or `Integer`
* `short` or `Short`
* `long` or `Long`
* `java.sql.Timestamp`

.Version
====
[source,java]
----
include::{sourcedir}/entity/Version.java[]
----

[source,java]
----
include::{sourcedir}/entity/Timestamp.java[]
----

[source,java]
----
include::{sourcedir}/entity/Instant.java[]
----
====

Hibernate supports a form of optimistic locking that does not require a dedicated "version attribute".
This is intended mainly for use with modeling legacy schemas.
The idea is that you can get Hibernate to perform "version checks" using either all of the entity's attributes, or just the attributes that have changed.
This is achieved through the use of the `org.hibernate.annotations.OptimisticLocking` annotation, which defines a single attribute of type `org.hibernate.annotations.OptimisticLockType`.
There are four available `OptimisticLockType` values (a short illustration follows the list):

`NONE`:: optimistic locking is disabled, even if there is a `@Version` annotation present
`VERSION` (the default):: performs optimistic locking based on a `@Version` attribute, as described above
`ALL`:: performs optimistic locking based on _all_ fields, as part of an expanded WHERE clause restriction for the UPDATE/DELETE SQL statements
`DIRTY`:: performs optimistic locking based on _dirty_ fields, as part of an expanded WHERE clause restriction for the UPDATE/DELETE SQL statements
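
As a sketch of how the dirty-attributes strategy might be declared (the `Invoice` entity is illustrative, not one of this guide's example listings; to the best of our knowledge, `OptimisticLockType.DIRTY` should be combined with `org.hibernate.annotations.DynamicUpdate` so that the UPDATE statement contains only the columns that actually changed):

.Optimistic locking based on dirty attributes
====
[source,java]
----
@Entity
@OptimisticLocking(type = OptimisticLockType.DIRTY)
@DynamicUpdate
public class Invoice {

	@Id
	private Long id;

	private String number;

	private BigDecimal total;

	// getters and setters omitted for brevity
}
----
====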
@ -0,0 +1,18 @@
|
||||
@Entity
|
||||
public class Patch {
|
||||
|
||||
@Id
|
||||
private Long id;
|
||||
|
||||
@ElementCollection
|
||||
@CollectionTable(
|
||||
name="patch_change",
|
||||
joinColumns=@JoinColumn(name="patch_id")
|
||||
)
|
||||
@OrderColumn(name = "index_id")
|
||||
private List<Change> changes = new ArrayList<>();
|
||||
|
||||
public List<Change> getChanges() {
|
||||
return changes;
|
||||
}
|
||||
}
|
@ -0,0 +1,28 @@
|
||||
@Embeddable
|
||||
@Access(AccessType.PROPERTY)
|
||||
public static class Change {
|
||||
|
||||
private String path;
|
||||
|
||||
private String diff;
|
||||
|
||||
public Change() {}
|
||||
|
||||
@Column(name = "path", nullable = false)
|
||||
public String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
public void setPath(String path) {
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
@Column(name = "diff", nullable = false)
|
||||
public String getDiff() {
|
||||
return diff;
|
||||
}
|
||||
|
||||
public void setDiff(String diff) {
|
||||
this.diff = diff;
|
||||
}
|
||||
}
|
@ -0,0 +1,9 @@
|
||||
@Entity
|
||||
public class Patch {
|
||||
|
||||
@Id
|
||||
private Long id;
|
||||
|
||||
@Embedded
|
||||
private Change change;
|
||||
}
|
@ -0,0 +1,14 @@
|
||||
@Entity
|
||||
public class Simple {
|
||||
|
||||
@Id
|
||||
private Integer id;
|
||||
|
||||
public Integer getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId( Integer id ) {
|
||||
this.id = id;
|
||||
}
|
||||
}
|
@ -0,0 +1,14 @@
|
||||
@Entity
|
||||
public class Simple {
|
||||
|
||||
private Integer id;
|
||||
|
||||
@Id
|
||||
public Integer getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId( Integer id ) {
|
||||
this.id = id;
|
||||
}
|
||||
}
|
@ -0,0 +1,18 @@
|
||||
@Entity
|
||||
public class Simple {
|
||||
|
||||
private Integer id;
|
||||
|
||||
@Version
|
||||
@Access( AccessType.FIELD )
|
||||
private Integer version;
|
||||
|
||||
@Id
|
||||
public Integer getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId( Integer id ) {
|
||||
this.id = id;
|
||||
}
|
||||
}
|
@ -0,0 +1,106 @@
|
||||
@Entity
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
@NaturalId
|
||||
private String registrationNumber;
|
||||
|
||||
public Person() {}
|
||||
|
||||
public Person(String registrationNumber) {
|
||||
this.registrationNumber = registrationNumber;
|
||||
}
|
||||
|
||||
@ManyToMany(cascade = { CascadeType.PERSIST, CascadeType.MERGE} )
|
||||
private List<Address> addresses = new ArrayList<>();
|
||||
|
||||
public List<Address> getAddresses() {
|
||||
return addresses;
|
||||
}
|
||||
|
||||
public void addAddress(Address address) {
|
||||
addresses.add(address);
|
||||
address.getOwners().add(this);
|
||||
}
|
||||
|
||||
public void removeAddress(Address address) {
|
||||
addresses.remove(address);
|
||||
address.getOwners().remove(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Person person = (Person) o;
|
||||
return Objects.equals(registrationNumber, person.registrationNumber);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(registrationNumber);
|
||||
}
|
||||
}
|
||||
|
||||
@Entity
|
||||
public class Address {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
private String street;
|
||||
|
||||
private String number;
|
||||
|
||||
private String postalCode;
|
||||
|
||||
@ManyToMany(mappedBy = "addresses")
|
||||
private List<Person> owners = new ArrayList<>();
|
||||
|
||||
public Address() {}
|
||||
|
||||
public Address(String street, String number, String postalCode) {
|
||||
this.street = street;
|
||||
this.number = number;
|
||||
this.postalCode = postalCode;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getStreet() {
|
||||
return street;
|
||||
}
|
||||
|
||||
public String getNumber() {
|
||||
return number;
|
||||
}
|
||||
|
||||
public String getPostalCode() {
|
||||
return postalCode;
|
||||
}
|
||||
|
||||
public List<Person> getOwners() {
|
||||
return owners;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Address address = (Address) o;
|
||||
return Objects.equals(street, address.street) &&
|
||||
Objects.equals(number, address.number) &&
|
||||
Objects.equals(postalCode, address.postalCode);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(street, number, postalCode);
|
||||
}
|
||||
}
|
@ -0,0 +1,30 @@
|
||||
CREATE TABLE Address (
|
||||
id BIGINT NOT NULL ,
|
||||
number VARCHAR(255) ,
|
||||
postalCode VARCHAR(255) ,
|
||||
street VARCHAR(255) ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
CREATE TABLE Person (
|
||||
id BIGINT NOT NULL ,
|
||||
registrationNumber VARCHAR(255) ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
CREATE TABLE Person_Address (
|
||||
owners_id BIGINT NOT NULL ,
|
||||
addresses_id BIGINT NOT NULL
|
||||
)
|
||||
|
||||
ALTER TABLE Person
|
||||
ADD CONSTRAINT UK_23enodonj49jm8uwec4i7y37f
|
||||
UNIQUE (registrationNumber)
|
||||
|
||||
ALTER TABLE Person_Address
|
||||
ADD CONSTRAINT FKm7j0bnabh2yr0pe99il1d066u
|
||||
FOREIGN KEY (addresses_id) REFERENCES Address
|
||||
|
||||
ALTER TABLE Person_Address
|
||||
ADD CONSTRAINT FKbn86l24gmxdv2vmekayqcsgup
|
||||
FOREIGN KEY (owners_id) REFERENCES Person
|
@ -0,0 +1,17 @@
|
||||
Person person1 = new Person("ABC-123");
|
||||
Person person2 = new Person("DEF-456");
|
||||
|
||||
Address address1 = new Address("12th Avenue", "12A", "4005A");
|
||||
Address address2 = new Address("18th Avenue", "18B", "4007B");
|
||||
|
||||
person1.addAddress(address1);
|
||||
person1.addAddress(address2);
|
||||
|
||||
person2.addAddress(address1);
|
||||
|
||||
entityManager.persist(person1);
|
||||
entityManager.persist(person2);
|
||||
|
||||
entityManager.flush();
|
||||
|
||||
person1.removeAddress(address1);
|
@ -0,0 +1,26 @@
|
||||
INSERT INTO Person ( registrationNumber, id )
|
||||
VALUES ( 'ABC-123', 1 )
|
||||
|
||||
INSERT INTO Address ( number, postalCode, street, id )
|
||||
VALUES ( '12A', '4005A', '12th Avenue', 2 )
|
||||
|
||||
INSERT INTO Address ( number, postalCode, street, id )
|
||||
VALUES ( '18B', '4007B', '18th Avenue', 3 )
|
||||
|
||||
INSERT INTO Person ( registrationNumber, id )
|
||||
VALUES ( 'DEF-456', 4 )
|
||||
|
||||
INSERT INTO Person_Address ( owners_id, addresses_id )
|
||||
VALUES ( 1, 2 )
|
||||
|
||||
INSERT INTO Person_Address ( owners_id, addresses_id )
|
||||
VALUES ( 1, 3 )
|
||||
|
||||
INSERT INTO Person_Address ( owners_id, addresses_id )
|
||||
VALUES ( 4, 2 )
|
||||
|
||||
DELETE FROM Person_Address
|
||||
WHERE owners_id = 1
|
||||
|
||||
INSERT INTO Person_Address ( owners_id, addresses_id )
|
||||
VALUES ( 1, 3 )
|
@ -0,0 +1,163 @@
|
||||
@Entity
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
@NaturalId
|
||||
private String registrationNumber;
|
||||
|
||||
@OneToMany(mappedBy = "person", cascade = CascadeType.ALL, orphanRemoval = true)
|
||||
private List<PersonAddress> addresses = new ArrayList<>();
|
||||
|
||||
public Person() {}
|
||||
|
||||
public Person(String registrationNumber) {
|
||||
this.registrationNumber = registrationNumber;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public List<PersonAddress> getAddresses() {
|
||||
return addresses;
|
||||
}
|
||||
|
||||
public void addAddress(Address address) {
|
||||
PersonAddress personAddress = new PersonAddress(this, address);
|
||||
addresses.add(personAddress);
|
||||
address.getOwners().add(personAddress);
|
||||
}
|
||||
|
||||
public void removeAddress(Address address) {
|
||||
PersonAddress personAddress = new PersonAddress(this, address);
|
||||
address.getOwners().remove(personAddress);
|
||||
addresses.remove(personAddress);
|
||||
personAddress.setPerson(null);
|
||||
personAddress.setAddress(null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Person person = (Person) o;
|
||||
return Objects.equals(registrationNumber, person.registrationNumber);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(registrationNumber);
|
||||
}
|
||||
}
|
||||
|
||||
@Entity
|
||||
public class PersonAddress implements Serializable {
|
||||
|
||||
@Id
|
||||
@ManyToOne
|
||||
private Person person;
|
||||
|
||||
@Id
|
||||
@ManyToOne
|
||||
private Address address;
|
||||
|
||||
public PersonAddress() {}
|
||||
|
||||
public PersonAddress(Person person, Address address) {
|
||||
this.person = person;
|
||||
this.address = address;
|
||||
}
|
||||
|
||||
public Person getPerson() {
|
||||
return person;
|
||||
}
|
||||
|
||||
public void setPerson(Person person) {
|
||||
this.person = person;
|
||||
}
|
||||
|
||||
public Address getAddress() {
|
||||
return address;
|
||||
}
|
||||
|
||||
public void setAddress(Address address) {
|
||||
this.address = address;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
PersonAddress that = (PersonAddress) o;
|
||||
return Objects.equals(person, that.person) &&
|
||||
Objects.equals(address, that.address);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(person, address);
|
||||
}
|
||||
}
|
||||
|
||||
@Entity
|
||||
public class Address {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
private String street;
|
||||
|
||||
private String number;
|
||||
|
||||
private String postalCode;
|
||||
|
||||
@OneToMany(mappedBy = "address", cascade = CascadeType.ALL, orphanRemoval = true)
|
||||
private List<PersonAddress> owners = new ArrayList<>();
|
||||
|
||||
public Address() {}
|
||||
|
||||
public Address(String street, String number, String postalCode) {
|
||||
this.street = street;
|
||||
this.number = number;
|
||||
this.postalCode = postalCode;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getStreet() {
|
||||
return street;
|
||||
}
|
||||
|
||||
public String getNumber() {
|
||||
return number;
|
||||
}
|
||||
|
||||
public String getPostalCode() {
|
||||
return postalCode;
|
||||
}
|
||||
|
||||
public List<PersonAddress> getOwners() {
|
||||
return owners;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Address address = (Address) o;
|
||||
return Objects.equals(street, address.street) &&
|
||||
Objects.equals(number, address.number) &&
|
||||
Objects.equals(postalCode, address.postalCode);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(street, number, postalCode);
|
||||
}
|
||||
}
|
@ -0,0 +1,31 @@
|
||||
CREATE TABLE Address (
|
||||
id BIGINT NOT NULL ,
|
||||
number VARCHAR(255) ,
|
||||
postalCode VARCHAR(255) ,
|
||||
street VARCHAR(255) ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
CREATE TABLE Person (
|
||||
id BIGINT NOT NULL ,
|
||||
registrationNumber VARCHAR(255) ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
CREATE TABLE PersonAddress (
|
||||
person_id BIGINT NOT NULL ,
|
||||
address_id BIGINT NOT NULL ,
|
||||
PRIMARY KEY ( person_id, address_id )
|
||||
)
|
||||
|
||||
ALTER TABLE Person
|
||||
ADD CONSTRAINT UK_23enodonj49jm8uwec4i7y37f
|
||||
UNIQUE (registrationNumber)
|
||||
|
||||
ALTER TABLE PersonAddress
|
||||
ADD CONSTRAINT FK8b3lru5fyej1aarjflamwghqq
|
||||
FOREIGN KEY (person_id) REFERENCES Person
|
||||
|
||||
ALTER TABLE PersonAddress
|
||||
ADD CONSTRAINT FK7p69mgialumhegyl4byrh65jk
|
||||
FOREIGN KEY (address_id) REFERENCES Address
|
@ -0,0 +1,20 @@
|
||||
Person person1 = new Person("ABC-123");
|
||||
Person person2 = new Person("DEF-456");
|
||||
|
||||
Address address1 = new Address("12th Avenue", "12A", "4005A");
|
||||
Address address2 = new Address("18th Avenue", "18B", "4007B");
|
||||
|
||||
entityManager.persist(person1);
|
||||
entityManager.persist(person2);
|
||||
|
||||
entityManager.persist(address1);
|
||||
entityManager.persist(address2);
|
||||
|
||||
person1.addAddress(address1);
|
||||
person1.addAddress(address2);
|
||||
|
||||
person2.addAddress(address1);
|
||||
|
||||
entityManager.flush();
|
||||
|
||||
person1.removeAddress(address1);
|
@ -0,0 +1,23 @@
|
||||
INSERT INTO Person ( registrationNumber, id )
|
||||
VALUES ( 'ABC-123', 1 )
|
||||
|
||||
INSERT INTO Person ( registrationNumber, id )
|
||||
VALUES ( 'DEF-456', 2 )
|
||||
|
||||
INSERT INTO Address ( number, postalCode, street, id )
|
||||
VALUES ( '12A', '4005A', '12th Avenue', 3 )
|
||||
|
||||
INSERT INTO Address ( number, postalCode, street, id )
|
||||
VALUES ( '18B', '4007B', '18th Avenue', 4 )
|
||||
|
||||
INSERT INTO PersonAddress ( person_id, address_id )
|
||||
VALUES ( 1, 3 )
|
||||
|
||||
INSERT INTO PersonAddress ( person_id, address_id )
|
||||
VALUES ( 1, 4 )
|
||||
|
||||
INSERT INTO PersonAddress ( person_id, address_id )
|
||||
VALUES ( 2, 3 )
|
||||
|
||||
DELETE FROM PersonAddress
|
||||
WHERE person_id = 1 AND address_id = 3
|
@ -0,0 +1,80 @@
|
||||
@Entity
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
public Person() {}
|
||||
|
||||
public Person(Long id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
@OneToMany(mappedBy = "person", cascade = CascadeType.ALL, orphanRemoval = true)
|
||||
private List<Phone> phones = new ArrayList<>();
|
||||
|
||||
public List<Phone> getPhones() {
|
||||
return phones;
|
||||
}
|
||||
|
||||
public void addPhone(Phone phone) {
|
||||
phones.add(phone);
|
||||
phone.setPerson(this);
|
||||
}
|
||||
|
||||
public void removePhone(Phone phone) {
|
||||
phones.remove(phone);
|
||||
phone.setPerson(null);
|
||||
}
|
||||
}
|
||||
|
||||
@Entity
|
||||
public class Phone {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
@NaturalId
|
||||
@Column(unique = true)
|
||||
private String number;
|
||||
|
||||
@ManyToOne
|
||||
private Person person;
|
||||
|
||||
public Phone() {}
|
||||
|
||||
public Phone(String number) {
|
||||
this.number = number;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getNumber() {
|
||||
return number;
|
||||
}
|
||||
|
||||
public Person getPerson() {
|
||||
return person;
|
||||
}
|
||||
|
||||
public void setPerson(Person person) {
|
||||
this.person = person;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Phone phone = (Phone) o;
|
||||
return Objects.equals(number, phone.number);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(number);
|
||||
}
|
||||
}
|
@ -0,0 +1,19 @@
|
||||
CREATE TABLE Person (
|
||||
id BIGINT NOT NULL ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
CREATE TABLE Phone (
|
||||
id BIGINT NOT NULL ,
|
||||
number VARCHAR(255) ,
|
||||
person_id BIGINT ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
ALTER TABLE Phone
|
||||
ADD CONSTRAINT UK_l329ab0g4c1t78onljnxmbnp6
|
||||
UNIQUE (number)
|
||||
|
||||
ALTER TABLE Phone
|
||||
ADD CONSTRAINT FKmw13yfsjypiiq0i1osdkaeqpg
|
||||
FOREIGN KEY (person_id) REFERENCES Person
|
@ -0,0 +1,10 @@
|
||||
Person person = new Person();
|
||||
Phone phone1 = new Phone("123-456-7890");
|
||||
Phone phone2 = new Phone("321-654-0987");
|
||||
|
||||
person.addPhone(phone1);
|
||||
person.addPhone(phone2);
|
||||
entityManager.persist(person);
|
||||
entityManager.flush();
|
||||
|
||||
person.removePhone(phone1);
|
@ -0,0 +1,10 @@
|
||||
INSERT INTO Phone
|
||||
( number, person_id, id )
|
||||
VALUES ( '123-456-7890', NULL, 2 )
|
||||
|
||||
INSERT INTO Phone
|
||||
( number, person_id, id )
|
||||
VALUES ( '321-654-0987', NULL, 3 )
|
||||
|
||||
DELETE FROM Phone
|
||||
WHERE id = 2
|
@ -0,0 +1,85 @@
|
||||
@Entity
|
||||
public class Phone {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
private String number;
|
||||
|
||||
@OneToOne(mappedBy = "phone", cascade = CascadeType.ALL, orphanRemoval = true)
|
||||
private PhoneDetails details;
|
||||
|
||||
public Phone() {}
|
||||
|
||||
public Phone(String number) {
|
||||
this.number = number;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getNumber() {
|
||||
return number;
|
||||
}
|
||||
|
||||
public PhoneDetails getDetails() {
|
||||
return details;
|
||||
}
|
||||
|
||||
public void addDetails(PhoneDetails details) {
|
||||
details.setPhone(this);
|
||||
this.details = details;
|
||||
}
|
||||
|
||||
public void removeDetails() {
|
||||
if (details != null) {
|
||||
details.setPhone(null);
|
||||
this.details = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Entity
|
||||
public class PhoneDetails {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
private String provider;
|
||||
|
||||
private String technology;
|
||||
|
||||
@OneToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "phone_id")
|
||||
private Phone phone;
|
||||
|
||||
public PhoneDetails() {}
|
||||
|
||||
public PhoneDetails(String provider, String technology) {
|
||||
this.provider = provider;
|
||||
this.technology = technology;
|
||||
}
|
||||
|
||||
public String getProvider() {
|
||||
return provider;
|
||||
}
|
||||
|
||||
public String getTechnology() {
|
||||
return technology;
|
||||
}
|
||||
|
||||
public void setTechnology(String technology) {
|
||||
this.technology = technology;
|
||||
}
|
||||
|
||||
public Phone getPhone() {
|
||||
return phone;
|
||||
}
|
||||
|
||||
public void setPhone(Phone phone) {
|
||||
this.phone = phone;
|
||||
}
|
||||
}
|
@ -0,0 +1,17 @@
|
||||
CREATE TABLE Phone (
|
||||
id BIGINT NOT NULL ,
|
||||
number VARCHAR(255) ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
CREATE TABLE PhoneDetails (
|
||||
id BIGINT NOT NULL ,
|
||||
provider VARCHAR(255) ,
|
||||
technology VARCHAR(255) ,
|
||||
phone_id BIGINT ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
ALTER TABLE PhoneDetails
|
||||
ADD CONSTRAINT FKeotuev8ja8v0sdh29dynqj05p
|
||||
FOREIGN KEY (phone_id) REFERENCES Phone
|
@ -0,0 +1,14 @@
|
||||
Phone phone = new Phone("123-456-7890");
|
||||
PhoneDetails details = new PhoneDetails("T-Mobile", "GSM");
|
||||
|
||||
phone.addDetails(details);
|
||||
entityManager.persist(phone);
|
||||
|
||||
PhoneDetails otherDetails = new PhoneDetails("T-Mobile", "CDMA");
|
||||
otherDetails.setPhone(phone);
|
||||
entityManager.persist(otherDetails);
|
||||
entityManager.flush();
|
||||
entityManager.clear();
|
||||
|
||||
//throws javax.persistence.PersistenceException: org.hibernate.HibernateException: More than one row with the given identifier was found: 1
|
||||
entityManager.find(Phone.class, phone.getId()).getDetails().getProvider();
|
@ -0,0 +1,5 @@
|
||||
Phone phone = new Phone("123-456-7890");
|
||||
PhoneDetails details = new PhoneDetails("T-Mobile", "GSM");
|
||||
|
||||
phone.addDetails(details);
|
||||
entityManager.persist(phone);
|
@ -0,0 +1,5 @@
|
||||
INSERT INTO Phone ( number, id )
|
||||
VALUES ( '123-456-7890', 1 )
|
||||
|
||||
INSERT INTO PhoneDetails ( phone_id, provider, technology, id )
|
||||
VALUES ( 1, 'T-Mobile', 'GSM', 2 )
|
@ -0,0 +1,47 @@
|
||||
@Entity
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
public Person() {}
|
||||
}
|
||||
|
||||
@Entity
|
||||
public class Phone {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
private String number;
|
||||
|
||||
@ManyToOne
|
||||
@JoinColumn(name = "person_id",
|
||||
foreignKey = @ForeignKey(name = "PERSON_ID_FK")
|
||||
)
|
||||
private Person person;
|
||||
|
||||
public Phone() {}
|
||||
|
||||
public Phone(String number) {
|
||||
this.number = number;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getNumber() {
|
||||
return number;
|
||||
}
|
||||
|
||||
public Person getPerson() {
|
||||
return person;
|
||||
}
|
||||
|
||||
public void setPerson(Person person) {
|
||||
this.person = person;
|
||||
}
|
||||
}
|
@ -0,0 +1,15 @@
|
||||
CREATE TABLE Person (
|
||||
id BIGINT NOT NULL ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
CREATE TABLE Phone (
|
||||
id BIGINT NOT NULL ,
|
||||
number VARCHAR(255) ,
|
||||
person_id BIGINT ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
ALTER TABLE Phone
|
||||
ADD CONSTRAINT PERSON_ID_FK
|
||||
FOREIGN KEY (person_id) REFERENCES Person
|
@ -0,0 +1,9 @@
|
||||
Person person = new Person();
|
||||
entityManager.persist(person);
|
||||
|
||||
Phone phone = new Phone("123-456-7890");
|
||||
phone.setPerson(person);
|
||||
entityManager.persist(phone);
|
||||
|
||||
entityManager.flush();
|
||||
phone.setPerson(null);
|
@ -0,0 +1,10 @@
|
||||
INSERT INTO Person ( id )
|
||||
VALUES ( 1 )
|
||||
|
||||
INSERT INTO Phone ( number, person_id, id )
|
||||
VALUES ( '123-456-7890', 1, 2 )
|
||||
|
||||
UPDATE Phone
|
||||
SET number = '123-456-7890',
|
||||
person_id = NULL
|
||||
WHERE id = 2
|
@ -0,0 +1,47 @@
|
||||
@Entity
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
public Person() {}
|
||||
|
||||
@ManyToMany(cascade = { CascadeType.PERSIST, CascadeType.MERGE} )
|
||||
private List<Address> addresses = new ArrayList<>();
|
||||
|
||||
public List<Address> getAddresses() {
|
||||
return addresses;
|
||||
}
|
||||
}
|
||||
|
||||
@Entity
|
||||
public class Address {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
private String street;
|
||||
|
||||
private String number;
|
||||
|
||||
public Address() {}
|
||||
|
||||
public Address(String street, String number) {
|
||||
this.street = street;
|
||||
this.number = number;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getStreet() {
|
||||
return street;
|
||||
}
|
||||
|
||||
public String getNumber() {
|
||||
return number;
|
||||
}
|
||||
}
|
@ -0,0 +1,24 @@
|
||||
CREATE TABLE Address (
|
||||
id BIGINT NOT NULL ,
|
||||
number VARCHAR(255) ,
|
||||
street VARCHAR(255) ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
CREATE TABLE Person (
|
||||
id BIGINT NOT NULL ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
CREATE TABLE Person_Address (
|
||||
Person_id BIGINT NOT NULL ,
|
||||
addresses_id BIGINT NOT NULL
|
||||
)
|
||||
|
||||
ALTER TABLE Person_Address
|
||||
ADD CONSTRAINT FKm7j0bnabh2yr0pe99il1d066u
|
||||
FOREIGN KEY (addresses_id) REFERENCES Address
|
||||
|
||||
ALTER TABLE Person_Address
|
||||
ADD CONSTRAINT FKba7rc9qe2vh44u93u0p2auwti
|
||||
FOREIGN KEY (Person_id) REFERENCES Person
|
@ -0,0 +1,17 @@
|
||||
Person person1 = new Person();
|
||||
Person person2 = new Person();
|
||||
|
||||
Address address1 = new Address("12th Avenue", "12A");
|
||||
Address address2 = new Address("18th Avenue", "18B");
|
||||
|
||||
person1.getAddresses().add(address1);
|
||||
person1.getAddresses().add(address2);
|
||||
|
||||
person2.getAddresses().add(address1);
|
||||
|
||||
entityManager.persist(person1);
|
||||
entityManager.persist(person2);
|
||||
|
||||
entityManager.flush();
|
||||
|
||||
person1.getAddresses().remove(address1);
|
@ -0,0 +1,24 @@
|
||||
INSERT INTO Person ( id )
|
||||
VALUES ( 1 )
|
||||
|
||||
INSERT INTO Address ( number, street, id )
|
||||
VALUES ( '12A', '12th Avenue', 2 )
|
||||
|
||||
INSERT INTO Address ( number, street, id )
|
||||
VALUES ( '18B', '18th Avenue', 3 )
|
||||
|
||||
INSERT INTO Person ( id )
|
||||
VALUES ( 4 )
|
||||
|
||||
INSERT INTO Person_Address ( Person_id, addresses_id )
|
||||
VALUES ( 1, 2 )
|
||||
INSERT INTO Person_Address ( Person_id, addresses_id )
|
||||
VALUES ( 1, 3 )
|
||||
INSERT INTO Person_Address ( Person_id, addresses_id )
|
||||
VALUES ( 4, 2 )
|
||||
|
||||
DELETE FROM Person_Address
|
||||
WHERE Person_id = 1
|
||||
|
||||
INSERT INTO Person_Address ( Person_id, addresses_id )
|
||||
VALUES ( 1, 3 )
|
@ -0,0 +1,2 @@
|
||||
Person person1 = entityManager.find(Person.class, personId);
|
||||
entityManager.remove(person1);
|
@ -0,0 +1,5 @@
|
||||
DELETE FROM Person_Address
|
||||
WHERE Person_id = 1
|
||||
|
||||
DELETE FROM Person
|
||||
WHERE id = 1
|
@ -0,0 +1,40 @@
|
||||
@Entity
|
||||
public class Person {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
public Person() {}
|
||||
|
||||
@OneToMany(cascade = CascadeType.ALL, orphanRemoval = true)
|
||||
private List<Phone> phones = new ArrayList<>();
|
||||
|
||||
public List<Phone> getPhones() {
|
||||
return phones;
|
||||
}
|
||||
}
|
||||
|
||||
@Entity
|
||||
public class Phone {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
private String number;
|
||||
|
||||
public Phone() {}
|
||||
|
||||
public Phone(String number) {
|
||||
this.number = number;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getNumber() {
|
||||
return number;
|
||||
}
|
||||
}
|
@ -0,0 +1,27 @@
|
||||
CREATE TABLE Person (
|
||||
id BIGINT NOT NULL ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
CREATE TABLE Person_Phone (
|
||||
Person_id BIGINT NOT NULL ,
|
||||
phones_id BIGINT NOT NULL
|
||||
)
|
||||
|
||||
CREATE TABLE Phone (
|
||||
id BIGINT NOT NULL ,
|
||||
number VARCHAR(255) ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
ALTER TABLE Person_Phone
|
||||
ADD CONSTRAINT UK_9uhc5itwc9h5gcng944pcaslf
|
||||
UNIQUE (phones_id)
|
||||
|
||||
ALTER TABLE Person_Phone
|
||||
ADD CONSTRAINT FKr38us2n8g5p9rj0b494sd3391
|
||||
FOREIGN KEY (phones_id) REFERENCES Phone
|
||||
|
||||
ALTER TABLE Person_Phone
|
||||
ADD CONSTRAINT FK2ex4e4p7w1cj310kg2woisjl2
|
||||
FOREIGN KEY (Person_id) REFERENCES Person
|
@ -0,0 +1,10 @@
|
||||
Person person = new Person();
|
||||
Phone phone1 = new Phone("123-456-7890");
|
||||
Phone phone2 = new Phone("321-654-0987");
|
||||
|
||||
person.getPhones().add(phone1);
|
||||
person.getPhones().add(phone2);
|
||||
entityManager.persist(person);
|
||||
entityManager.flush();
|
||||
|
||||
person.getPhones().remove(phone1);
|
@ -0,0 +1,29 @@
|
||||
INSERT INTO Person
|
||||
( id )
|
||||
VALUES ( 1 )
|
||||
|
||||
INSERT INTO Phone
|
||||
( number, id )
|
||||
VALUES ( '123-456-7890', 2 )
|
||||
|
||||
INSERT INTO Phone
|
||||
( number, id )
|
||||
VALUES ( '321-654-0987', 3 )
|
||||
|
||||
INSERT INTO Person_Phone
|
||||
( Person_id, phones_id )
|
||||
VALUES ( 1, 2 )
|
||||
|
||||
INSERT INTO Person_Phone
|
||||
( Person_id, phones_id )
|
||||
VALUES ( 1, 3 )
|
||||
|
||||
DELETE FROM Person_Phone
|
||||
WHERE Person_id = 1
|
||||
|
||||
INSERT INTO Person_Phone
|
||||
( Person_id, phones_id )
|
||||
VALUES ( 1, 3 )
|
||||
|
||||
DELETE FROM Phone
|
||||
WHERE id = 2
|
@ -0,0 +1,66 @@
|
||||
@Entity
|
||||
public class Phone {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
private String number;
|
||||
|
||||
@OneToOne
|
||||
@JoinColumn(name = "details_id")
|
||||
private PhoneDetails details;
|
||||
|
||||
public Phone() {}
|
||||
|
||||
public Phone(String number) {
|
||||
this.number = number;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getNumber() {
|
||||
return number;
|
||||
}
|
||||
|
||||
public PhoneDetails getDetails() {
|
||||
return details;
|
||||
}
|
||||
|
||||
public void setDetails(PhoneDetails details) {
|
||||
this.details = details;
|
||||
}
|
||||
}
|
||||
|
||||
@Entity
|
||||
public class PhoneDetails {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
private String provider;
|
||||
|
||||
private String technology;
|
||||
|
||||
public PhoneDetails() {}
|
||||
|
||||
public PhoneDetails(String provider, String technology) {
|
||||
this.provider = provider;
|
||||
this.technology = technology;
|
||||
}
|
||||
|
||||
public String getProvider() {
|
||||
return provider;
|
||||
}
|
||||
|
||||
public String getTechnology() {
|
||||
return technology;
|
||||
}
|
||||
|
||||
public void setTechnology(String technology) {
|
||||
this.technology = technology;
|
||||
}
|
||||
}
|
@ -0,0 +1,17 @@
|
||||
CREATE TABLE Phone (
|
||||
id BIGINT NOT NULL ,
|
||||
number VARCHAR(255) ,
|
||||
details_id BIGINT ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
CREATE TABLE PhoneDetails (
|
||||
id BIGINT NOT NULL ,
|
||||
provider VARCHAR(255) ,
|
||||
technology VARCHAR(255) ,
|
||||
PRIMARY KEY ( id )
|
||||
)
|
||||
|
||||
ALTER TABLE Phone
|
||||
ADD CONSTRAINT FKnoj7cj83ppfqbnvqqa5kolub7
|
||||
FOREIGN KEY (details_id) REFERENCES PhoneDetails
|
@ -0,0 +1,5 @@
|
||||
create table step(
|
||||
...
|
||||
instruction BLOB not null,
|
||||
...
|
||||
)
|
@ -0,0 +1,10 @@
|
||||
@Entity
|
||||
public class Step {
|
||||
...
|
||||
|
||||
@Lob
|
||||
@Basic
|
||||
public Blob instructions;
|
||||
|
||||
...
|
||||
}
|
@ -0,0 +1,10 @@
|
||||
@Entity
|
||||
public class Step {
|
||||
...
|
||||
|
||||
@Lob
|
||||
@Basic
|
||||
public byte[] instructions;
|
||||
|
||||
...
|
||||
}
|
@ -0,0 +1,5 @@
|
||||
create table product(
|
||||
...
|
||||
description CLOB not null,
|
||||
...
|
||||
)
|
@ -0,0 +1,10 @@
|
||||
@Entity
|
||||
public class Product {
|
||||
...
|
||||
|
||||
@Lob
|
||||
@Basic
|
||||
public Clob description;
|
||||
|
||||
...
|
||||
}
|
@ -0,0 +1,10 @@
|
||||
@Entity
|
||||
public class Product {
|
||||
...
|
||||
|
||||
@Lob
|
||||
@Basic
|
||||
public String description;
|
||||
|
||||
...
|
||||
}
|
@ -0,0 +1,10 @@
|
||||
@Entity
|
||||
public class Product {
|
||||
...
|
||||
|
||||
@Lob
|
||||
@Basic
|
||||
public char[] description;
|
||||
|
||||
...
|
||||
}
|
@ -0,0 +1,24 @@
|
||||
@Entity
|
||||
public class DateEvent {
|
||||
|
||||
@Id
|
||||
@GeneratedValue
|
||||
private Long id;
|
||||
|
||||
@Temporal(TemporalType.DATE)
|
||||
private Date timestamp;
|
||||
|
||||
public DateEvent() {}
|
||||
|
||||
public DateEvent(Date timestamp) {
|
||||
this.timestamp = timestamp;
|
||||
}
|
||||
|
||||
public Long getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public Date getTimestamp() {
|
||||
return timestamp;
|
||||
}
|
||||
}
|
@ -0,0 +1,54 @@
|
||||
public enum Gender {
|
||||
|
||||
MALE('M'),
|
||||
FEMALE('F');
|
||||
|
||||
private final char code;
|
||||
|
||||
private Gender( char code ) {
|
||||
this.code = code;
|
||||
}
|
||||
|
||||
public static Gender fromCode( char code ) {
|
||||
if ( code == 'M' || code == 'm' ) {
|
||||
return MALE;
|
||||
}
|
||||
if ( code == 'F' || code == 'f' ) {
|
||||
return FEMALE;
|
||||
}
|
||||
throw new IllegalArgumentException( "The code " + code + " does not correspond to a Gender" );
|
||||
}
|
||||
|
||||
public char getCode() {
|
||||
return code;
|
||||
}
|
||||
}
|
||||
|
||||
@Entity
|
||||
public class Person {
|
||||
...
|
||||
|
||||
@Basic
|
||||
@Convert( converter = GenderConverter.class )
|
||||
public Gender gender;
|
||||
}
|
||||
|
||||
@Converter
|
||||
public class GenderConverter implements AttributeConverter<Gender, Character> {
|
||||
|
||||
public Character convertToDatabaseColumn( Gender value ) {
|
||||
if ( value == null ) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return value.getCode();
|
||||
}
|
||||
|
||||
public Gender convertToEntityAttribute( Character value ) {
|
||||
if ( value == null ) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return Gender.fromCode( value );
|
||||
}
|
||||
}
|
@ -0,0 +1,82 @@
|
||||
import org.hibernate.type.descriptor.java.CharacterTypeDescriptor;
|
||||
|
||||
public enum Gender {
|
||||
|
||||
MALE('M'),
|
||||
FEMALE('F');
|
||||
|
||||
private final char code;
|
||||
|
||||
private Gender( char code ) {
|
||||
this.code = code;
|
||||
}
|
||||
|
||||
public static Gender fromCode( char code ) {
|
||||
if ( code == 'M' || code == 'm' ) {
|
||||
return MALE;
|
||||
}
|
||||
if ( code == 'F' || code == 'f' ) {
|
||||
return FEMALE;
|
||||
}
|
||||
throw new IllegalArgumentException( "The code " + code + " does not correspond to a Gender" );
|
||||
}
|
||||
|
||||
public char getCode() {
|
||||
return code;
|
||||
}
|
||||
}
|
||||
|
||||
public static class GenderJavaTypeDescriptor extends AbstractTypeDescriptor<Gender> {
|
||||
public static final GenderJavaTypeDescriptor INSTANCE = new GenderJavaTypeDescriptor();
|
||||
|
||||
public String toString( Gender value ) {
|
||||
return value == null ? null : value.name();
|
||||
}
|
||||
|
||||
public Gender fromString( String string ) {
|
||||
return string == null ? null : Gender.valueOf( string );
|
||||
}
|
||||
|
||||
public <X> X unwrap( Gender value, Class<X> type, WrapperOptions options ) {
|
||||
return CharacterTypeDescriptor.INSTANCE.unwrap(
|
||||
value == null ? null : value.getCode(),
|
||||
type,
|
||||
options
|
||||
);
|
||||
}
|
||||
|
||||
public <X> Gender wrap( X value, WrapperOptions options ) {
|
||||
return Gender.fromCode( CharacterTypeDescriptor.INSTANCE.wrap( value, options ) );
|
||||
}
|
||||
}
|
||||
|
||||
@Entity
|
||||
public class Person {
|
||||
...
|
||||
|
||||
@Basic
|
||||
@Type( type = "gender" )
|
||||
public Gender gender;
|
||||
}
|
||||
|
||||
|
||||
public class GenderType extends AbstractSingleColumnStandardBasicType<Gender> {
|
||||
|
||||
public static final GenderType INSTANCE = new GenderType();
|
||||
|
||||
private GenderType() {
|
||||
super(
|
||||
CharTypeDescriptor.INSTANCE,
|
||||
GenderJavaTypeDescriptor.INSTANCE
|
||||
);
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return "gender";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean registerUnderJavaType() {
|
||||
return true;
|
||||
}
|
||||
}
|
@ -0,0 +1,12 @@
|
||||
@Entity
|
||||
public class Person {
|
||||
...
|
||||
|
||||
@Enumerated
|
||||
public Gender gender;
|
||||
|
||||
public static enum Gender {
|
||||
MALE,
|
||||
FEMALE
|
||||
}
|
||||
}
|
@ -0,0 +1,12 @@
|
||||
@Entity
|
||||
public class Person {
|
||||
...
|
||||
|
||||
@Enumerated( STRING )
|
||||
public Gender gender;
|
||||
|
||||
public static enum Gender {
|
||||
MALE,
|
||||
FEMALE
|
||||
}
|
||||
}
|
@ -0,0 +1,17 @@
|
||||
@Entity
|
||||
public class Product {
|
||||
|
||||
@Id
|
||||
@Basic
|
||||
private Integer id;
|
||||
|
||||
@Basic
|
||||
private String sku;
|
||||
|
||||
@Basic
|
||||
private String name;
|
||||
|
||||
@Basic
|
||||
@Column( name = "NOTES" )
|
||||
private String description;
|
||||
}
|
@ -0,0 +1,44 @@
|
||||
public class FizzywigType1 implements org.hibernate.type.BasicType {
|
||||
public static final FizzywigType1 INSTANCE = new FizzywigType1();
|
||||
|
||||
@Override
|
||||
public String[] getRegistrationKeys() {
|
||||
return new String[]{Fizzywig.class.getName()};
|
||||
}
|
||||
|
||||
@Override
|
||||
public int[] sqlTypes( Mapping mapping ) {
|
||||
return new int[]{java.sql.Types.VARCHAR};
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class getReturnedClass() {
|
||||
return Fizzywig.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object nullSafeGet(
|
||||
ResultSet rs,
|
||||
String[] names,
|
||||
SessionImplementor session,
|
||||
Object owner) throws SQLException {
|
||||
return Fizzywig.fromString(
|
||||
StringType.INSTANCE.get( rs, names[0], session )
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void nullSafeSet(
|
||||
PreparedStatement st,
|
||||
Object value,
|
||||
int index,
|
||||
boolean[] settable,
|
||||
SessionImplementor session) throws SQLException {
|
||||
final String dbValue = value == null
|
||||
? null
|
||||
: (( Fizzywig ) value).asString();
|
||||
StringType.INSTANCE.nullSafeSet( st, dbValue, index, settable, session );
|
||||
}
|
||||
|
||||
...
|
||||
}
|
@ -0,0 +1,3 @@
|
||||
MetadataSources metadataSources = ...;
|
||||
metadataSources.getMetadataBuilder().applyBasicType( FizzywigType1.INSTANCE );
|
||||
...
|
@ -0,0 +1,40 @@
|
||||
public class FizzywigType2 implements org.hibernate.usertype.UserType {
|
||||
|
||||
public static final String[] KEYS = new String[]{Fizzywig.class.getName()};
|
||||
public static final FizzywigType2 INSTANCE = new FizzywigType2();
|
||||
|
||||
@Override
|
||||
public int[] sqlTypes() {
|
||||
return new int[]{java.sql.Types.VARCHAR};
|
||||
}
|
||||
|
||||
@Override
|
||||
public Class getReturnedClass() {
|
||||
return Fizzywig.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object nullSafeGet(
|
||||
ResultSet rs,
|
||||
String[] names,
|
||||
SessionImplementor session,
|
||||
Object owner) throws SQLException {
|
||||
return Fizzywig.fromString(
|
||||
StringType.INSTANCE.get( rs, names[0], session )
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void nullSafeSet(
|
||||
PreparedStatement st,
|
||||
Object value,
|
||||
int index,
|
||||
SessionImplementor session) throws SQLException {
|
||||
final String dbValue = value == null
|
||||
? null
|
||||
: (( Fizzywig ) value).asString();
|
||||
StringType.INSTANCE.nullSafeSet( st, dbValue, index, session );
|
||||
}
|
||||
|
||||
...
|
||||
}
|
@ -0,0 +1,3 @@
|
||||
MetadataSources metadataSources = ...;
|
||||
metadataSources.getMetadataBuilder().applyBasicType( FizzywigType2.INSTANCE, FizzywigType2.KEYS );
|
||||
...
|
@ -0,0 +1,13 @@
|
||||
@Entity
|
||||
public class Product {
|
||||
...
|
||||
|
||||
@Lob
|
||||
@Basic
|
||||
@Nationalized
|
||||
public NClob description;
|
||||
// Clob also works, because NClob
|
||||
// extends Clob. The db type is
|
||||
// still NCLOB either way and
|
||||
// handled as such
|
||||
}
|