SVN layout migration for core/trunk
git-svn-id: https://svn.jboss.org/repos/hibernate/core/trunk@11722 1b8cb986-b30d-0410-93ca-fae66ebed9b2
This commit is contained in:
parent
41e94b3cf8
commit
d8d6d82e30
|
@ -0,0 +1,34 @@
|
||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||||
|
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
|
<parent>
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-code</artifactId>
|
||||||
|
<version>3.3.0.beta1</version>
|
||||||
|
<relativePath>../pom.xml</relativePath>
|
||||||
|
</parent>
|
||||||
|
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-ehcache</artifactId>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
|
<name>Hibernate Ehcache integration</name>
|
||||||
|
<description>Integration of Hibernate with Ehcache</description>
|
||||||
|
|
||||||
|
<dependencies>
|
||||||
|
<dependency>
|
||||||
|
<groupId>${groupId}</groupId>
|
||||||
|
<artifactId>hibernate-core</artifactId>
|
||||||
|
<version>${hibernate.core.version}</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>net.sf.ehcache</groupId>
|
||||||
|
<artifactId>ehcache</artifactId>
|
||||||
|
<version>1.2.3</version>
|
||||||
|
</dependency>
|
||||||
|
</dependencies>
|
||||||
|
|
||||||
|
</project>
|
|
@ -0,0 +1,275 @@
|
||||||
|
//$Id: EhCache.java 10717 2006-11-03 19:05:21Z max.andersen@jboss.com $
|
||||||
|
/**
|
||||||
|
* Copyright 2003-2006 Greg Luck, Jboss Inc
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package org.hibernate.cache;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import net.sf.ehcache.CacheManager;
|
||||||
|
import net.sf.ehcache.Element;
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* EHCache plugin for Hibernate
|
||||||
|
* <p/>
|
||||||
|
* EHCache uses a {@link net.sf.ehcache.store.MemoryStore} and a
|
||||||
|
* {@link net.sf.ehcache.store.DiskStore}.
|
||||||
|
* The {@link net.sf.ehcache.store.DiskStore} requires that both keys and values be {@link java.io.Serializable}.
|
||||||
|
* However the MemoryStore does not and in ehcache-1.2 nonSerializable Objects are permitted. They are discarded
|
||||||
|
* if an attempt it made to overflow them to Disk or to replicate them to remote cache peers.
|
||||||
|
*
|
||||||
|
* @author Greg Luck
|
||||||
|
* @author Emmanuel Bernard
|
||||||
|
*/
|
||||||
|
public class EhCache implements Cache {
|
||||||
|
private static final Log log = LogFactory.getLog( EhCache.class );
|
||||||
|
|
||||||
|
private static final int SIXTY_THOUSAND_MS = 60000;
|
||||||
|
|
||||||
|
private net.sf.ehcache.Cache cache;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new Hibernate pluggable cache based on a cache name.
|
||||||
|
* <p/>
|
||||||
|
*
|
||||||
|
* @param cache The underlying EhCache instance to use.
|
||||||
|
*/
|
||||||
|
public EhCache(net.sf.ehcache.Cache cache) {
|
||||||
|
this.cache = cache;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets a value of an element which matches the given key.
|
||||||
|
*
|
||||||
|
* @param key the key of the element to return.
|
||||||
|
* @return The value placed into the cache with an earlier put, or null if not found or expired
|
||||||
|
* @throws CacheException
|
||||||
|
*/
|
||||||
|
public Object get(Object key) throws CacheException {
|
||||||
|
try {
|
||||||
|
if ( log.isDebugEnabled() ) {
|
||||||
|
log.debug( "key: " + key );
|
||||||
|
}
|
||||||
|
if ( key == null ) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
Element element = cache.get( key );
|
||||||
|
if ( element == null ) {
|
||||||
|
if ( log.isDebugEnabled() ) {
|
||||||
|
log.debug( "Element for " + key + " is null" );
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return element.getObjectValue();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (net.sf.ehcache.CacheException e) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public Object read(Object key) throws CacheException {
|
||||||
|
return get( key );
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Puts an object into the cache.
|
||||||
|
*
|
||||||
|
* @param key a key
|
||||||
|
* @param value a value
|
||||||
|
* @throws CacheException if the {@link CacheManager}
|
||||||
|
* is shutdown or another {@link Exception} occurs.
|
||||||
|
*/
|
||||||
|
public void update(Object key, Object value) throws CacheException {
|
||||||
|
put( key, value );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Puts an object into the cache.
|
||||||
|
*
|
||||||
|
* @param key a key
|
||||||
|
* @param value a value
|
||||||
|
* @throws CacheException if the {@link CacheManager}
|
||||||
|
* is shutdown or another {@link Exception} occurs.
|
||||||
|
*/
|
||||||
|
public void put(Object key, Object value) throws CacheException {
|
||||||
|
try {
|
||||||
|
Element element = new Element( key, value );
|
||||||
|
cache.put( element );
|
||||||
|
}
|
||||||
|
catch (IllegalArgumentException e) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
catch (IllegalStateException e) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
catch (net.sf.ehcache.CacheException e) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Removes the element which matches the key.
|
||||||
|
* <p/>
|
||||||
|
* If no element matches, nothing is removed and no Exception is thrown.
|
||||||
|
*
|
||||||
|
* @param key the key of the element to remove
|
||||||
|
* @throws CacheException
|
||||||
|
*/
|
||||||
|
public void remove(Object key) throws CacheException {
|
||||||
|
try {
|
||||||
|
cache.remove( key );
|
||||||
|
}
|
||||||
|
catch (ClassCastException e) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
catch (IllegalStateException e) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
catch (net.sf.ehcache.CacheException e) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove all elements in the cache, but leave the cache
|
||||||
|
* in a useable state.
|
||||||
|
*
|
||||||
|
* @throws CacheException
|
||||||
|
*/
|
||||||
|
public void clear() throws CacheException {
|
||||||
|
try {
|
||||||
|
cache.removeAll();
|
||||||
|
}
|
||||||
|
catch (IllegalStateException e) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
catch (net.sf.ehcache.CacheException e) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove the cache and make it unuseable.
|
||||||
|
*
|
||||||
|
* @throws CacheException
|
||||||
|
*/
|
||||||
|
public void destroy() throws CacheException {
|
||||||
|
try {
|
||||||
|
cache.getCacheManager().removeCache( cache.getName() );
|
||||||
|
}
|
||||||
|
catch (IllegalStateException e) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
catch (net.sf.ehcache.CacheException e) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calls to this method should perform there own synchronization.
|
||||||
|
* It is provided for distributed caches. Because EHCache is not distributed
|
||||||
|
* this method does nothing.
|
||||||
|
*/
|
||||||
|
public void lock(Object key) throws CacheException {
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calls to this method should perform there own synchronization.
|
||||||
|
* It is provided for distributed caches. Because EHCache is not distributed
|
||||||
|
* this method does nothing.
|
||||||
|
*/
|
||||||
|
public void unlock(Object key) throws CacheException {
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the next timestamp;
|
||||||
|
*/
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return Timestamper.next();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the lock timeout for this cache.
|
||||||
|
*/
|
||||||
|
public int getTimeout() {
|
||||||
|
// 60 second lock timeout
|
||||||
|
return Timestamper.ONE_MS * SIXTY_THOUSAND_MS;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getRegionName() {
|
||||||
|
return cache.getName();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Warning: This method can be very expensive to run. Allow approximately 1 second
|
||||||
|
* per 1MB of entries. Running this method could create liveness problems
|
||||||
|
* because the object lock is held for a long period
|
||||||
|
* <p/>
|
||||||
|
*
|
||||||
|
* @return the approximate size of memory ehcache is using for the MemoryStore for this cache
|
||||||
|
*/
|
||||||
|
public long getSizeInMemory() {
|
||||||
|
try {
|
||||||
|
return cache.calculateInMemorySize();
|
||||||
|
}
|
||||||
|
catch (Throwable t) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getElementCountInMemory() {
|
||||||
|
try {
|
||||||
|
return cache.getMemoryStoreSize();
|
||||||
|
}
|
||||||
|
catch (net.sf.ehcache.CacheException ce) {
|
||||||
|
throw new CacheException( ce );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getElementCountOnDisk() {
|
||||||
|
return cache.getDiskStoreSize();
|
||||||
|
}
|
||||||
|
|
||||||
|
public Map toMap() {
|
||||||
|
try {
|
||||||
|
Map result = new HashMap();
|
||||||
|
Iterator iter = cache.getKeys().iterator();
|
||||||
|
while ( iter.hasNext() ) {
|
||||||
|
Object key = iter.next();
|
||||||
|
result.put( key, cache.get( key ).getObjectValue() );
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public String toString() {
|
||||||
|
return "EHCache(" + getRegionName() + ')';
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,167 @@
|
||||||
|
//$Id: EhCacheProvider.java 9964 2006-05-30 15:40:54Z epbernard $
|
||||||
|
/**
|
||||||
|
* Copyright 2003-2006 Greg Luck, Jboss Inc
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
package org.hibernate.cache;
|
||||||
|
|
||||||
|
import java.util.Properties;
|
||||||
|
import java.net.URL;
|
||||||
|
|
||||||
|
import net.sf.ehcache.CacheManager;
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
import org.hibernate.cfg.Environment;
|
||||||
|
import org.hibernate.util.StringHelper;
|
||||||
|
import org.hibernate.util.ConfigHelper;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cache Provider plugin for Hibernate
|
||||||
|
*
|
||||||
|
* Use <code>hibernate.cache.provider_class=org.hibernate.cache.EhCacheProvider</code>
|
||||||
|
* in Hibernate 3.x or later
|
||||||
|
*
|
||||||
|
* Taken from EhCache 0.9 distribution
|
||||||
|
* @author Greg Luck
|
||||||
|
* @author Emmanuel Bernard
|
||||||
|
*/
|
||||||
|
/**
|
||||||
|
* Cache Provider plugin for ehcache-1.2. New in this provider are ehcache support for multiple
|
||||||
|
* Hibernate session factories, each with its own ehcache configuration, and non Serializable keys and values.
|
||||||
|
* Ehcache-1.2 also has many other features such as cluster support and listeners, which can be used seamlessly simply
|
||||||
|
* by configurion in ehcache.xml.
|
||||||
|
* <p/>
|
||||||
|
* Use <code>hibernate.cache.provider_class=org.hibernate.cache.EhCacheProvider</code> in the Hibernate configuration
|
||||||
|
* to enable this provider for Hibernate's second level cache.
|
||||||
|
* <p/>
|
||||||
|
* When configuring multiple ehcache CacheManagers, as you would where you have multiple Hibernate Configurations and
|
||||||
|
* multiple SessionFactories, specify in each Hibernate configuration the ehcache configuration using
|
||||||
|
* the property <code>hibernate.cache.provider_configuration_file_resource_path</code> An example to set an ehcache configuration
|
||||||
|
* called ehcache-2.xml would be <code>hibernate.cache.provider_configuration_file_resource_path=/ehcache-2.xml</code>. If the leading
|
||||||
|
* slash is not there one will be added. The configuration file will be looked for in the root of the classpath.
|
||||||
|
* <p/>
|
||||||
|
* Updated for ehcache-1.2. Note this provider requires ehcache-1.2.jar. Make sure ehcache-1.1.jar or earlier
|
||||||
|
* is not in the classpath or it will not work.
|
||||||
|
* <p/>
|
||||||
|
* See http://ehcache.sf.net for documentation on ehcache
|
||||||
|
* <p/>
|
||||||
|
*
|
||||||
|
* @author Greg Luck
|
||||||
|
* @author Emmanuel Bernard
|
||||||
|
*/
|
||||||
|
public class EhCacheProvider implements CacheProvider {
|
||||||
|
|
||||||
|
private static final Log log = LogFactory.getLog(EhCacheProvider.class);
|
||||||
|
|
||||||
|
private CacheManager manager;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builds a Cache.
|
||||||
|
* <p>
|
||||||
|
* Even though this method provides properties, they are not used.
|
||||||
|
* Properties for EHCache are specified in the ehcache.xml file.
|
||||||
|
* Configuration will be read from ehcache.xml for a cache declaration
|
||||||
|
* where the name attribute matches the name parameter in this builder.
|
||||||
|
*
|
||||||
|
* @param name the name of the cache. Must match a cache configured in ehcache.xml
|
||||||
|
* @param properties not used
|
||||||
|
* @return a newly built cache will be built and initialised
|
||||||
|
* @throws CacheException inter alia, if a cache of the same name already exists
|
||||||
|
*/
|
||||||
|
public Cache buildCache(String name, Properties properties) throws CacheException {
|
||||||
|
try {
|
||||||
|
net.sf.ehcache.Cache cache = manager.getCache(name);
|
||||||
|
if (cache == null) {
|
||||||
|
log.warn("Could not find configuration [" + name + "]; using defaults.");
|
||||||
|
manager.addCache(name);
|
||||||
|
cache = manager.getCache(name);
|
||||||
|
log.debug("started EHCache region: " + name);
|
||||||
|
}
|
||||||
|
return new EhCache(cache);
|
||||||
|
}
|
||||||
|
catch (net.sf.ehcache.CacheException e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the next timestamp.
|
||||||
|
*/
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return Timestamper.next();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Callback to perform any necessary initialization of the underlying cache implementation
|
||||||
|
* during SessionFactory construction.
|
||||||
|
*
|
||||||
|
* @param properties current configuration settings.
|
||||||
|
*/
|
||||||
|
public void start(Properties properties) throws CacheException {
|
||||||
|
if (manager != null) {
|
||||||
|
log.warn("Attempt to restart an already started EhCacheProvider. Use sessionFactory.close() " +
|
||||||
|
" between repeated calls to buildSessionFactory. Using previously created EhCacheProvider." +
|
||||||
|
" If this behaviour is required, consider using net.sf.ehcache.hibernate.SingletonEhCacheProvider.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
String configurationResourceName = null;
|
||||||
|
if (properties != null) {
|
||||||
|
configurationResourceName = (String) properties.get( Environment.CACHE_PROVIDER_CONFIG );
|
||||||
|
}
|
||||||
|
if ( StringHelper.isEmpty( configurationResourceName ) ) {
|
||||||
|
manager = new CacheManager();
|
||||||
|
} else {
|
||||||
|
URL url = loadResource(configurationResourceName);
|
||||||
|
manager = new CacheManager(url);
|
||||||
|
}
|
||||||
|
} catch (net.sf.ehcache.CacheException e) {
|
||||||
|
//yukky! Don't you have subclasses for that!
|
||||||
|
//TODO race conditions can happen here
|
||||||
|
if (e.getMessage().startsWith("Cannot parseConfiguration CacheManager. Attempt to create a new instance of " +
|
||||||
|
"CacheManager using the diskStorePath")) {
|
||||||
|
throw new CacheException("Attempt to restart an already started EhCacheProvider. Use sessionFactory.close() " +
|
||||||
|
" between repeated calls to buildSessionFactory. Consider using net.sf.ehcache.hibernate.SingletonEhCacheProvider."
|
||||||
|
, e );
|
||||||
|
} else {
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private URL loadResource(String configurationResourceName) {
|
||||||
|
URL url = ConfigHelper.locateConfig( configurationResourceName );
|
||||||
|
if (log.isDebugEnabled()) {
|
||||||
|
log.debug("Creating EhCacheProvider from a specified resource: "
|
||||||
|
+ configurationResourceName + " Resolved to URL: " + url);
|
||||||
|
}
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Callback to perform any necessary cleanup of the underlying cache implementation
|
||||||
|
* during SessionFactory.close().
|
||||||
|
*/
|
||||||
|
public void stop() {
|
||||||
|
if (manager != null) {
|
||||||
|
manager.shutdown();
|
||||||
|
manager = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isMinimalPutsEnabledByDefault() {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,65 @@
|
||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||||
|
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
|
<parent>
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-code</artifactId>
|
||||||
|
<version>3.3.0.beta1</version>
|
||||||
|
<relativePath>../pom.xml</relativePath>
|
||||||
|
</parent>
|
||||||
|
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-jbosscache</artifactId>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
|
<name>Hibernate JBossCache integration</name>
|
||||||
|
<description>Integration of Hibernate with JBossCache (based on JBossCache1.x APIs)</description>
|
||||||
|
|
||||||
|
<dependencies>
|
||||||
|
<dependency>
|
||||||
|
<groupId>${groupId}</groupId>
|
||||||
|
<artifactId>hibernate-core</artifactId>
|
||||||
|
<version>${hibernate.core.version}</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>jboss</groupId>
|
||||||
|
<artifactId>jboss-cache</artifactId>
|
||||||
|
<version>1.4.1.GA</version>
|
||||||
|
</dependency>
|
||||||
|
<!-- jboss-cache (the one from the jboss repo, anyway) does not properly define its dependencies -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>jboss</groupId>
|
||||||
|
<artifactId>jboss-system</artifactId>
|
||||||
|
<version>4.0.2</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>jboss</groupId>
|
||||||
|
<artifactId>jboss-common</artifactId>
|
||||||
|
<version>4.0.2</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>jboss</groupId>
|
||||||
|
<artifactId>jboss-minimal</artifactId>
|
||||||
|
<version>4.0.2</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>jboss</groupId>
|
||||||
|
<artifactId>jboss-j2se</artifactId>
|
||||||
|
<version>200504122039</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>concurrent</groupId>
|
||||||
|
<artifactId>concurrent</artifactId>
|
||||||
|
<version>1.3.4</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>jgroups</groupId>
|
||||||
|
<artifactId>jgroups-all</artifactId>
|
||||||
|
<version>2.2.7</version>
|
||||||
|
</dependency>
|
||||||
|
</dependencies>
|
||||||
|
|
||||||
|
</project>
|
63
code/cache-jbosscache/src/main/java/org/hibernate/cache/JndiBoundTreeCacheProvider.java
vendored
Normal file
63
code/cache-jbosscache/src/main/java/org/hibernate/cache/JndiBoundTreeCacheProvider.java
vendored
Normal file
|
@ -0,0 +1,63 @@
|
||||||
|
// $Id: JndiBoundTreeCacheProvider.java 6079 2005-03-16 06:01:18Z oneovthafew $
|
||||||
|
package org.hibernate.cache;
|
||||||
|
|
||||||
|
import java.util.Properties;
|
||||||
|
|
||||||
|
import javax.transaction.TransactionManager;
|
||||||
|
|
||||||
|
import org.hibernate.transaction.TransactionManagerLookup;
|
||||||
|
import org.hibernate.transaction.TransactionManagerLookupFactory;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Support for JBossCache (TreeCache), where the cache instance is available
|
||||||
|
* via JNDI lookup.
|
||||||
|
*
|
||||||
|
* @author Steve Ebersole
|
||||||
|
*/
|
||||||
|
public class JndiBoundTreeCacheProvider extends AbstractJndiBoundCacheProvider {
|
||||||
|
|
||||||
|
private TransactionManager transactionManager;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Construct a Cache representing the "region" within in the underlying cache
|
||||||
|
* provider.
|
||||||
|
*
|
||||||
|
* @param regionName the name of the cache region
|
||||||
|
* @param properties configuration settings
|
||||||
|
*
|
||||||
|
* @throws CacheException
|
||||||
|
*/
|
||||||
|
public Cache buildCache(String regionName, Properties properties) throws CacheException {
|
||||||
|
return new TreeCache( getTreeCacheInstance(), regionName, transactionManager );
|
||||||
|
}
|
||||||
|
|
||||||
|
public void prepare(Properties properties) throws CacheException {
|
||||||
|
TransactionManagerLookup transactionManagerLookup = TransactionManagerLookupFactory.getTransactionManagerLookup(properties);
|
||||||
|
if (transactionManagerLookup!=null) {
|
||||||
|
transactionManager = transactionManagerLookup.getTransactionManager(properties);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Generate a timestamp
|
||||||
|
*/
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return System.currentTimeMillis() / 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* By default, should minimal-puts mode be enabled when using this cache.
|
||||||
|
* <p/>
|
||||||
|
* Since TreeCache is a clusterable cache and we are only getting a
|
||||||
|
* reference the instance from JNDI, safest to assume a clustered
|
||||||
|
* setup and return true here.
|
||||||
|
*
|
||||||
|
* @return True.
|
||||||
|
*/
|
||||||
|
public boolean isMinimalPutsEnabledByDefault() {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public org.jboss.cache.TreeCache getTreeCacheInstance() {
|
||||||
|
return ( org.jboss.cache.TreeCache ) super.getCache();
|
||||||
|
}
|
||||||
|
}
|
329
code/cache-jbosscache/src/main/java/org/hibernate/cache/OptimisticTreeCache.java
vendored
Normal file
329
code/cache-jbosscache/src/main/java/org/hibernate/cache/OptimisticTreeCache.java
vendored
Normal file
|
@ -0,0 +1,329 @@
|
||||||
|
//$Id: OptimisticTreeCache.java 10118 2006-07-13 21:38:41Z steve.ebersole@jboss.com $
|
||||||
|
package org.hibernate.cache;
|
||||||
|
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.Comparator;
|
||||||
|
|
||||||
|
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
import org.jboss.cache.Fqn;
|
||||||
|
import org.jboss.cache.optimistic.DataVersion;
|
||||||
|
import org.jboss.cache.config.Option;
|
||||||
|
import org.jboss.cache.lock.TimeoutException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Represents a particular region within the given JBossCache TreeCache
|
||||||
|
* utilizing TreeCache's optimistic locking capabilities.
|
||||||
|
*
|
||||||
|
* @see OptimisticTreeCacheProvider for more details
|
||||||
|
*
|
||||||
|
* @author Steve Ebersole
|
||||||
|
*/
|
||||||
|
public class OptimisticTreeCache implements OptimisticCache, TransactionAwareCache {
|
||||||
|
|
||||||
|
// todo : eventually merge this with TreeCache and just add optional opt-lock support there.
|
||||||
|
|
||||||
|
private static final Log log = LogFactory.getLog( OptimisticTreeCache.class);
|
||||||
|
|
||||||
|
private static final String ITEM = "item";
|
||||||
|
|
||||||
|
private org.jboss.cache.TreeCache cache;
|
||||||
|
private final String regionName;
|
||||||
|
private final Fqn regionFqn;
|
||||||
|
private OptimisticCacheSource source;
|
||||||
|
|
||||||
|
public OptimisticTreeCache(org.jboss.cache.TreeCache cache, String regionName)
|
||||||
|
throws CacheException {
|
||||||
|
this.cache = cache;
|
||||||
|
this.regionName = regionName;
|
||||||
|
this.regionFqn = Fqn.fromString( regionName.replace( '.', '/' ) );
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// OptimisticCache impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
public void setSource(OptimisticCacheSource source) {
|
||||||
|
this.source = source;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void writeInsert(Object key, Object value, Object currentVersion) {
|
||||||
|
writeUpdate( key, value, currentVersion, null );
|
||||||
|
}
|
||||||
|
|
||||||
|
public void writeUpdate(Object key, Object value, Object currentVersion, Object previousVersion) {
|
||||||
|
try {
|
||||||
|
Option option = new Option();
|
||||||
|
DataVersion dv = ( source != null && source.isVersioned() )
|
||||||
|
? new DataVersionAdapter( currentVersion, previousVersion, source.getVersionComparator(), source.toString() )
|
||||||
|
: NonLockingDataVersion.INSTANCE;
|
||||||
|
option.setDataVersion( dv );
|
||||||
|
cache.put( new Fqn( regionFqn, key ), ITEM, value, option );
|
||||||
|
}
|
||||||
|
catch ( Exception e ) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void writeLoad(Object key, Object value, Object currentVersion) {
|
||||||
|
try {
|
||||||
|
Option option = new Option();
|
||||||
|
option.setFailSilently( true );
|
||||||
|
option.setDataVersion( NonLockingDataVersion.INSTANCE );
|
||||||
|
cache.remove( new Fqn( regionFqn, key ), "ITEM", option );
|
||||||
|
|
||||||
|
option = new Option();
|
||||||
|
option.setFailSilently( true );
|
||||||
|
DataVersion dv = ( source != null && source.isVersioned() )
|
||||||
|
? new DataVersionAdapter( currentVersion, currentVersion, source.getVersionComparator(), source.toString() )
|
||||||
|
: NonLockingDataVersion.INSTANCE;
|
||||||
|
option.setDataVersion( dv );
|
||||||
|
cache.put( new Fqn( regionFqn, key ), ITEM, value, option );
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// Cache impl ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
public Object get(Object key) throws CacheException {
|
||||||
|
try {
|
||||||
|
Option option = new Option();
|
||||||
|
option.setFailSilently( true );
|
||||||
|
// option.setDataVersion( NonLockingDataVersion.INSTANCE );
|
||||||
|
return cache.get( new Fqn( regionFqn, key ), ITEM, option );
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public Object read(Object key) throws CacheException {
|
||||||
|
try {
|
||||||
|
return cache.get( new Fqn( regionFqn, key ), ITEM );
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void update(Object key, Object value) throws CacheException {
|
||||||
|
try {
|
||||||
|
Option option = new Option();
|
||||||
|
option.setDataVersion( NonLockingDataVersion.INSTANCE );
|
||||||
|
cache.put( new Fqn( regionFqn, key ), ITEM, value, option );
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void put(Object key, Object value) throws CacheException {
|
||||||
|
try {
|
||||||
|
log.trace( "performing put() into region [" + regionName + "]" );
|
||||||
|
// do the put outside the scope of the JTA txn
|
||||||
|
Option option = new Option();
|
||||||
|
option.setFailSilently( true );
|
||||||
|
option.setDataVersion( NonLockingDataVersion.INSTANCE );
|
||||||
|
cache.put( new Fqn( regionFqn, key ), ITEM, value, option );
|
||||||
|
}
|
||||||
|
catch (TimeoutException te) {
|
||||||
|
//ignore!
|
||||||
|
log.debug("ignoring write lock acquisition failure");
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void remove(Object key) throws CacheException {
|
||||||
|
try {
|
||||||
|
// tree cache in optimistic mode seems to have as very difficult
|
||||||
|
// time with remove calls on non-existent nodes (NPEs)...
|
||||||
|
if ( cache.get( new Fqn( regionFqn, key ), ITEM ) != null ) {
|
||||||
|
Option option = new Option();
|
||||||
|
option.setDataVersion( NonLockingDataVersion.INSTANCE );
|
||||||
|
cache.remove( new Fqn( regionFqn, key ), option );
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
log.trace( "skipping remove() call as the underlying node did not seem to exist" );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void clear() throws CacheException {
|
||||||
|
try {
|
||||||
|
Option option = new Option();
|
||||||
|
option.setDataVersion( NonLockingDataVersion.INSTANCE );
|
||||||
|
cache.remove( regionFqn, option );
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void destroy() throws CacheException {
|
||||||
|
try {
|
||||||
|
Option option = new Option();
|
||||||
|
option.setCacheModeLocal( true );
|
||||||
|
option.setFailSilently( true );
|
||||||
|
option.setDataVersion( NonLockingDataVersion.INSTANCE );
|
||||||
|
cache.remove( regionFqn, option );
|
||||||
|
}
|
||||||
|
catch( Exception e ) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void lock(Object key) throws CacheException {
|
||||||
|
throw new UnsupportedOperationException( "TreeCache is a fully transactional cache" + regionName );
|
||||||
|
}
|
||||||
|
|
||||||
|
public void unlock(Object key) throws CacheException {
|
||||||
|
throw new UnsupportedOperationException( "TreeCache is a fully transactional cache: " + regionName );
|
||||||
|
}
|
||||||
|
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return System.currentTimeMillis() / 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
public int getTimeout() {
|
||||||
|
return 600; //60 seconds
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getRegionName() {
|
||||||
|
return regionName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getSizeInMemory() {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getElementCountInMemory() {
|
||||||
|
try {
|
||||||
|
Set children = cache.getChildrenNames( regionFqn );
|
||||||
|
return children == null ? 0 : children.size();
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getElementCountOnDisk() {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Map toMap() {
|
||||||
|
try {
|
||||||
|
Map result = new HashMap();
|
||||||
|
Set childrenNames = cache.getChildrenNames( regionFqn );
|
||||||
|
if (childrenNames != null) {
|
||||||
|
Iterator iter = childrenNames.iterator();
|
||||||
|
while ( iter.hasNext() ) {
|
||||||
|
Object key = iter.next();
|
||||||
|
result.put(
|
||||||
|
key,
|
||||||
|
cache.get( new Fqn( regionFqn, key ), ITEM )
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public String toString() {
|
||||||
|
return "OptimisticTreeCache(" + regionName + ')';
|
||||||
|
}
|
||||||
|
|
||||||
|
public static class DataVersionAdapter implements DataVersion {
|
||||||
|
private final Object currentVersion;
|
||||||
|
private final Object previousVersion;
|
||||||
|
private final Comparator versionComparator;
|
||||||
|
private final String sourceIdentifer;
|
||||||
|
|
||||||
|
public DataVersionAdapter(Object currentVersion, Object previousVersion, Comparator versionComparator, String sourceIdentifer) {
|
||||||
|
this.currentVersion = currentVersion;
|
||||||
|
this.previousVersion = previousVersion;
|
||||||
|
this.versionComparator = versionComparator;
|
||||||
|
this.sourceIdentifer = sourceIdentifer;
|
||||||
|
log.trace( "created " + this );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* newerThan() call is dispatched against the DataVersion currently
|
||||||
|
* associated with the node; the passed dataVersion param is the
|
||||||
|
* DataVersion associated with the data we are trying to put into
|
||||||
|
* the node.
|
||||||
|
* <p/>
|
||||||
|
* we are expected to return true in the case where we (the current
|
||||||
|
* node DataVersion) are newer that then incoming value. Returning
|
||||||
|
* true here essentially means that a optimistic lock failure has
|
||||||
|
* occured (because conversely, the value we are trying to put into
|
||||||
|
* the node is "older than" the value already there...)
|
||||||
|
*/
|
||||||
|
public boolean newerThan(DataVersion dataVersion) {
|
||||||
|
log.trace( "checking [" + this + "] against [" + dataVersion + "]" );
|
||||||
|
if ( dataVersion instanceof CircumventChecksDataVersion ) {
|
||||||
|
log.trace( "skipping lock checks..." );
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
else if ( dataVersion instanceof NonLockingDataVersion ) {
|
||||||
|
// can happen because of the multiple ways Cache.remove()
|
||||||
|
// can be invoked :(
|
||||||
|
log.trace( "skipping lock checks..." );
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
DataVersionAdapter other = ( DataVersionAdapter ) dataVersion;
|
||||||
|
if ( other.previousVersion == null ) {
|
||||||
|
log.warn( "Unexpected optimistic lock check on inserting data" );
|
||||||
|
// work around the "feature" where tree cache is validating the
|
||||||
|
// inserted node during the next transaction. no idea...
|
||||||
|
if ( this == dataVersion ) {
|
||||||
|
log.trace( "skipping lock checks due to same DV instance" );
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return versionComparator.compare( currentVersion, other.previousVersion ) >= 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String toString() {
|
||||||
|
return super.toString() + " [current=" + currentVersion + ", previous=" + previousVersion + ", src=" + sourceIdentifer + "]";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Used in regions where no locking should ever occur. This includes query-caches,
|
||||||
|
* update-timestamps caches, collection caches, and entity caches where the entity
|
||||||
|
* is not versioned.
|
||||||
|
*/
|
||||||
|
public static class NonLockingDataVersion implements DataVersion {
|
||||||
|
public static final DataVersion INSTANCE = new NonLockingDataVersion();
|
||||||
|
public boolean newerThan(DataVersion dataVersion) {
|
||||||
|
log.trace( "non locking lock check...");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Used to signal to a DataVersionAdapter to simply not perform any checks. This
|
||||||
|
* is currently needed for proper handling of remove() calls for entity cache regions
|
||||||
|
* (we do not know the version info...).
|
||||||
|
*/
|
||||||
|
public static class CircumventChecksDataVersion implements DataVersion {
|
||||||
|
public static final DataVersion INSTANCE = new CircumventChecksDataVersion();
|
||||||
|
public boolean newerThan(DataVersion dataVersion) {
|
||||||
|
throw new CacheException( "optimistic locking checks should never happen on CircumventChecksDataVersion" );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
130
code/cache-jbosscache/src/main/java/org/hibernate/cache/OptimisticTreeCacheProvider.java
vendored
Normal file
130
code/cache-jbosscache/src/main/java/org/hibernate/cache/OptimisticTreeCacheProvider.java
vendored
Normal file
|
@ -0,0 +1,130 @@
|
||||||
|
//$Id: OptimisticTreeCacheProvider.java 9895 2006-05-05 19:27:17Z epbernard $
|
||||||
|
package org.hibernate.cache;
|
||||||
|
|
||||||
|
import java.util.Properties;
|
||||||
|
import javax.transaction.TransactionManager;
|
||||||
|
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
import org.hibernate.cfg.Environment;
|
||||||
|
import org.hibernate.transaction.TransactionManagerLookup;
|
||||||
|
import org.hibernate.transaction.TransactionManagerLookupFactory;
|
||||||
|
import org.jboss.cache.PropertyConfigurator;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Support for a standalone JBossCache TreeCache instance utilizing TreeCache's
|
||||||
|
* optimistic locking capabilities. This capability was added in JBossCache
|
||||||
|
* version 1.3.0; as such this provider will only work with that version or
|
||||||
|
* higher.
|
||||||
|
* <p/>
|
||||||
|
* The TreeCache instance is configured via a local config resource. The
|
||||||
|
* resource to be used for configuration can be controlled by specifying a value
|
||||||
|
* for the {@link #CONFIG_RESOURCE} config property.
|
||||||
|
*
|
||||||
|
* @author Steve Ebersole
|
||||||
|
*/
|
||||||
|
public class OptimisticTreeCacheProvider implements CacheProvider {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @deprecated use {@link Environment.CACHE_PROVIDER_CONFIG}
|
||||||
|
*/
|
||||||
|
public static final String CONFIG_RESOURCE = "hibernate.cache.opt_tree_cache.config";
|
||||||
|
public static final String DEFAULT_CONFIG = "treecache.xml";
|
||||||
|
|
||||||
|
private static final String NODE_LOCKING_SCHEME = "OPTIMISTIC";
|
||||||
|
private static final Log log = LogFactory.getLog( OptimisticTreeCacheProvider.class );
|
||||||
|
|
||||||
|
private org.jboss.cache.TreeCache cache;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Construct and configure the Cache representation of a named cache region.
|
||||||
|
*
|
||||||
|
* @param regionName the name of the cache region
|
||||||
|
* @param properties configuration settings
|
||||||
|
* @return The Cache representation of the named cache region.
|
||||||
|
* @throws CacheException
|
||||||
|
* Indicates an error building the cache region.
|
||||||
|
*/
|
||||||
|
public Cache buildCache(String regionName, Properties properties) throws CacheException {
|
||||||
|
return new OptimisticTreeCache( cache, regionName );
|
||||||
|
}
|
||||||
|
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return System.currentTimeMillis() / 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Prepare the underlying JBossCache TreeCache instance.
|
||||||
|
*
|
||||||
|
* @param properties All current config settings.
|
||||||
|
* @throws CacheException
|
||||||
|
* Indicates a problem preparing cache for use.
|
||||||
|
*/
|
||||||
|
public void start(Properties properties) {
|
||||||
|
String resource = properties.getProperty( Environment.CACHE_PROVIDER_CONFIG );
|
||||||
|
if (resource == null) {
|
||||||
|
resource = properties.getProperty( CONFIG_RESOURCE );
|
||||||
|
}
|
||||||
|
if ( resource == null ) {
|
||||||
|
resource = DEFAULT_CONFIG;
|
||||||
|
}
|
||||||
|
log.debug( "Configuring TreeCache from resource [" + resource + "]" );
|
||||||
|
try {
|
||||||
|
cache = new org.jboss.cache.TreeCache();
|
||||||
|
PropertyConfigurator config = new PropertyConfigurator();
|
||||||
|
config.configure( cache, resource );
|
||||||
|
TransactionManagerLookup transactionManagerLookup =
|
||||||
|
TransactionManagerLookupFactory.getTransactionManagerLookup( properties );
|
||||||
|
if ( transactionManagerLookup == null ) {
|
||||||
|
throw new CacheException(
|
||||||
|
"JBossCache only supports optimisitc locking with a configured " +
|
||||||
|
"TransactionManagerLookup (" + Environment.TRANSACTION_MANAGER_STRATEGY + ")"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
cache.setTransactionManagerLookup(
|
||||||
|
new TransactionManagerLookupAdaptor(
|
||||||
|
transactionManagerLookup,
|
||||||
|
properties
|
||||||
|
)
|
||||||
|
);
|
||||||
|
if ( ! NODE_LOCKING_SCHEME.equalsIgnoreCase( cache.getNodeLockingScheme() ) ) {
|
||||||
|
log.info( "Overriding node-locking-scheme to : " + NODE_LOCKING_SCHEME );
|
||||||
|
cache.setNodeLockingScheme( NODE_LOCKING_SCHEME );
|
||||||
|
}
|
||||||
|
cache.start();
|
||||||
|
}
|
||||||
|
catch ( Exception e ) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void stop() {
|
||||||
|
if ( cache != null ) {
|
||||||
|
cache.stop();
|
||||||
|
cache.destroy();
|
||||||
|
cache = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isMinimalPutsEnabledByDefault() {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
static final class TransactionManagerLookupAdaptor implements org.jboss.cache.TransactionManagerLookup {
|
||||||
|
private final TransactionManagerLookup tml;
|
||||||
|
private final Properties props;
|
||||||
|
|
||||||
|
TransactionManagerLookupAdaptor(TransactionManagerLookup tml, Properties props) {
|
||||||
|
this.tml = tml;
|
||||||
|
this.props = props;
|
||||||
|
}
|
||||||
|
|
||||||
|
public TransactionManager getTransactionManager() throws Exception {
|
||||||
|
return tml.getTransactionManager( props );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public org.jboss.cache.TreeCache getUnderlyingCache() {
|
||||||
|
return cache;
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,205 @@
|
||||||
|
//$Id: TreeCache.java 9965 2006-05-30 18:00:28Z steve.ebersole@jboss.com $
|
||||||
|
package org.hibernate.cache;
|
||||||
|
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Set;
|
||||||
|
|
||||||
|
import javax.transaction.SystemException;
|
||||||
|
import javax.transaction.Transaction;
|
||||||
|
import javax.transaction.TransactionManager;
|
||||||
|
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
import org.jboss.cache.Fqn;
|
||||||
|
import org.jboss.cache.lock.TimeoutException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Represents a particular region within the given JBossCache TreeCache.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class TreeCache implements Cache, TransactionAwareCache {
|
||||||
|
|
||||||
|
private static final Log log = LogFactory.getLog(TreeCache.class);
|
||||||
|
|
||||||
|
private static final String ITEM = "item";
|
||||||
|
|
||||||
|
private org.jboss.cache.TreeCache cache;
|
||||||
|
private final String regionName;
|
||||||
|
private final Fqn regionFqn;
|
||||||
|
private final TransactionManager transactionManager;
|
||||||
|
|
||||||
|
public TreeCache(org.jboss.cache.TreeCache cache, String regionName, TransactionManager transactionManager)
|
||||||
|
throws CacheException {
|
||||||
|
this.cache = cache;
|
||||||
|
this.regionName = regionName;
|
||||||
|
this.regionFqn = Fqn.fromString( regionName.replace( '.', '/' ) );
|
||||||
|
this.transactionManager = transactionManager;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Object get(Object key) throws CacheException {
|
||||||
|
Transaction tx = suspend();
|
||||||
|
try {
|
||||||
|
return read(key);
|
||||||
|
}
|
||||||
|
finally {
|
||||||
|
resume( tx );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public Object read(Object key) throws CacheException {
|
||||||
|
try {
|
||||||
|
return cache.get( new Fqn( regionFqn, key ), ITEM );
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void update(Object key, Object value) throws CacheException {
|
||||||
|
try {
|
||||||
|
cache.put( new Fqn( regionFqn, key ), ITEM, value );
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void put(Object key, Object value) throws CacheException {
|
||||||
|
Transaction tx = suspend();
|
||||||
|
try {
|
||||||
|
//do the failfast put outside the scope of the JTA txn
|
||||||
|
cache.putFailFast( new Fqn( regionFqn, key ), ITEM, value, 0 );
|
||||||
|
}
|
||||||
|
catch (TimeoutException te) {
|
||||||
|
//ignore!
|
||||||
|
log.debug("ignoring write lock acquisition failure");
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
finally {
|
||||||
|
resume( tx );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void resume(Transaction tx) {
|
||||||
|
try {
|
||||||
|
if (tx!=null) transactionManager.resume(tx);
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException("Could not resume transaction", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private Transaction suspend() {
|
||||||
|
Transaction tx = null;
|
||||||
|
try {
|
||||||
|
if ( transactionManager!=null ) {
|
||||||
|
tx = transactionManager.suspend();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (SystemException se) {
|
||||||
|
throw new CacheException("Could not suspend transaction", se);
|
||||||
|
}
|
||||||
|
return tx;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void remove(Object key) throws CacheException {
|
||||||
|
try {
|
||||||
|
cache.remove( new Fqn( regionFqn, key ) );
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void clear() throws CacheException {
|
||||||
|
try {
|
||||||
|
cache.remove( regionFqn );
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void destroy() throws CacheException {
|
||||||
|
try {
|
||||||
|
// NOTE : evict() operates locally only (i.e., does not propogate
|
||||||
|
// to any other nodes in the potential cluster). This is
|
||||||
|
// exactly what is needed when we destroy() here; destroy() is used
|
||||||
|
// as part of the process of shutting down a SessionFactory; thus
|
||||||
|
// these removals should not be propogated
|
||||||
|
cache.evict( regionFqn );
|
||||||
|
}
|
||||||
|
catch( Exception e ) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void lock(Object key) throws CacheException {
|
||||||
|
throw new UnsupportedOperationException( "TreeCache is a fully transactional cache" + regionName );
|
||||||
|
}
|
||||||
|
|
||||||
|
public void unlock(Object key) throws CacheException {
|
||||||
|
throw new UnsupportedOperationException( "TreeCache is a fully transactional cache: " + regionName );
|
||||||
|
}
|
||||||
|
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return System.currentTimeMillis() / 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
public int getTimeout() {
|
||||||
|
return 600; //60 seconds
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getRegionName() {
|
||||||
|
return regionName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getSizeInMemory() {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getElementCountInMemory() {
|
||||||
|
try {
|
||||||
|
Set children = cache.getChildrenNames( regionFqn );
|
||||||
|
return children == null ? 0 : children.size();
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getElementCountOnDisk() {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Map toMap() {
|
||||||
|
try {
|
||||||
|
Map result = new HashMap();
|
||||||
|
Set childrenNames = cache.getChildrenNames( regionFqn );
|
||||||
|
if (childrenNames != null) {
|
||||||
|
Iterator iter = childrenNames.iterator();
|
||||||
|
while ( iter.hasNext() ) {
|
||||||
|
Object key = iter.next();
|
||||||
|
result.put(
|
||||||
|
key,
|
||||||
|
cache.get( new Fqn( regionFqn, key ), ITEM )
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public String toString() {
|
||||||
|
return "TreeCache(" + regionName + ')';
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
109
code/cache-jbosscache/src/main/java/org/hibernate/cache/TreeCacheProvider.java
vendored
Normal file
109
code/cache-jbosscache/src/main/java/org/hibernate/cache/TreeCacheProvider.java
vendored
Normal file
|
@ -0,0 +1,109 @@
|
||||||
|
//$Id: TreeCacheProvider.java 11398 2007-04-10 14:54:07Z steve.ebersole@jboss.com $
|
||||||
|
package org.hibernate.cache;
|
||||||
|
|
||||||
|
import java.util.Properties;
|
||||||
|
import javax.transaction.TransactionManager;
|
||||||
|
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
import org.hibernate.transaction.TransactionManagerLookup;
|
||||||
|
import org.hibernate.transaction.TransactionManagerLookupFactory;
|
||||||
|
import org.hibernate.cfg.Environment;
|
||||||
|
import org.jboss.cache.PropertyConfigurator;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Support for a standalone JBossCache (TreeCache) instance. The JBossCache is configured
|
||||||
|
* via a local config resource.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class TreeCacheProvider implements CacheProvider {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @deprecated use {@link org.hibernate.cfg.Environment#CACHE_PROVIDER_CONFIG}
|
||||||
|
*/
|
||||||
|
public static final String CONFIG_RESOURCE = "hibernate.cache.tree_cache.config";
|
||||||
|
public static final String DEFAULT_CONFIG = "treecache.xml";
|
||||||
|
|
||||||
|
private static final Log log = LogFactory.getLog( TreeCacheProvider.class );
|
||||||
|
|
||||||
|
private org.jboss.cache.TreeCache cache;
|
||||||
|
private TransactionManager transactionManager;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Construct and configure the Cache representation of a named cache region.
|
||||||
|
*
|
||||||
|
* @param regionName the name of the cache region
|
||||||
|
* @param properties configuration settings
|
||||||
|
* @return The Cache representation of the named cache region.
|
||||||
|
* @throws CacheException Indicates an error building the cache region.
|
||||||
|
*/
|
||||||
|
public Cache buildCache(String regionName, Properties properties) throws CacheException {
|
||||||
|
return new TreeCache(cache, regionName, transactionManager);
|
||||||
|
}
|
||||||
|
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return System.currentTimeMillis() / 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Prepare the underlying JBossCache TreeCache instance.
|
||||||
|
*
|
||||||
|
* @param properties All current config settings.
|
||||||
|
*
|
||||||
|
* @throws CacheException Indicates a problem preparing cache for use.
|
||||||
|
*/
|
||||||
|
public void start(Properties properties) {
|
||||||
|
String resource = properties.getProperty( Environment.CACHE_PROVIDER_CONFIG );
|
||||||
|
|
||||||
|
if ( resource == null ) {
|
||||||
|
resource = properties.getProperty( CONFIG_RESOURCE );
|
||||||
|
}
|
||||||
|
if ( resource == null ) {
|
||||||
|
resource = DEFAULT_CONFIG;
|
||||||
|
}
|
||||||
|
log.debug( "Configuring TreeCache from resource [" + resource + "]" );
|
||||||
|
try {
|
||||||
|
cache = new org.jboss.cache.TreeCache();
|
||||||
|
PropertyConfigurator config = new PropertyConfigurator();
|
||||||
|
config.configure( cache, resource );
|
||||||
|
TransactionManagerLookup transactionManagerLookup = TransactionManagerLookupFactory.getTransactionManagerLookup(properties);
|
||||||
|
if (transactionManagerLookup!=null) {
|
||||||
|
cache.setTransactionManagerLookup( new TransactionManagerLookupAdaptor(transactionManagerLookup, properties) );
|
||||||
|
transactionManager = transactionManagerLookup.getTransactionManager(properties);
|
||||||
|
}
|
||||||
|
cache.start();
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void stop() {
|
||||||
|
if (cache!=null) {
|
||||||
|
cache.stop();
|
||||||
|
cache.destroy();
|
||||||
|
cache=null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isMinimalPutsEnabledByDefault() {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
static final class TransactionManagerLookupAdaptor implements org.jboss.cache.TransactionManagerLookup {
|
||||||
|
private final TransactionManagerLookup tml;
|
||||||
|
private final Properties props;
|
||||||
|
TransactionManagerLookupAdaptor(TransactionManagerLookup tml, Properties props) {
|
||||||
|
this.tml=tml;
|
||||||
|
this.props=props;
|
||||||
|
}
|
||||||
|
public TransactionManager getTransactionManager() throws Exception {
|
||||||
|
return tml.getTransactionManager(props);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public org.jboss.cache.TreeCache getUnderlyingCache() {
|
||||||
|
return cache;
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,60 @@
|
||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||||
|
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
|
<parent>
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-code</artifactId>
|
||||||
|
<version>3.3.0.beta1</version>
|
||||||
|
<relativePath>../pom.xml</relativePath>
|
||||||
|
</parent>
|
||||||
|
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-jbosscache2</artifactId>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
|
<name>Hibernate JBossCache2.x integration</name>
|
||||||
|
<description>Integration of Hibernate with JBossCache (based on JBossCache2.x APIs)</description>
|
||||||
|
|
||||||
|
<dependencies>
|
||||||
|
<dependency>
|
||||||
|
<groupId>${groupId}</groupId>
|
||||||
|
<artifactId>hibernate-core</artifactId>
|
||||||
|
<version>${hibernate.core.version}</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>jboss</groupId>
|
||||||
|
<artifactId>jboss-cache</artifactId>
|
||||||
|
<!-- does not work with idea plugin :(
|
||||||
|
<version>[2.0.0.BETA2,)</version>
|
||||||
|
-->
|
||||||
|
<version>2.0.0.BETA2</version>
|
||||||
|
</dependency>
|
||||||
|
</dependencies>
|
||||||
|
|
||||||
|
<build>
|
||||||
|
<plugins>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
|
<artifactId>maven-idea-plugin</artifactId>
|
||||||
|
<configuration>
|
||||||
|
<jdkName>1.5</jdkName>
|
||||||
|
<jdkLevel>5.0</jdkLevel>
|
||||||
|
<!-- why this isnt the default I have no clue -->
|
||||||
|
<linkModules>true</linkModules>
|
||||||
|
</configuration>
|
||||||
|
</plugin>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
|
<artifactId>maven-compiler-plugin</artifactId>
|
||||||
|
<configuration>
|
||||||
|
<source>1.5</source>
|
||||||
|
<target>1.5</target>
|
||||||
|
</configuration>
|
||||||
|
</plugin>
|
||||||
|
</plugins>
|
||||||
|
</build>
|
||||||
|
|
||||||
|
</project>
|
|
@ -0,0 +1,25 @@
|
||||||
|
package org.hibernate.cache.impl.jbc;
|
||||||
|
|
||||||
|
import java.util.Properties;
|
||||||
|
import javax.transaction.TransactionManager;
|
||||||
|
|
||||||
|
import org.hibernate.transaction.TransactionManagerLookup;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An adapter between JBossCache's notion of a TM lookup and Hibernate's.
|
||||||
|
*
|
||||||
|
* @author Steve Ebersole
|
||||||
|
*/
|
||||||
|
public class TransactionManagerLookupAdaptor implements org.jboss.cache.transaction.TransactionManagerLookup {
|
||||||
|
private final TransactionManagerLookup tml;
|
||||||
|
private final Properties props;
|
||||||
|
|
||||||
|
TransactionManagerLookupAdaptor(TransactionManagerLookup tml, Properties props) {
|
||||||
|
this.tml = tml;
|
||||||
|
this.props = props;
|
||||||
|
}
|
||||||
|
|
||||||
|
public TransactionManager getTransactionManager() throws Exception {
|
||||||
|
return tml.getTransactionManager( props );
|
||||||
|
}
|
||||||
|
}
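For orientation, a minimal usage sketch of the adaptor follows. The lookup implementation (JBossTransactionManagerLookup from Hibernate core) and the empty property set are illustrative assumptions only, and the sketch sits in the adaptor's package because the constructor is package-private.

package org.hibernate.cache.impl.jbc;

import java.util.Properties;
import javax.transaction.TransactionManager;

import org.hibernate.transaction.JBossTransactionManagerLookup;

public class TransactionManagerLookupAdaptorSketch {
	public static void main(String[] args) throws Exception {
		// wrap Hibernate's properties-based lookup in JBossCache's no-arg contract
		TransactionManagerLookupAdaptor adaptor = new TransactionManagerLookupAdaptor(
				new JBossTransactionManagerLookup(), new Properties() );
		TransactionManager tm = adaptor.getTransactionManager();
		System.out.println( "resolved TransactionManager: " + tm );
	}
}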
|
code/cache-jbosscache2/src/main/resources/org/hibernate/cache/jbc/TreeCacheRegionAdapter.java
|
@ -0,0 +1,105 @@
|
||||||
|
package org.hibernate.cache.impl.jbc;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Iterator;
|
||||||
|
|
||||||
|
import org.jboss.cache.Fqn;
|
||||||
|
import org.jboss.cache.Cache;
|
||||||
|
import org.jboss.cache.Node;
|
||||||
|
import org.jboss.cache.config.Option;
|
||||||
|
|
||||||
|
import org.hibernate.cache.Region;
|
||||||
|
import org.hibernate.cache.CacheException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adapts a JBossCache {@link org.jboss.cache.Node} to the Hibernate {@link Region} contract.
|
||||||
|
*
|
||||||
|
* @author Steve Ebersole
|
||||||
|
*/
|
||||||
|
public class TreeCacheRegionAdapter implements Region {
|
||||||
|
private static final String ITEM = "item";
|
||||||
|
|
||||||
|
protected final Node jbcNode;
|
||||||
|
protected final String regionName;
|
||||||
|
|
||||||
|
public TreeCacheRegionAdapter(Cache jbcCache, String regionName) {
|
||||||
|
this.regionName = regionName;
|
||||||
|
Fqn fqn = Fqn.fromString( regionName.replace( '.', '/' ) );
|
||||||
|
this.jbcNode = jbcCache.getRoot().addChild( fqn );
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getName() {
|
||||||
|
return regionName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void destroy() throws CacheException {
|
||||||
|
try {
|
||||||
|
// NOTE : this is being used from the process of shutting down a
|
||||||
|
// SessionFactory. Specific things to consider:
|
||||||
|
// (1) this clearing of the region should not propagate to
|
||||||
|
// other nodes on the cluster (if any); this is the
|
||||||
|
// cache-mode-local option bit...
|
||||||
|
// (2) really just trying a best effort to cleanup after
|
||||||
|
// ourselves; lock failures, etc are not critical here;
|
||||||
|
// this is the fail-silently option bit...
|
||||||
|
Option option = new Option();
|
||||||
|
option.setCacheModeLocal( true );
|
||||||
|
option.setFailSilently( true );
|
||||||
|
// NOTE : the original fragment used the legacy TreeCache API
// ( jbcTreeCache.remove( regionFqn, option ) ).  Against the 2.x Node API the
// closest equivalent (assumption: Node.clearData()) is to clear this region's
// node; the cache-mode-local / fail-silently options built above would need to
// be applied through the cache's invocation context instead.
jbcNode.clearData();
|
||||||
|
}
|
||||||
|
catch( Exception e ) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getSizeInMemory() {
|
||||||
|
// not supported
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getElementCountInMemory() {
|
||||||
|
try {
|
||||||
|
Set children = jbcNode.getChildrenNames();
|
||||||
|
return children == null ? 0 : children.size();
|
||||||
|
}
|
||||||
|
catch ( Exception e ) {
|
||||||
|
throw new CacheException( e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getElementCountOnDisk() {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Map toMap() {
|
||||||
|
try {
|
||||||
|
Map result = new HashMap();
|
||||||
|
Set childrenNames = jbcNode.getChildrenNames();
|
||||||
|
if (childrenNames != null) {
|
||||||
|
Iterator iter = childrenNames.iterator();
|
||||||
|
while ( iter.hasNext() ) {
|
||||||
|
Object key = iter.next();
|
||||||
|
result.put(
|
||||||
|
key,
|
||||||
|
jbcNode.getChild( key ).get( ITEM )
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
throw new CacheException(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return System.currentTimeMillis() / 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
public int getTimeout() {
|
||||||
|
return 600; //60 seconds
|
||||||
|
}
|
||||||
|
}
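As a small illustration of the region-name-to-Fqn mapping performed in the constructor above (the region name used here is a made-up example):

package org.hibernate.cache.impl.jbc;

import org.jboss.cache.Fqn;

public class RegionFqnSketch {
	public static void main(String[] args) {
		String regionName = "com.acme.Order";                        // hypothetical region name
		Fqn fqn = Fqn.fromString( regionName.replace( '.', '/' ) );  // -> /com/acme/Order
		System.out.println( fqn );
	}
}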
|
code/cache-jbosscache2/src/main/resources/org/hibernate/cache/jbc/TreeCacheRegionFactory.java
|
@ -0,0 +1,226 @@
|
||||||
|
package org.hibernate.cache.impl.jbc;
|
||||||
|
|
||||||
|
import java.util.Properties;
|
||||||
|
|
||||||
|
import javax.transaction.TransactionManager;
|
||||||
|
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
import org.jboss.cache.Cache;
|
||||||
|
import org.jboss.cache.DefaultCacheFactory;
|
||||||
|
|
||||||
|
import org.hibernate.cache.RegionFactory;
|
||||||
|
import org.hibernate.cache.CacheException;
|
||||||
|
import org.hibernate.cache.EntityRegion;
|
||||||
|
import org.hibernate.cache.CacheDataDescription;
|
||||||
|
import org.hibernate.cache.CollectionRegion;
|
||||||
|
import org.hibernate.cache.QueryResultsRegion;
|
||||||
|
import org.hibernate.cache.TimestampsRegion;
|
||||||
|
import org.hibernate.cache.access.EntityRegionAccessStrategy;
|
||||||
|
import org.hibernate.cache.access.AccessType;
|
||||||
|
import org.hibernate.cfg.Settings;
|
||||||
|
import org.hibernate.util.PropertiesHelper;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A factory for building regions based on a JBossCache
|
||||||
|
* {@link org.jboss.cache.Node}. Here we are utilizing the
|
||||||
|
* same underlying {@link org.jboss.cache.Node} instance for each region.
|
||||||
|
*
|
||||||
|
* @author Steve Ebersole
|
||||||
|
*/
|
||||||
|
public class TreeCacheRegionFactory implements RegionFactory {
|
||||||
|
public static final String ENTITY_CACHE_RESOURCE_PROP = "hibernate.cache.region.jbc.cfg.entity";
|
||||||
|
public static final String COLL_CACHE_RESOURCE_PROP = "hibernate.cache.region.jbc.cfg.collection";
|
||||||
|
public static final String TS_CACHE_RESOURCE_PROP = "hibernate.cache.region.jbc.cfg.ts";
|
||||||
|
public static final String QUERY_CACHE_RESOURCE_PROP = "hibernate.cache.region.jbc.cfg.query";
|
||||||
|
|
||||||
|
public static final String DEF_ENTITY_RESOURCE = "entity-cache.xml";
|
||||||
|
public static final String DEF_COLL_RESOURCE = "collection-cache.xml";
|
||||||
|
public static final String DEF_TS_RESOURCE = "ts-cache.xml";
|
||||||
|
public static final String DEF_QUERY_RESOURCE = "query-cache.xml";
|
||||||
|
|
||||||
|
public static final String OPTIMISTIC_LOCKING_SCHEME = "OPTIMISTIC";
|
||||||
|
|
||||||
|
private static final Log log = LogFactory.getLog( TreeCacheRegionFactory.class );
|
||||||
|
|
||||||
|
private Cache jbcEntityCache;
|
||||||
|
private Cache jbcCollectionCache;
|
||||||
|
private Cache jbcTsCache;
|
||||||
|
private Cache jbcQueryCache;
|
||||||
|
private boolean useOptimisticLocking;
|
||||||
|
|
||||||
|
public void start(Settings settings, Properties properties) throws CacheException {
|
||||||
|
try {
|
||||||
|
TransactionManager tm = settings.getTransactionManagerLookup() == null
|
||||||
|
? null
|
||||||
|
: settings.getTransactionManagerLookup().getTransactionManager( properties );
|
||||||
|
if ( settings.isSecondLevelCacheEnabled() ) {
|
||||||
|
jbcEntityCache = buildEntityRegionCacheInstance( properties );
|
||||||
|
jbcCollectionCache = buildCollectionRegionCacheInstance( properties );
|
||||||
|
if ( tm != null ) {
|
||||||
|
jbcEntityCache.getConfiguration().getRuntimeConfig().setTransactionManager( tm );
|
||||||
|
jbcCollectionCache.getConfiguration().getRuntimeConfig().setTransactionManager( tm );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ( settings.isQueryCacheEnabled() ) {
|
||||||
|
jbcTsCache = buildTsRegionCacheInstance( properties );
|
||||||
|
jbcQueryCache = buildQueryRegionCacheInstance( properties );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch( CacheException ce ) {
|
||||||
|
throw ce;
|
||||||
|
}
|
||||||
|
catch( Throwable t ) {
|
||||||
|
throw new CacheException( "Unable to start region factory", t );
|
||||||
|
}
|
||||||
|
// String resource = PropertiesHelper.getString( Environment.CACHE_PROVIDER_CONFIG, properties, DEFAULT_CONFIG );
|
||||||
|
// log.debug( "Configuring basic TreeCache RegionFactory from resource [" + resource + "]" );
|
||||||
|
// try {
|
||||||
|
// jbcTreeCache = new TreeCache();
|
||||||
|
// PropertyConfigurator config = new PropertyConfigurator();
|
||||||
|
// config.configure( jbcTreeCache, resource );
|
||||||
|
// TransactionManagerLookup transactionManagerLookup = settings.getTransactionManagerLookup();
|
||||||
|
// if ( transactionManagerLookup != null ) {
|
||||||
|
// jbcTreeCache.setTransactionManagerLookup(
|
||||||
|
// new TransactionManagerLookupAdaptor( transactionManagerLookup, properties )
|
||||||
|
// );
|
||||||
|
// }
|
||||||
|
// jbcTreeCache.start();
|
||||||
|
// useOptimisticLocking = OPTIMISTIC_LOCKING_SCHEME.equalsIgnoreCase( jbcTreeCache.getNodeLockingScheme() );
|
||||||
|
// }
|
||||||
|
// catch ( Exception e ) {
|
||||||
|
// throw new CacheException( e );
|
||||||
|
// }
|
||||||
|
}
|
||||||
|
|
||||||
|
protected Cache buildEntityRegionCacheInstance(Properties properties) {
|
||||||
|
try {
|
||||||
|
String configResource = PropertiesHelper.getString( ENTITY_CACHE_RESOURCE_PROP, properties, DEF_ENTITY_RESOURCE );
|
||||||
|
return DefaultCacheFactory.getInstance().createCache( configResource );
|
||||||
|
}
|
||||||
|
catch( Throwable t ) {
|
||||||
|
throw new CacheException( "unable to build entity region cache instance", t );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
protected Cache buildCollectionRegionCacheInstance(Properties properties) {
|
||||||
|
try {
|
||||||
|
String configResource = PropertiesHelper.getString( COLL_CACHE_RESOURCE_PROP, properties, DEF_COLL_RESOURCE );
|
||||||
|
return DefaultCacheFactory.getInstance().createCache( configResource );
|
||||||
|
}
|
||||||
|
catch( Throwable t ) {
|
||||||
|
throw new CacheException( "unable to build collection region cache instance", t );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
protected Cache buildTsRegionCacheInstance(Properties properties) {
|
||||||
|
try {
|
||||||
|
String configResource = PropertiesHelper.getString( TS_CACHE_RESOURCE_PROP, properties, DEF_TS_RESOURCE );
|
||||||
|
return DefaultCacheFactory.getInstance().createCache( configResource );
|
||||||
|
}
|
||||||
|
catch( Throwable t ) {
|
||||||
|
throw new CacheException( "unable to build timestamps region cache instance", t );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
protected Cache buildQueryRegionCacheInstance(Properties properties) {
|
||||||
|
try {
|
||||||
|
String configResource = PropertiesHelper.getString( QUERY_CACHE_RESOURCE_PROP, properties, DEF_QUERY_RESOURCE );
|
||||||
|
return DefaultCacheFactory.getInstance().createCache( configResource );
|
||||||
|
}
|
||||||
|
catch( Throwable t ) {
|
||||||
|
throw new CacheException( "unable to build query region cache instance", t );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void stop() {
|
||||||
|
if ( jbcEntityCache != null ) {
|
||||||
|
try {
|
||||||
|
jbcEntityCache.stop();
|
||||||
|
}
|
||||||
|
catch( Throwable t ) {
|
||||||
|
log.info( "Unable to stop entity cache instance", t );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ( jbcCollectionCache != null ) {
|
||||||
|
try {
|
||||||
|
jbcCollectionCache.stop();
|
||||||
|
}
|
||||||
|
catch( Throwable t ) {
|
||||||
|
log.info( "Unable to stop collection cache instance", t );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ( jbcTsCache != null ) {
|
||||||
|
try {
|
||||||
|
jbcTsCache.stop();
|
||||||
|
}
|
||||||
|
catch( Throwable t ) {
|
||||||
|
log.info( "Unable to stop timestamp cache instance", t );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ( jbcQueryCache != null ) {
|
||||||
|
try {
|
||||||
|
jbcQueryCache.stop();
|
||||||
|
}
|
||||||
|
catch( Throwable t ) {
|
||||||
|
log.info( "Unable to stop query cache instance", t );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isMinimalPutsEnabledByDefault() {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return System.currentTimeMillis() / 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
public EntityRegion buildEntityRegion(String regionName, Properties properties, CacheDataDescription metadata) {
|
||||||
|
if ( useOptimisticLocking && !metadata.isVersioned() ) {
|
||||||
|
log.warn( "JBossCache configured to use optimistic locking, but entity to be cached is not versioned [" + regionName + "]" );
|
||||||
|
}
|
||||||
|
else if ( !useOptimisticLocking && metadata.isVersioned() ) {
|
||||||
|
log.info( "Caching versioned entity without optimisitic locking; consider optimistic locking if all cached entities are versioned" );
|
||||||
|
}
|
||||||
|
return new EntityRegionAdapter( regionName, metadata );
|
||||||
|
}
|
||||||
|
|
||||||
|
public CollectionRegion buildCollectionRegion(String regionName, Properties properties, CacheDataDescription metadata)
|
||||||
|
throws CacheException {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
public QueryResultsRegion buildQueryResultsRegion(String regionName, Properties properties) throws CacheException {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
public TimestampsRegion buildTimestampsRegion(String regionName, Properties properties) throws CacheException {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private class EntityRegionAdapter extends TreeCacheRegionAdapter implements EntityRegion {
|
||||||
|
private final CacheDataDescription metadata;
|
||||||
|
|
||||||
|
public EntityRegionAdapter(String regionName, CacheDataDescription metadata) {
|
||||||
|
super( TreeCacheRegionFactory.this.jbcEntityCache, regionName );
|
||||||
|
this.metadata = metadata;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isTransactionAware() {
|
||||||
|
return jbcEntityCache.getConfiguration().getRuntimeConfig().getTransactionManager() != null;
|
||||||
|
}
|
||||||
|
|
||||||
|
public CacheDataDescription getCacheDataDescription() {
|
||||||
|
return metadata;
|
||||||
|
}
|
||||||
|
|
||||||
|
public EntityRegionAccessStrategy buildAccessStrategy(AccessType accessType) throws CacheException {
|
||||||
|
if ( ! ( AccessType.READ_ONLY.equals( accessType ) || AccessType.TRANSACTIONAL.equals( accessType ) ) ) {
|
||||||
|
throw new CacheException( "TreeCacheRegionFactory only supports ( " + AccessType.READ_ONLY.getName() + " | " + AccessType.TRANSACTIONAL + " ) access strategies [" + accessType.getName() + "]" );
|
||||||
|
}
|
||||||
|
// todo : implement :)
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,34 @@
|
||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||||
|
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
|
<parent>
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-code</artifactId>
|
||||||
|
<version>3.3.0.beta1</version>
|
||||||
|
<relativePath>../pom.xml</relativePath>
|
||||||
|
</parent>
|
||||||
|
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-oscache</artifactId>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
|
<name>Hibernate OSCache integration</name>
|
||||||
|
<description>Integration of Hibernate with OSCache</description>
|
||||||
|
|
||||||
|
<dependencies>
|
||||||
|
<dependency>
|
||||||
|
<groupId>${groupId}</groupId>
|
||||||
|
<artifactId>hibernate-core</artifactId>
|
||||||
|
<version>${hibernate.core.version}</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>opensymphony</groupId>
|
||||||
|
<artifactId>oscache</artifactId>
|
||||||
|
<version>2.1</version>
|
||||||
|
</dependency>
|
||||||
|
</dependencies>
|
||||||
|
|
||||||
|
</project>
|
|
@ -0,0 +1,111 @@
|
||||||
|
//$Id: OSCache.java 6478 2005-04-21 07:57:19Z oneovthafew $
|
||||||
|
package org.hibernate.cache;
|
||||||
|
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import com.opensymphony.oscache.base.NeedsRefreshException;
|
||||||
|
import com.opensymphony.oscache.general.GeneralCacheAdministrator;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author <a href="mailto:m.bogaert@intrasoft.be">Mathias Bogaert</a>
|
||||||
|
*/
|
||||||
|
public class OSCache implements Cache {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The OSCache 2.0 cache administrator.
|
||||||
|
*/
|
||||||
|
private GeneralCacheAdministrator cache = new GeneralCacheAdministrator();
|
||||||
|
|
||||||
|
private final int refreshPeriod;
|
||||||
|
private final String cron;
|
||||||
|
private final String regionName;
|
||||||
|
|
||||||
|
private String toString(Object key) {
|
||||||
|
return String.valueOf(key) + '.' + regionName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public OSCache(int refreshPeriod, String cron, String region) {
|
||||||
|
this.refreshPeriod = refreshPeriod;
|
||||||
|
this.cron = cron;
|
||||||
|
this.regionName = region;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setCacheCapacity(int cacheCapacity) {
|
||||||
|
cache.setCacheCapacity(cacheCapacity);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Object get(Object key) throws CacheException {
|
||||||
|
try {
|
||||||
|
return cache.getFromCache( toString(key), refreshPeriod, cron );
|
||||||
|
}
|
||||||
|
catch (NeedsRefreshException e) {
|
||||||
|
cache.cancelUpdate( toString(key) );
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public Object read(Object key) throws CacheException {
|
||||||
|
return get(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void update(Object key, Object value) throws CacheException {
|
||||||
|
put(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void put(Object key, Object value) throws CacheException {
|
||||||
|
cache.putInCache( toString(key), value );
|
||||||
|
}
|
||||||
|
|
||||||
|
public void remove(Object key) throws CacheException {
|
||||||
|
cache.flushEntry( toString(key) );
|
||||||
|
}
|
||||||
|
|
||||||
|
public void clear() throws CacheException {
|
||||||
|
cache.flushAll();
|
||||||
|
}
|
||||||
|
|
||||||
|
public void destroy() throws CacheException {
|
||||||
|
cache.destroy();
|
||||||
|
}
|
||||||
|
|
||||||
|
public void lock(Object key) throws CacheException {
|
||||||
|
// local cache, so we use synchronization
|
||||||
|
}
|
||||||
|
|
||||||
|
public void unlock(Object key) throws CacheException {
|
||||||
|
// local cache, so we use synchronization
|
||||||
|
}
|
||||||
|
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return Timestamper.next();
|
||||||
|
}
|
||||||
|
|
||||||
|
public int getTimeout() {
|
||||||
|
return Timestamper.ONE_MS * 60000; // i.e. 60 seconds
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getRegionName() {
|
||||||
|
return regionName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getSizeInMemory() {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getElementCountInMemory() {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getElementCountOnDisk() {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Map toMap() {
|
||||||
|
throw new UnsupportedOperationException();
|
||||||
|
}
|
||||||
|
|
||||||
|
public String toString() {
|
||||||
|
return "OSCache(" + regionName + ')';
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,87 @@
|
||||||
|
//$Id: OSCacheProvider.java 5685 2005-02-12 07:19:50Z steveebersole $
|
||||||
|
package org.hibernate.cache;
|
||||||
|
|
||||||
|
import java.util.Properties;
|
||||||
|
|
||||||
|
import org.hibernate.util.PropertiesHelper;
|
||||||
|
import org.hibernate.util.StringHelper;
|
||||||
|
|
||||||
|
import com.opensymphony.oscache.base.CacheEntry;
|
||||||
|
import com.opensymphony.oscache.base.Config;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Support for OpenSymphony OSCache. This implementation assumes
|
||||||
|
* that identifiers have well-behaved <tt>toString()</tt> methods.
|
||||||
|
*
|
||||||
|
* @author <a href="mailto:m.bogaert@intrasoft.be">Mathias Bogaert</a>
|
||||||
|
*/
|
||||||
|
public class OSCacheProvider implements CacheProvider {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The <tt>OSCache</tt> refresh period property suffix.
|
||||||
|
*/
|
||||||
|
public static final String OSCACHE_REFRESH_PERIOD = "refresh.period";
|
||||||
|
/**
|
||||||
|
* The <tt>OSCache</tt> CRON expression property suffix.
|
||||||
|
*/
|
||||||
|
public static final String OSCACHE_CRON = "cron";
|
||||||
|
/**
|
||||||
|
* The <tt>OSCache</tt> cache capacity property suffix.
|
||||||
|
*/
|
||||||
|
public static final String OSCACHE_CAPACITY = "capacity";
|
||||||
|
|
||||||
|
private static final Properties OSCACHE_PROPERTIES = new Config().getProperties();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builds a new {@link Cache} instance, and gets its properties from the OSCache {@link Config}
|
||||||
|
* which reads the properties file (<code>oscache.properties</code>) from the classpath.
|
||||||
|
* If the file cannot be found or loaded, the defaults are used.
|
||||||
|
*
|
||||||
|
* @param region
|
||||||
|
* @param properties
|
||||||
|
* @return The constructed OSCache instance
|
||||||
|
* @throws CacheException
|
||||||
|
*/
|
||||||
|
public Cache buildCache(String region, Properties properties) throws CacheException {
|
||||||
|
|
||||||
|
int refreshPeriod = PropertiesHelper.getInt(
|
||||||
|
StringHelper.qualify(region, OSCACHE_REFRESH_PERIOD),
|
||||||
|
OSCACHE_PROPERTIES,
|
||||||
|
CacheEntry.INDEFINITE_EXPIRY
|
||||||
|
);
|
||||||
|
String cron = OSCACHE_PROPERTIES.getProperty( StringHelper.qualify(region, OSCACHE_CRON) );
|
||||||
|
|
||||||
|
// construct the cache
|
||||||
|
final OSCache cache = new OSCache(refreshPeriod, cron, region);
|
||||||
|
|
||||||
|
Integer capacity = PropertiesHelper.getInteger( StringHelper.qualify(region, OSCACHE_CAPACITY), OSCACHE_PROPERTIES );
|
||||||
|
if ( capacity!=null ) cache.setCacheCapacity( capacity.intValue() );
|
||||||
|
|
||||||
|
return cache;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return Timestamper.next();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Callback to perform any necessary initialization of the underlying cache implementation
|
||||||
|
* during SessionFactory construction.
|
||||||
|
*
|
||||||
|
* @param properties current configuration settings.
|
||||||
|
*/
|
||||||
|
public void start(Properties properties) throws CacheException {
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Callback to perform any necessary cleanup of the underlying cache implementation
|
||||||
|
* during SessionFactory.close().
|
||||||
|
*/
|
||||||
|
public void stop() {
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isMinimalPutsEnabledByDefault() {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
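A usage sketch for the provider above; the region name and the oscache.properties entries are illustrative assumptions, shown only to make the region-qualified suffixes (refresh.period, cron, capacity) concrete.

package org.hibernate.cache;

import java.util.Properties;

public class OSCacheProviderSketch {
	public static void main(String[] args) {
		// region-qualified settings are read from oscache.properties on the classpath, e.g.
		//   com.acme.Order.refresh.period=300
		//   com.acme.Order.cron=0 0 * * *
		//   com.acme.Order.capacity=1000
		OSCacheProvider provider = new OSCacheProvider();
		provider.start( new Properties() );
		Cache orderRegion = provider.buildCache( "com.acme.Order", new Properties() );
		System.out.println( orderRegion );   // prints OSCache(com.acme.Order)
		provider.stop();
	}
}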
|
|
@ -0,0 +1,34 @@
|
||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||||
|
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
|
<parent>
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-code</artifactId>
|
||||||
|
<version>3.3.0.beta1</version>
|
||||||
|
<relativePath>../pom.xml</relativePath>
|
||||||
|
</parent>
|
||||||
|
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-swarmcache</artifactId>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
|
<name>Hibernate SwarmCache integration</name>
|
||||||
|
<description>Integration of Hibernate with SwarmCache</description>
|
||||||
|
|
||||||
|
<dependencies>
|
||||||
|
<dependency>
|
||||||
|
<groupId>${groupId}</groupId>
|
||||||
|
<artifactId>hibernate-core</artifactId>
|
||||||
|
<version>${hibernate.core.version}</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>swarmcache</groupId>
|
||||||
|
<artifactId>swarmcache</artifactId>
|
||||||
|
<version>1.0RC2</version>
|
||||||
|
</dependency>
|
||||||
|
</dependencies>
|
||||||
|
|
||||||
|
</project>
|
|
@ -0,0 +1,144 @@
|
||||||
|
//$Id: SwarmCache.java 6478 2005-04-21 07:57:19Z oneovthafew $
|
||||||
|
package org.hibernate.cache;
|
||||||
|
|
||||||
|
import net.sf.swarmcache.ObjectCache;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author Jason Carreira, Gavin King
|
||||||
|
*/
|
||||||
|
public class SwarmCache implements Cache {
|
||||||
|
|
||||||
|
private final ObjectCache cache;
|
||||||
|
private final String regionName;
|
||||||
|
|
||||||
|
public SwarmCache(ObjectCache cache, String regionName) {
|
||||||
|
this.cache = cache;
|
||||||
|
this.regionName = regionName;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get an item from the cache
|
||||||
|
* @param key
|
||||||
|
* @return the cached object or <tt>null</tt>
|
||||||
|
* @throws CacheException
|
||||||
|
*/
|
||||||
|
public Object get(Object key) throws CacheException {
|
||||||
|
if (key instanceof Serializable) {
|
||||||
|
return cache.get( (Serializable) key );
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
throw new CacheException("Keys must implement Serializable");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public Object read(Object key) throws CacheException {
|
||||||
|
return get(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add an item to the cache
|
||||||
|
* @param key
|
||||||
|
* @param value
|
||||||
|
* @throws CacheException
|
||||||
|
*/
|
||||||
|
public void update(Object key, Object value) throws CacheException {
|
||||||
|
put(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add an item to the cache
|
||||||
|
* @param key
|
||||||
|
* @param value
|
||||||
|
* @throws CacheException
|
||||||
|
*/
|
||||||
|
public void put(Object key, Object value) throws CacheException {
|
||||||
|
if (key instanceof Serializable) {
|
||||||
|
cache.put( (Serializable) key, value );
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
throw new CacheException("Keys must implement Serializable");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove an item from the cache
|
||||||
|
*/
|
||||||
|
public void remove(Object key) throws CacheException {
|
||||||
|
if (key instanceof Serializable) {
|
||||||
|
cache.clear( (Serializable) key );
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
throw new CacheException("Keys must implement Serializable");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear the cache
|
||||||
|
*/
|
||||||
|
public void clear() throws CacheException {
|
||||||
|
cache.clearAll();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up
|
||||||
|
*/
|
||||||
|
public void destroy() throws CacheException {
|
||||||
|
cache.clearAll();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If this is a clustered cache, lock the item
|
||||||
|
*/
|
||||||
|
public void lock(Object key) throws CacheException {
|
||||||
|
throw new UnsupportedOperationException("SwarmCache does not support locking (use nonstrict-read-write)");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If this is a clustered cache, unlock the item
|
||||||
|
*/
|
||||||
|
public void unlock(Object key) throws CacheException {
|
||||||
|
throw new UnsupportedOperationException("SwarmCache does not support locking (use nonstrict-read-write)");
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate a (coarse) timestamp
|
||||||
|
*/
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return System.currentTimeMillis() / 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a reasonable "lock timeout"
|
||||||
|
*/
|
||||||
|
public int getTimeout() {
|
||||||
|
return 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getRegionName() {
|
||||||
|
return regionName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getSizeInMemory() {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getElementCountInMemory() {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getElementCountOnDisk() {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Map toMap() {
|
||||||
|
throw new UnsupportedOperationException();
|
||||||
|
}
|
||||||
|
|
||||||
|
public String toString() {
|
||||||
|
return "SwarmCache(" + regionName + ')';
|
||||||
|
}
|
||||||
|
|
||||||
|
}
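A short sketch of the Serializable-key rule enforced above; the backing ObjectCache is taken as a parameter because obtaining one (normally via SwarmCacheProvider) is outside this class, and the region name is an assumed example.

package org.hibernate.cache;

import net.sf.swarmcache.ObjectCache;

public class SwarmCacheKeySketch {
	static void demonstrate(ObjectCache backing) {
		SwarmCache cache = new SwarmCache( backing, "com.acme.Order" );
		cache.put( "42", "cached state" );      // ok: String implements Serializable
		try {
			cache.put( new Object(), "oops" );  // not Serializable -> CacheException
		}
		catch ( CacheException expected ) {
			System.out.println( expected.getMessage() );
		}
	}
}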
|
code/cache-swarmcache/src/main/java/org/hibernate/cache/SwarmCacheProvider.java
|
@ -0,0 +1,58 @@
|
||||||
|
//$Id: SwarmCacheProvider.java 5685 2005-02-12 07:19:50Z steveebersole $
|
||||||
|
package org.hibernate.cache;
|
||||||
|
|
||||||
|
import net.sf.swarmcache.CacheConfiguration;
|
||||||
|
import net.sf.swarmcache.CacheConfigurationManager;
|
||||||
|
import net.sf.swarmcache.CacheFactory;
|
||||||
|
import net.sf.swarmcache.ObjectCache;
|
||||||
|
|
||||||
|
import java.util.Properties;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Support for SwarmCache replicated cache. SwarmCache does not support
|
||||||
|
* locking, so strict "read-write" semantics are unsupported.
|
||||||
|
* @author Jason Carreira
|
||||||
|
*/
|
||||||
|
public class SwarmCacheProvider implements CacheProvider {
|
||||||
|
|
||||||
|
private CacheFactory factory;
|
||||||
|
|
||||||
|
public Cache buildCache(String regionName, Properties properties) throws CacheException {
|
||||||
|
ObjectCache cache = factory.createCache(regionName);
|
||||||
|
if (cache==null) {
|
||||||
|
throw new CacheException("SwarmCache did not create a cache: " + regionName);
|
||||||
|
}
|
||||||
|
return new SwarmCache(cache, regionName);
|
||||||
|
}
|
||||||
|
|
||||||
|
public long nextTimestamp() {
|
||||||
|
return System.currentTimeMillis() / 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Callback to perform any necessary initialization of the underlying cache implementation
|
||||||
|
* during SessionFactory construction.
|
||||||
|
*
|
||||||
|
* @param properties current configuration settings.
|
||||||
|
*/
|
||||||
|
public void start(Properties properties) throws CacheException {
|
||||||
|
CacheConfiguration config = CacheConfigurationManager.getConfig(properties);
|
||||||
|
factory = new CacheFactory(config);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Callback to perform any necessary cleanup of the underlying cache implementation
|
||||||
|
* during SessionFactory.close().
|
||||||
|
*/
|
||||||
|
public void stop() {
|
||||||
|
if (factory != null) {
|
||||||
|
factory.shutdown();
|
||||||
|
factory = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isMinimalPutsEnabledByDefault() {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,34 @@
|
||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||||
|
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
|
<parent>
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-code</artifactId>
|
||||||
|
<version>3.3.0.beta1</version>
|
||||||
|
<relativePath>../pom.xml</relativePath>
|
||||||
|
</parent>
|
||||||
|
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-c3p0</artifactId>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
|
<name>Hibernate C3P0 ConnectionProvider</name>
|
||||||
|
<description>C3P0-based implementation of the Hibernate ConnectionProvider contract</description>
|
||||||
|
|
||||||
|
<dependencies>
|
||||||
|
<dependency>
|
||||||
|
<groupId>${groupId}</groupId>
|
||||||
|
<artifactId>hibernate-core</artifactId>
|
||||||
|
<version>${hibernate.core.version}</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>c3p0</groupId>
|
||||||
|
<artifactId>c3p0</artifactId>
|
||||||
|
<version>0.9.1</version>
|
||||||
|
</dependency>
|
||||||
|
</dependencies>
|
||||||
|
|
||||||
|
</project>
|
|
@ -0,0 +1,218 @@
|
||||||
|
//$Id: C3P0ConnectionProvider.java 11066 2007-01-19 15:14:31Z steve.ebersole@jboss.com $
|
||||||
|
package org.hibernate.connection;
|
||||||
|
|
||||||
|
import java.sql.Connection;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.Properties;
|
||||||
|
|
||||||
|
import javax.sql.DataSource;
|
||||||
|
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
|
||||||
|
import com.mchange.v2.c3p0.DataSources;
|
||||||
|
|
||||||
|
import org.hibernate.HibernateException;
|
||||||
|
import org.hibernate.cfg.Environment;
|
||||||
|
import org.hibernate.util.PropertiesHelper;
|
||||||
|
import org.hibernate.util.ReflectHelper;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A connection provider that uses a C3P0 connection pool. Hibernate will use this by
|
||||||
|
* default if the <tt>hibernate.c3p0.*</tt> properties are set.
|
||||||
|
*
|
||||||
|
* @author various people
|
||||||
|
* @see ConnectionProvider
|
||||||
|
*/
|
||||||
|
public class C3P0ConnectionProvider implements ConnectionProvider {
|
||||||
|
|
||||||
|
private static final Log log = LogFactory.getLog( C3P0ConnectionProvider.class );
|
||||||
|
|
||||||
|
//swaldman 2006-08-28: define c3p0-style configuration parameters for properties with
|
||||||
|
// hibernate-specific overrides to detect and warn about conflicting
|
||||||
|
// declarations
|
||||||
|
private final static String C3P0_STYLE_MIN_POOL_SIZE = "c3p0.minPoolSize";
|
||||||
|
private final static String C3P0_STYLE_MAX_POOL_SIZE = "c3p0.maxPoolSize";
|
||||||
|
private final static String C3P0_STYLE_MAX_IDLE_TIME = "c3p0.maxIdleTime";
|
||||||
|
private final static String C3P0_STYLE_MAX_STATEMENTS = "c3p0.maxStatements";
|
||||||
|
private final static String C3P0_STYLE_ACQUIRE_INCREMENT = "c3p0.acquireIncrement";
|
||||||
|
private final static String C3P0_STYLE_IDLE_CONNECTION_TEST_PERIOD = "c3p0.idleConnectionTestPeriod";
|
||||||
|
private final static String C3P0_STYLE_TEST_CONNECTION_ON_CHECKOUT = "c3p0.testConnectionOnCheckout";
|
||||||
|
|
||||||
|
//swaldman 2006-08-28: define c3p0-style configuration parameters for initialPoolSize, which
|
||||||
|
// hibernate sensibly lets default to minPoolSize, but we'll let users
|
||||||
|
// override it with the c3p0-style property if they want.
|
||||||
|
private final static String C3P0_STYLE_INITIAL_POOL_SIZE = "c3p0.initialPoolSize";
|
||||||
|
|
||||||
|
private DataSource ds;
|
||||||
|
private Integer isolation;
|
||||||
|
private boolean autocommit;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* {@inheritDoc}
|
||||||
|
*/
|
||||||
|
public Connection getConnection() throws SQLException {
|
||||||
|
final Connection c = ds.getConnection();
|
||||||
|
if ( isolation != null ) {
|
||||||
|
c.setTransactionIsolation( isolation.intValue() );
|
||||||
|
}
|
||||||
|
if ( c.getAutoCommit() != autocommit ) {
|
||||||
|
c.setAutoCommit( autocommit );
|
||||||
|
}
|
||||||
|
return c;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* {@inheritDoc}
|
||||||
|
*/
|
||||||
|
public void closeConnection(Connection conn) throws SQLException {
|
||||||
|
conn.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* {@inheritDoc}
|
||||||
|
*/
|
||||||
|
public void configure(Properties props) throws HibernateException {
|
||||||
|
String jdbcDriverClass = props.getProperty( Environment.DRIVER );
|
||||||
|
String jdbcUrl = props.getProperty( Environment.URL );
|
||||||
|
Properties connectionProps = ConnectionProviderFactory.getConnectionProperties( props );
|
||||||
|
|
||||||
|
log.info( "C3P0 using driver: " + jdbcDriverClass + " at URL: " + jdbcUrl );
|
||||||
|
log.info( "Connection properties: " + PropertiesHelper.maskOut( connectionProps, "password" ) );
|
||||||
|
|
||||||
|
autocommit = PropertiesHelper.getBoolean( Environment.AUTOCOMMIT, props );
|
||||||
|
log.info( "autocommit mode: " + autocommit );
|
||||||
|
|
||||||
|
if ( jdbcDriverClass == null ) {
|
||||||
|
log.warn( "No JDBC Driver class was specified by property " + Environment.DRIVER );
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
try {
|
||||||
|
Class.forName( jdbcDriverClass );
|
||||||
|
}
|
||||||
|
catch ( ClassNotFoundException cnfe ) {
|
||||||
|
try {
|
||||||
|
ReflectHelper.classForName( jdbcDriverClass );
|
||||||
|
}
|
||||||
|
catch ( ClassNotFoundException e ) {
|
||||||
|
String msg = "JDBC Driver class not found: " + jdbcDriverClass;
|
||||||
|
log.fatal( msg, e );
|
||||||
|
throw new HibernateException( msg, e );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
|
||||||
|
//swaldman 2004-02-07: modify to allow null values to signify fall through to c3p0 PoolConfig defaults
|
||||||
|
Integer minPoolSize = PropertiesHelper.getInteger( Environment.C3P0_MIN_SIZE, props );
|
||||||
|
Integer maxPoolSize = PropertiesHelper.getInteger( Environment.C3P0_MAX_SIZE, props );
|
||||||
|
Integer maxIdleTime = PropertiesHelper.getInteger( Environment.C3P0_TIMEOUT, props );
|
||||||
|
Integer maxStatements = PropertiesHelper.getInteger( Environment.C3P0_MAX_STATEMENTS, props );
|
||||||
|
Integer acquireIncrement = PropertiesHelper.getInteger( Environment.C3P0_ACQUIRE_INCREMENT, props );
|
||||||
|
Integer idleTestPeriod = PropertiesHelper.getInteger( Environment.C3P0_IDLE_TEST_PERIOD, props );
|
||||||
|
|
||||||
|
Properties c3props = new Properties();
|
||||||
|
|
||||||
|
// turn hibernate.c3p0.* into c3p0.*, so c3p0
|
||||||
|
// gets a chance to see all hibernate.c3p0.*
|
||||||
|
for ( Iterator ii = props.keySet().iterator(); ii.hasNext(); ) {
|
||||||
|
String key = ( String ) ii.next();
|
||||||
|
if ( key.startsWith( "hibernate.c3p0." ) ) {
|
||||||
|
String newKey = key.substring( 10 );
|
||||||
|
if ( props.containsKey( newKey ) ) {
|
||||||
|
warnPropertyConflict( key, newKey );
|
||||||
|
}
|
||||||
|
c3props.put( newKey, props.get( key ) );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
setOverwriteProperty( Environment.C3P0_MIN_SIZE, C3P0_STYLE_MIN_POOL_SIZE, props, c3props, minPoolSize );
|
||||||
|
setOverwriteProperty( Environment.C3P0_MAX_SIZE, C3P0_STYLE_MAX_POOL_SIZE, props, c3props, maxPoolSize );
|
||||||
|
setOverwriteProperty( Environment.C3P0_TIMEOUT, C3P0_STYLE_MAX_IDLE_TIME, props, c3props, maxIdleTime );
|
||||||
|
setOverwriteProperty(
|
||||||
|
Environment.C3P0_MAX_STATEMENTS, C3P0_STYLE_MAX_STATEMENTS, props, c3props, maxStatements
|
||||||
|
);
|
||||||
|
setOverwriteProperty(
|
||||||
|
Environment.C3P0_ACQUIRE_INCREMENT, C3P0_STYLE_ACQUIRE_INCREMENT, props, c3props, acquireIncrement
|
||||||
|
);
|
||||||
|
setOverwriteProperty(
|
||||||
|
Environment.C3P0_IDLE_TEST_PERIOD, C3P0_STYLE_IDLE_CONNECTION_TEST_PERIOD, props, c3props, idleTestPeriod
|
||||||
|
);
|
||||||
|
|
||||||
|
// revert to traditional hibernate behavior of setting initialPoolSize to minPoolSize
|
||||||
|
// unless otherwise specified with a c3p0.*-style parameter.
|
||||||
|
Integer initialPoolSize = PropertiesHelper.getInteger( C3P0_STYLE_INITIAL_POOL_SIZE, props );
|
||||||
|
if ( initialPoolSize == null && minPoolSize != null ) {
|
||||||
|
c3props.put( C3P0_STYLE_INITIAL_POOL_SIZE, String.valueOf( minPoolSize ).trim() );
|
||||||
|
}
|
||||||
|
|
||||||
|
/*DataSource unpooled = DataSources.unpooledDataSource(
|
||||||
|
jdbcUrl, props.getProperty(Environment.USER), props.getProperty(Environment.PASS)
|
||||||
|
);*/
|
||||||
|
DataSource unpooled = DataSources.unpooledDataSource( jdbcUrl, connectionProps );
|
||||||
|
|
||||||
|
Properties allProps = ( Properties ) props.clone();
|
||||||
|
allProps.putAll( c3props );
|
||||||
|
|
||||||
|
ds = DataSources.pooledDataSource( unpooled, allProps );
|
||||||
|
}
|
||||||
|
catch ( Exception e ) {
|
||||||
|
log.fatal( "could not instantiate C3P0 connection pool", e );
|
||||||
|
throw new HibernateException( "Could not instantiate C3P0 connection pool", e );
|
||||||
|
}
|
||||||
|
|
||||||
|
String i = props.getProperty( Environment.ISOLATION );
|
||||||
|
if ( i == null ) {
|
||||||
|
isolation = null;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
isolation = new Integer( i );
|
||||||
|
log.info( "JDBC isolation level: " + Environment.isolationLevelToString( isolation.intValue() ) );
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* {@inheritDoc}
|
||||||
|
*/
|
||||||
|
public void close() {
|
||||||
|
try {
|
||||||
|
DataSources.destroy( ds );
|
||||||
|
}
|
||||||
|
catch ( SQLException sqle ) {
|
||||||
|
log.warn( "could not destroy C3P0 connection pool", sqle );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* {@inheritDoc}
|
||||||
|
*/
|
||||||
|
public boolean supportsAggressiveRelease() {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
private void setOverwriteProperty(String hibernateStyleKey, String c3p0StyleKey, Properties hibp, Properties c3p, Integer value) {
|
||||||
|
if ( value != null ) {
|
||||||
|
c3p.put( c3p0StyleKey, String.valueOf( value ).trim() );
|
||||||
|
if ( hibp.getProperty( c3p0StyleKey ) != null ) {
|
||||||
|
warnPropertyConflict( hibernateStyleKey, c3p0StyleKey );
|
||||||
|
}
|
||||||
|
String longC3p0StyleKey = "hibernate." + c3p0StyleKey;
|
||||||
|
if ( hibp.getProperty( longC3p0StyleKey ) != null ) {
|
||||||
|
warnPropertyConflict( hibernateStyleKey, longC3p0StyleKey );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void warnPropertyConflict(String hibernateStyle, String c3p0Style) {
|
||||||
|
log.warn(
|
||||||
|
"Both hibernate-style property '" + hibernateStyle +
|
||||||
|
"' and c3p0-style property '" + c3p0Style +
|
||||||
|
"' have been set in hibernate.properties. " +
|
||||||
|
"Hibernate-style property '" + hibernateStyle + "' will be used " +
|
||||||
|
"and c3p0-style property '" + c3p0Style + "' will be ignored!"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
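A configuration sketch for the translation described in configure() above. The driver class, URL and pool sizes are illustrative assumptions; the point is that any hibernate.c3p0.* key is also handed to c3p0 with the "hibernate." prefix stripped, alongside the hibernate-style Environment settings.

package org.hibernate.connection;

import java.util.Properties;

import org.hibernate.cfg.Environment;

public class C3P0ProviderSketch {
	public static void main(String[] args) {
		Properties props = new Properties();
		props.setProperty( Environment.DRIVER, "org.hsqldb.jdbcDriver" );      // assumed driver
		props.setProperty( Environment.URL, "jdbc:hsqldb:mem:demo" );          // assumed URL
		props.setProperty( Environment.C3P0_MIN_SIZE, "1" );                   // hibernate.c3p0.min_size
		props.setProperty( Environment.C3P0_MAX_SIZE, "5" );                   // hibernate.c3p0.max_size
		props.setProperty( "hibernate.c3p0.idleConnectionTestPeriod", "300" ); // becomes c3p0.idleConnectionTestPeriod

		C3P0ConnectionProvider provider = new C3P0ConnectionProvider();
		provider.configure( props );   // builds the pooled DataSource
		provider.close();
	}
}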
|
|
@ -0,0 +1,34 @@
|
||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||||
|
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
|
<parent>
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-code</artifactId>
|
||||||
|
<version>3.3.0.beta1</version>
|
||||||
|
<relativePath>../pom.xml</relativePath>
|
||||||
|
</parent>
|
||||||
|
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-proxool</artifactId>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
|
<name>Hibernate Proxool ConnectionProvider</name>
|
||||||
|
<description>Proxool-based implementation of the Hibernate ConnectionProvider contract</description>
|
||||||
|
|
||||||
|
<dependencies>
|
||||||
|
<dependency>
|
||||||
|
<groupId>${groupId}</groupId>
|
||||||
|
<artifactId>hibernate-core</artifactId>
|
||||||
|
<version>${hibernate.core.version}</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>proxool</groupId>
|
||||||
|
<artifactId>proxool</artifactId>
|
||||||
|
<version>0.8.3</version>
|
||||||
|
</dependency>
|
||||||
|
</dependencies>
|
||||||
|
|
||||||
|
</project>
|
|
@ -0,0 +1,199 @@
|
||||||
|
//$Id: ProxoolConnectionProvider.java 6463 2005-04-19 15:39:07Z steveebersole $
|
||||||
|
package org.hibernate.connection;
|
||||||
|
|
||||||
|
import java.sql.Connection;
|
||||||
|
import java.sql.DriverManager;
|
||||||
|
import java.sql.SQLException;
|
||||||
|
import java.util.Properties;
|
||||||
|
|
||||||
|
import org.hibernate.HibernateException;
|
||||||
|
import org.hibernate.cfg.Environment;
|
||||||
|
import org.hibernate.util.PropertiesHelper;
|
||||||
|
import org.hibernate.util.StringHelper;
|
||||||
|
import org.hibernate.util.ConfigHelper;
|
||||||
|
|
||||||
|
import org.apache.commons.logging.Log;
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
|
||||||
|
import org.logicalcobwebs.proxool.ProxoolException;
|
||||||
|
import org.logicalcobwebs.proxool.ProxoolFacade;
|
||||||
|
import org.logicalcobwebs.proxool.configuration.JAXPConfigurator;
|
||||||
|
import org.logicalcobwebs.proxool.configuration.PropertyConfigurator;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A connection provider that uses a Proxool connection pool. Hibernate will use this by
|
||||||
|
* default if the <tt>hibernate.proxool.*</tt> properties are set.
|
||||||
|
* @see ConnectionProvider
|
||||||
|
*/
|
||||||
|
public class ProxoolConnectionProvider implements ConnectionProvider {
|
||||||
|
|
||||||
|
|
||||||
|
private static final String PROXOOL_JDBC_STEM = "proxool.";
|
||||||
|
|
||||||
|
private static final Log log = LogFactory.getLog(ProxoolConnectionProvider.class);
|
||||||
|
|
||||||
|
private String proxoolAlias;
|
||||||
|
|
||||||
|
// TRUE if the pool is borrowed from the outside, FALSE if we created it ourselves
|
||||||
|
private boolean existingPool;
|
||||||
|
|
||||||
|
// Not null if the Isolation level has been specified in the configuration file.
|
||||||
|
// Otherwise, it is left to the Driver's default value.
|
||||||
|
private Integer isolation;
|
||||||
|
|
||||||
|
private boolean autocommit;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Grab a connection
|
||||||
|
* @return a JDBC connection
|
||||||
|
* @throws SQLException
|
||||||
|
*/
|
||||||
|
public Connection getConnection() throws SQLException {
|
||||||
|
// get a connection from the pool (through DriverManager, cf. the Proxool docs)
|
||||||
|
Connection c = DriverManager.getConnection(proxoolAlias);
|
||||||
|
|
||||||
|
// set the Transaction Isolation if defined
|
||||||
|
if (isolation!=null) c.setTransactionIsolation( isolation.intValue() );
|
||||||
|
|
||||||
|
// toggle autoCommit to false if set
|
||||||
|
if ( c.getAutoCommit()!=autocommit ) c.setAutoCommit(autocommit);
|
||||||
|
|
||||||
|
// return the connection
|
||||||
|
return c;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Dispose of a used connection.
|
||||||
|
* @param conn a JDBC connection
|
||||||
|
* @throws SQLException
|
||||||
|
*/
|
||||||
|
public void closeConnection(Connection conn) throws SQLException {
|
||||||
|
conn.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize the connection provider from given properties.
|
||||||
|
* @param props <tt>SessionFactory</tt> properties
|
||||||
|
*/
|
||||||
|
public void configure(Properties props) throws HibernateException {
|
||||||
|
|
||||||
|
// Get the configurator files (if available)
|
||||||
|
String jaxpFile = props.getProperty(Environment.PROXOOL_XML);
|
||||||
|
String propFile = props.getProperty(Environment.PROXOOL_PROPERTIES);
|
||||||
|
String externalConfig = props.getProperty(Environment.PROXOOL_EXISTING_POOL);
|
||||||
|
|
||||||
|
// Default the Proxool alias setting
|
||||||
|
proxoolAlias = props.getProperty(Environment.PROXOOL_POOL_ALIAS);
|
||||||
|
|
||||||
|
// Configured outside of Hibernate (i.e. the servlet container or JavaBean container
|
||||||
|
// already has Proxool pools running, and this provider just borrows one of them)
|
||||||
|
if ( "true".equals(externalConfig) ) {
|
||||||
|
|
||||||
|
// Validate that an alias name was provided to determine which pool to use
|
||||||
|
if ( !StringHelper.isNotEmpty(proxoolAlias) ) {
|
||||||
|
String msg = "Cannot configure Proxool Provider to use an existing in memory pool without the " + Environment.PROXOOL_POOL_ALIAS + " property set.";
|
||||||
|
log.fatal(msg);
|
||||||
|
throw new HibernateException(msg);
|
||||||
|
}
|
||||||
|
// Append the stem to the proxool pool alias
|
||||||
|
proxoolAlias = PROXOOL_JDBC_STEM + proxoolAlias;
|
||||||
|
|
||||||
|
// Set the existing pool flag to true
|
||||||
|
existingPool = true;
|
||||||
|
|
||||||
|
log.info("Configuring Proxool Provider using existing pool in memory: " + proxoolAlias);
|
||||||
|
|
||||||
|
// Configured using the JAXP Configurator
|
||||||
|
}
|
||||||
|
else if ( StringHelper.isNotEmpty(jaxpFile) ) {
|
||||||
|
|
||||||
|
log.info("Configuring Proxool Provider using JAXPConfigurator: " + jaxpFile);
|
||||||
|
|
||||||
|
// Validate that an alias name was provided to determine which pool to use
|
||||||
|
if ( !StringHelper.isNotEmpty(proxoolAlias) ) {
|
||||||
|
String msg = "Cannot configure Proxool Provider to use JAXP without the " + Environment.PROXOOL_POOL_ALIAS + " property set.";
|
||||||
|
log.fatal(msg);
|
||||||
|
throw new HibernateException(msg);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
JAXPConfigurator.configure( ConfigHelper.getConfigStreamReader(jaxpFile), false );
|
||||||
|
}
|
||||||
|
catch (ProxoolException e) {
|
||||||
|
String msg = "Proxool Provider unable to load JAXP configurator file: " + jaxpFile;
|
||||||
|
log.fatal(msg, e);
|
||||||
|
throw new HibernateException(msg, e);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Append the stem to the proxool pool alias
|
||||||
|
proxoolAlias = PROXOOL_JDBC_STEM + proxoolAlias;
|
||||||
|
log.info("Configuring Proxool Provider to use pool alias: " + proxoolAlias);
|
||||||
|
|
||||||
|
// Configured using the Properties File Configurator
|
||||||
|
}
|
||||||
|
else if ( StringHelper.isNotEmpty(propFile) ) {
|
||||||
|
|
||||||
|
log.info("Configuring Proxool Provider using Properties File: " + propFile);
|
||||||
|
|
||||||
|
// Validate that an alias name was provided to determine which pool to use
|
||||||
|
if ( !StringHelper.isNotEmpty(proxoolAlias) ) {
|
||||||
|
String msg = "Cannot configure Proxool Provider to use Properties File without the " + Environment.PROXOOL_POOL_ALIAS + " property set.";
|
||||||
|
log.fatal(msg);
|
||||||
|
throw new HibernateException(msg);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
PropertyConfigurator.configure( ConfigHelper.getConfigProperties(propFile) );
|
||||||
|
}
|
||||||
|
catch (ProxoolException e) {
|
||||||
|
String msg = "Proxool Provider unable to load load Property configurator file: " + propFile;
|
||||||
|
log.fatal(msg, e);
|
||||||
|
throw new HibernateException(msg, e);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Append the stem to the proxool pool alias
|
||||||
|
proxoolAlias = PROXOOL_JDBC_STEM + proxoolAlias;
|
||||||
|
log.info("Configuring Proxool Provider to use pool alias: " + proxoolAlias);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remember Isolation level
|
||||||
|
isolation = PropertiesHelper.getInteger(Environment.ISOLATION, props);
|
||||||
|
if (isolation!=null) {
|
||||||
|
log.info("JDBC isolation level: " + Environment.isolationLevelToString( isolation.intValue() ) );
|
||||||
|
}
|
||||||
|
|
||||||
|
autocommit = PropertiesHelper.getBoolean(Environment.AUTOCOMMIT, props);
|
||||||
|
log.info("autocommit mode: " + autocommit);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Release all resources held by this provider. JavaDoc requires a second sentence.
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void close() throws HibernateException {
|
||||||
|
|
||||||
|
// If the provider was leeching off an existing pool don't close it
|
||||||
|
if (existingPool) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// We have created the pool ourselves, so shut it down
|
||||||
|
try {
|
||||||
|
ProxoolFacade.shutdown(0);
|
||||||
|
}
|
||||||
|
catch (Exception e) {
|
||||||
|
// If you're closing down the ConnectionProvider, chances are an exception
// here is not a real big deal; just warn
|
||||||
|
log.warn("Exception occured when closing the Proxool pool", e);
|
||||||
|
throw new HibernateException("Exception occured when closing the Proxool pool", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @see ConnectionProvider#supportsAggressiveRelease()
|
||||||
|
*/
|
||||||
|
public boolean supportsAggressiveRelease() {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
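A configuration sketch for the "existing pool" path handled in configure() above. The alias value is an illustrative assumption; the pool itself must already have been registered with Proxool (e.g. by the servlet container) under that alias.

package org.hibernate.connection;

import java.util.Properties;

import org.hibernate.cfg.Environment;

public class ProxoolProviderSketch {
	public static void main(String[] args) {
		Properties props = new Properties();
		props.setProperty( Environment.PROXOOL_EXISTING_POOL, "true" );
		props.setProperty( Environment.PROXOOL_POOL_ALIAS, "myPool" );   // assumed alias

		ProxoolConnectionProvider provider = new ProxoolConnectionProvider();
		provider.configure( props );   // connections will be drawn via "proxool.myPool"
		// provider.getConnection() now goes through DriverManager with that alias
	}
}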
|
|
@ -0,0 +1,147 @@
|
||||||
|
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||||
|
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
|
|
||||||
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
|
<parent>
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-code</artifactId>
|
||||||
|
<version>3.3.0.beta1</version>
|
||||||
|
<relativePath>../pom.xml</relativePath>
|
||||||
|
</parent>
|
||||||
|
|
||||||
|
<groupId>org.hibernate</groupId>
|
||||||
|
<artifactId>hibernate-core</artifactId>
|
||||||
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
|
<name>Hibernate Core</name>
|
||||||
|
<description>The core functionality of Hibernate</description>
|
||||||
|
|
||||||
|
<dependencies>
|
||||||
|
<dependency>
|
||||||
|
<groupId>antlr</groupId>
|
||||||
|
<artifactId>antlr</artifactId>
|
||||||
|
<version>2.7.6</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>commons-collections</groupId>
|
||||||
|
<artifactId>commons-collections</artifactId>
|
||||||
|
<version>3.1</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>commons-logging</groupId>
|
||||||
|
<artifactId>commons-logging</artifactId>
|
||||||
|
<version>1.0.4</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>dom4j</groupId>
|
||||||
|
<artifactId>dom4j</artifactId>
|
||||||
|
<version>1.6.1</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>javax.transaction</groupId>
|
||||||
|
<artifactId>jta</artifactId>
|
||||||
|
<version>1.1</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>javax.security</groupId>
|
||||||
|
<artifactId>jaas</artifactId>
|
||||||
|
<version>1.0.01</version>
|
||||||
|
<scope>provided</scope>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>javax.security</groupId>
|
||||||
|
<artifactId>jacc</artifactId>
|
||||||
|
<version>1.0</version>
|
||||||
|
<scope>provided</scope>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>ant</groupId>
|
||||||
|
<artifactId>ant</artifactId>
|
||||||
|
<version>1.6.5</version>
|
||||||
|
<scope>provided</scope>
|
||||||
|
</dependency>
|
||||||
|
|
||||||
|
<!-- optional deps for bytecode providers until those are finally properly scoped -->
|
||||||
|
<dependency>
|
||||||
|
<groupId>javassist</groupId>
|
||||||
|
<artifactId>javassist</artifactId>
|
||||||
|
<version>3.4.GA</version>
|
||||||
|
<optional>true</optional>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>cglib</groupId>
|
||||||
|
<artifactId>cglib</artifactId>
|
||||||
|
<version>2.1_3</version>
|
||||||
|
<optional>true</optional>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>asm</groupId>
|
||||||
|
<artifactId>asm-attrs</artifactId>
|
||||||
|
<version>1.5.3</version>
|
||||||
|
<optional>true</optional>
|
||||||
|
</dependency>
|
||||||
|
</dependencies>
|
||||||
|
|
||||||
|
<build>
|
||||||
|
<plugins>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
|
<artifactId>maven-antlr-plugin</artifactId>
|
||||||
|
<configuration>
|
||||||
|
<grammars>hql.g,hql-sql.g,sql-gen.g</grammars>
|
||||||
|
</configuration>
|
||||||
|
<executions>
|
||||||
|
<execution>
|
||||||
|
<goals>
|
||||||
|
<goal>generate</goal>
|
||||||
|
</goals>
|
||||||
|
</execution>
|
||||||
|
</executions>
|
||||||
|
</plugin>
|
||||||
|
</plugins>
|
||||||
|
</build>
|
||||||
|
|
||||||
|
	<reporting>
		<plugins>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-antlr-plugin</artifactId>
				<configuration>
					<!-- eventually should be based on the second phase grammar -->
					<grammars>hql.g</grammars>
				</configuration>
			</plugin>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-javadoc-plugin</artifactId>
				<configuration>
					<!--
					for the time being, gonna ignore the custom stylesheet (what did it do anyway???)
					<stylesheetfile>xyz</stylesheetfile>
					-->
					<groups>
						<group>
							<title>Core API</title>
							<packages>org.hibernate:org.hibernate.classic:org.hibernate.criterion:org.hibernate.metadata:org.hibernate.cfg:org.hibernate.usertype</packages>
						</group>
						<group>
							<title>Extension API</title>
							<packages>org.hibernate.id:org.hibernate.connection:org.hibernate.transaction:org.hibernate.type:org.hibernate.dialect*:org.hibernate.cache*:org.hibernate.event*:org.hibernate.action:org.hibernate.property:org.hibernate.loader*:org.hibernate.persister*:org.hibernate.proxy:org.hibernate.tuple:org.hibernate.transform:org.hibernate.collection:org.hibernate.jdbc</packages>
						</group>
						<group>
							<title>Miscellaneous API</title>
							<packages>org.hibernate.stat:org.hibernate.tool.hbm2ddl:org.hibernate.jmx:org.hibernate.mapping:org.hibernate.tool.instrument</packages>
						</group>
						<group>
							<title>Internal Implementation</title>
							<packages>org.hibernate.engine:org.hibernate.impl:org.hibernate.sql:org.hibernate.lob:org.hibernate.util:org.hibernate.exception:org.hibernate.hql:org.hibernate.hql.ast:org.hibernate.hql.antlr:org.hibernate.hql.classic:org.hibernate.intercept:org.hibernate.secure:org.hibernate.pretty</packages>
						</group>
					</groups>
				</configuration>
			</plugin>
		</plugins>
	</reporting>

</project>
@ -0,0 +1,699 @@
header
{
// $Id: hql-sql.g 10001 2006-06-08 21:08:04Z steve.ebersole@jboss.com $
package org.hibernate.hql.antlr;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
}

/**
 * Hibernate Query Language to SQL Tree Transform.<br>
 * This is a tree grammar that transforms an HQL AST into an intermediate SQL AST
 * with bindings to Hibernate interfaces (Queryable, etc.). The Hibernate specific methods
 * are all implemented in the HqlSqlWalker subclass, allowing the ANTLR-generated class
 * to have only the minimum dependencies on the Hibernate code base. This will also allow
 * the sub-class to be easily edited using an IDE (most IDEs don't support ANTLR).
 * <br>
 * <i>NOTE:</i> The java class is generated from hql-sql.g by ANTLR.
 * <i>DO NOT EDIT THE GENERATED JAVA SOURCE CODE.</i>
 * @author Joshua Davis (joshua@hibernate.org)
 */
class HqlSqlBaseWalker extends TreeParser;

options
{
	// Note: importVocab and exportVocab cause ANTLR to share the token type numbers between the
	// two grammars. This means that the token type constants from the source tree are the same
	// as those in the target tree. If this is not the case, tree translation can result in
	// token types from the *source* tree being present in the target tree.
	importVocab=Hql;        // import definitions from "Hql"
	exportVocab=HqlSql;     // Call the resulting definitions "HqlSql"
	buildAST=true;
}

tokens
{
	FROM_FRAGMENT;	// A fragment of SQL that represents a table reference in a FROM clause.
	IMPLIED_FROM;	// An implied FROM element.
	JOIN_FRAGMENT;	// A JOIN fragment.
	SELECT_CLAUSE;
	LEFT_OUTER;
	RIGHT_OUTER;
	ALIAS_REF;	// An IDENT that is a reference to an entity via its alias.
	PROPERTY_REF;	// A DOT that is a reference to a property in an entity.
	SQL_TOKEN;	// A chunk of SQL that is 'rendered' already.
	SELECT_COLUMNS;	// A chunk of SQL representing a bunch of select columns.
	SELECT_EXPR;	// A select expression, generated from a FROM element.
	THETA_JOINS;	// Root of theta join condition subtree.
	FILTERS;	// Root of the filters condition subtree.
	METHOD_NAME;	// An IDENT that is a method name.
	NAMED_PARAM;	// A named parameter (:foo).
	BOGUS;	// Used for error state detection, etc.
}

// -- Declarations --
{
	private static Log log = LogFactory.getLog( HqlSqlBaseWalker.class );

	private int level = 0;
	private boolean inSelect = false;
	private boolean inFunctionCall = false;
	private boolean inCase = false;
	private boolean inFrom = false;
	private int statementType;
	private String statementTypeName;
	// Note: currentClauseType tracks the current clause within the current
	// statement, regardless of level; currentTopLevelClauseType, on the other
	// hand, tracks the current clause within the top (or primary) statement.
	// Thus, currentTopLevelClauseType ignores the clauses from any subqueries.
	private int currentClauseType;
	private int currentTopLevelClauseType;
	private int currentStatementType;

	public final boolean isSubQuery() {
		return level > 1;
	}

	public final boolean isInFrom() {
		return inFrom;
	}

	public final boolean isInFunctionCall() {
		return inFunctionCall;
	}

	public final boolean isInSelect() {
		return inSelect;
	}

	public final boolean isInCase() {
		return inCase;
	}

	public final int getStatementType() {
		return statementType;
	}

	public final int getCurrentClauseType() {
		return currentClauseType;
	}

	public final int getCurrentTopLevelClauseType() {
		return currentTopLevelClauseType;
	}

	public final int getCurrentStatementType() {
		return currentStatementType;
	}

	public final boolean isComparativeExpressionClause() {
		// Note: once we add support for "JOIN ... ON ...",
		// the ON clause needs to get included here
		return getCurrentClauseType() == WHERE ||
				getCurrentClauseType() == WITH ||
				isInCase();
	}

	public final boolean isSelectStatement() {
		return statementType == SELECT;
	}

	private void beforeStatement(String statementName, int statementType) {
		inFunctionCall = false;
		level++;
		if ( level == 1 ) {
			this.statementTypeName = statementName;
			this.statementType = statementType;
		}
		currentStatementType = statementType;
		if ( log.isDebugEnabled() ) {
			log.debug( statementName + " << begin [level=" + level + ", statement=" + this.statementTypeName + "]" );
		}
	}

	private void beforeStatementCompletion(String statementName) {
		if ( log.isDebugEnabled() ) {
			log.debug( statementName + " : finishing up [level=" + level + ", statement=" + statementTypeName + "]" );
		}
	}

	private void afterStatementCompletion(String statementName) {
		if ( log.isDebugEnabled() ) {
			log.debug( statementName + " >> end [level=" + level + ", statement=" + statementTypeName + "]" );
		}
		level--;
	}

	private void handleClauseStart(int clauseType) {
		currentClauseType = clauseType;
		if ( level == 1 ) {
			currentTopLevelClauseType = clauseType;
		}
	}

	///////////////////////////////////////////////////////////////////////////
	// NOTE: The real implementations for the following are in the subclass.

	protected void evaluateAssignment(AST eq) throws SemanticException { }

	/** Pre-process the from clause input tree. **/
	protected void prepareFromClauseInputTree(AST fromClauseInput) {}

	/** Sets the current 'FROM' context. **/
	protected void pushFromClause(AST fromClause,AST inputFromNode) {}

	protected AST createFromElement(String path,AST alias,AST propertyFetch) throws SemanticException {
		return null;
	}

	protected void createFromJoinElement(AST path,AST alias,int joinType,AST fetch,AST propertyFetch,AST with) throws SemanticException {}

	protected AST createFromFilterElement(AST filterEntity,AST alias) throws SemanticException {
		return null;
	}

	protected void processQuery(AST select,AST query) throws SemanticException { }

	protected void postProcessUpdate(AST update) throws SemanticException { }

	protected void postProcessDelete(AST delete) throws SemanticException { }

	protected void postProcessInsert(AST insert) throws SemanticException { }

	protected void beforeSelectClause() throws SemanticException { }

	protected void processIndex(AST indexOp) throws SemanticException { }

	protected void processConstant(AST constant) throws SemanticException { }

	protected void processBoolean(AST constant) throws SemanticException { }

	protected void processNumericLiteral(AST literal) throws SemanticException { }

	protected void resolve(AST node) throws SemanticException { }

	protected void resolveSelectExpression(AST dotNode) throws SemanticException { }

	protected void processFunction(AST functionCall,boolean inSelect) throws SemanticException { }

	protected void processConstructor(AST constructor) throws SemanticException { }

	protected AST generateNamedParameter(AST delimiterNode, AST nameNode) throws SemanticException {
		return #( [NAMED_PARAM, nameNode.getText()] );
	}

	protected AST generatePositionalParameter(AST inputNode) throws SemanticException {
		return #( [PARAM, "?"] );
	}

	protected void lookupAlias(AST ident) throws SemanticException { }

	protected void setAlias(AST selectExpr, AST ident) { }

	protected AST lookupProperty(AST dot,boolean root,boolean inSelect) throws SemanticException {
		return dot;
	}

	protected boolean isNonQualifiedPropertyRef(AST ident) { return false; }

	protected AST lookupNonQualifiedProperty(AST property) throws SemanticException { return property; }

	protected void setImpliedJoinType(int joinType) { }

	protected AST createIntoClause(String path, AST propertySpec) throws SemanticException {
		return null;
	};

	protected void prepareVersioned(AST updateNode, AST versionedNode) throws SemanticException {}

	protected void prepareLogicOperator(AST operator) throws SemanticException { }

	protected void prepareArithmeticOperator(AST operator) throws SemanticException { }
}

// The main statement rule.
statement
	: selectStatement | updateStatement | deleteStatement | insertStatement
	;

selectStatement
	: query
	;

// Cannot use just the fromElement rule here in the update and delete queries
// because fromElement essentially relies on a FromClause already having been
// built :(
updateStatement!
	: #( u:UPDATE { beforeStatement( "update", UPDATE ); } (v:VERSIONED)? f:fromClause s:setClause (w:whereClause)? ) {
		#updateStatement = #(#u, #f, #s, #w);
		beforeStatementCompletion( "update" );
		prepareVersioned( #updateStatement, #v );
		postProcessUpdate( #updateStatement );
		afterStatementCompletion( "update" );
	}
	;

deleteStatement
	: #( DELETE { beforeStatement( "delete", DELETE ); } fromClause (whereClause)? ) {
		beforeStatementCompletion( "delete" );
		postProcessDelete( #deleteStatement );
		afterStatementCompletion( "delete" );
	}
	;

insertStatement
	// currently only "INSERT ... SELECT ..." statements supported;
	// do we also need support for "INSERT ... VALUES ..."?
	//
	: #( INSERT { beforeStatement( "insert", INSERT ); } intoClause query ) {
		beforeStatementCompletion( "insert" );
		postProcessInsert( #insertStatement );
		afterStatementCompletion( "insert" );
	}
	;

intoClause! {
	String p = null;
}
	: #( INTO { handleClauseStart( INTO ); } (p=path) ps:insertablePropertySpec ) {
		#intoClause = createIntoClause(p, ps);
	}
	;

insertablePropertySpec
	: #( RANGE (IDENT)+ )
	;

setClause
	: #( SET { handleClauseStart( SET ); } (assignment)* )
	;

assignment
	// Note: the propertyRef here needs to be resolved
	// *before* we evaluate the newValue rule...
	: #( EQ (p:propertyRef) { resolve(#p); } (newValue) ) {
		evaluateAssignment( #assignment );
	}
	;

// For now, just use expr. Revisit after ejb3 solidifies this.
newValue
	: expr | query
	;

// The query / subquery rule. Pops the current 'from node' context
// (list of aliases).
query!
	: #( QUERY { beforeStatement( "select", SELECT ); }
		// The first phase places the FROM first to make processing the SELECT simpler.
		#(SELECT_FROM
			f:fromClause
			(s:selectClause)?
		)
		(w:whereClause)?
		(g:groupClause)?
		(o:orderClause)?
	) {
		// Antlr note: #x_in refers to the input AST, #x refers to the output AST
		#query = #([SELECT,"SELECT"], #s, #f, #w, #g, #o);
		beforeStatementCompletion( "select" );
		processQuery( #s, #query );
		afterStatementCompletion( "select" );
	}
	;

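// Worked example (illustrative, not generated output): for an HQL statement such as
// "select c.name from Cat c" the first-phase AST arrives shaped roughly as
//     (QUERY (SELECT_FROM (FROM ...) (SELECT ...)) )
// and the query! rule above re-roots it as
//     (SELECT (SELECT_CLAUSE ...) (FROM ...) ... )
// i.e. the from clause is walked before the select list, which is exactly why the
// first phase places FROM first under SELECT_FROM.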
orderClause
	: #(ORDER { handleClauseStart( ORDER ); } orderExprs)
	;

orderExprs
	: expr ( ASCENDING | DESCENDING )? (orderExprs)?
	;

groupClause
	: #(GROUP { handleClauseStart( GROUP ); } (expr)+ ( #(HAVING logicalExpr) )? )
	;

selectClause!
	: #(SELECT { handleClauseStart( SELECT ); beforeSelectClause(); } (d:DISTINCT)? x:selectExprList ) {
		#selectClause = #([SELECT_CLAUSE,"{select clause}"], #d, #x);
	}
	;

selectExprList {
	boolean oldInSelect = inSelect;
	inSelect = true;
}
	: ( selectExpr | aliasedSelectExpr )+ {
		inSelect = oldInSelect;
	}
	;

aliasedSelectExpr!
	: #(AS se:selectExpr i:identifier) {
		setAlias(#se,#i);
		#aliasedSelectExpr = #se;
	}
	;

selectExpr
	: p:propertyRef { resolveSelectExpression(#p); }
	| #(ALL ar2:aliasRef) { resolveSelectExpression(#ar2); #selectExpr = #ar2; }
	| #(OBJECT ar3:aliasRef) { resolveSelectExpression(#ar3); #selectExpr = #ar3; }
	| con:constructor { processConstructor(#con); }
	| functionCall
	| count
	| collectionFunction // elements() or indices()
	| literal
	| arithmeticExpr
	| query
	;

count
	: #(COUNT ( DISTINCT | ALL )? ( aggregateExpr | ROW_STAR ) )
	;

constructor
	{ String className = null; }
	: #(CONSTRUCTOR className=path ( selectExpr | aliasedSelectExpr )* )
	;

aggregateExpr
	: expr //p:propertyRef { resolve(#p); }
	| collectionFunction
	;

// Establishes the list of aliases being used by this query.
fromClause {
	// NOTE: This references the INPUT AST! (see http://www.antlr.org/doc/trees.html#Action%20Translation)
	// the output AST (#fromClause) has not been built yet.
	prepareFromClauseInputTree(#fromClause_in);
}
	: #(f:FROM { pushFromClause(#fromClause,f); handleClauseStart( FROM ); } fromElementList )
	;

fromElementList {
	boolean oldInFrom = inFrom;
	inFrom = true;
}
	: (fromElement)+ {
		inFrom = oldInFrom;
	}
	;

fromElement! {
	String p = null;
}
	// A simple class name, alias element.
	: #(RANGE p=path (a:ALIAS)? (pf:FETCH)? ) {
		#fromElement = createFromElement(p,a, pf);
	}
	| je:joinElement {
		#fromElement = #je;
	}
	// A from element created due to filter compilation
	| fe:FILTER_ENTITY a3:ALIAS {
		#fromElement = createFromFilterElement(fe,a3);
	}
	;

joinElement! {
	int j = INNER;
}
	// A from element with a join. This time, the 'path' should be treated as an AST
	// and resolved (like any path in a WHERE clause). Make sure all implied joins
	// generated by the property ref use the join type, if it was specified.
	: #(JOIN (j=joinType { setImpliedJoinType(j); } )? (f:FETCH)? ref:propertyRef (a:ALIAS)? (pf:FETCH)? (with:WITH)? ) {
		//createFromJoinElement(#ref,a,j,f, pf);
		createFromJoinElement(#ref,a,j,f, pf, with);
		setImpliedJoinType(INNER);	// Reset the implied join type.
	}
	;

// Returns a node type integer that represents the join type
// tokens.
joinType returns [int j] {
	j = INNER;
}
	: ( (left:LEFT | right:RIGHT) (outer:OUTER)? ) {
		if (left != null)       j = LEFT_OUTER;
		else if (right != null) j = RIGHT_OUTER;
		else if (outer != null) j = RIGHT_OUTER;
	}
	| FULL {
		j = FULL;
	}
	| INNER {
		j = INNER;
	}
	;

// Matches a path and returns the normalized string for the path (usually
// a fully qualified class name).
path returns [String p] {
	p = "???";
	String x = "?x?";
}
	: a:identifier { p = a.getText(); }
	| #(DOT x=path y:identifier) {
		StringBuffer buf = new StringBuffer();
		buf.append(x).append(".").append(y.getText());
		p = buf.toString();
	}
	;

// Returns a path as a single identifier node.
pathAsIdent {
	String text = "?text?";
}
	: text=path {
		#pathAsIdent = #([IDENT,text]);
	}
	;

withClause
	// Note : this is used internally from the HqlSqlWalker to
	// parse the node recognized with the with keyword earlier.
	// Done this way because it relies on the join it "qualifies"
	// already having been processed, which would not be the case
	// if withClause was simply referenced from the joinElement
	// rule during recognition...
	: #(w:WITH { handleClauseStart( WITH ); } b:logicalExpr ) {
		#withClause = #(w , #b);
	}
	;

whereClause
	: #(w:WHERE { handleClauseStart( WHERE ); } b:logicalExpr ) {
		// Use the *output* AST for the boolean expression!
		#whereClause = #(w , #b);
	}
	;

logicalExpr
	: #(AND logicalExpr logicalExpr)
	| #(OR logicalExpr logicalExpr)
	| #(NOT logicalExpr)
	| comparisonExpr
	;

// TODO: Add any other comparison operators here.
comparisonExpr
	:
	( #(EQ exprOrSubquery exprOrSubquery)
	| #(NE exprOrSubquery exprOrSubquery)
	| #(LT exprOrSubquery exprOrSubquery)
	| #(GT exprOrSubquery exprOrSubquery)
	| #(LE exprOrSubquery exprOrSubquery)
	| #(GE exprOrSubquery exprOrSubquery)
	| #(LIKE exprOrSubquery expr ( #(ESCAPE expr) )? )
	| #(NOT_LIKE exprOrSubquery expr ( #(ESCAPE expr) )? )
	| #(BETWEEN exprOrSubquery exprOrSubquery exprOrSubquery)
	| #(NOT_BETWEEN exprOrSubquery exprOrSubquery exprOrSubquery)
	| #(IN exprOrSubquery inRhs )
	| #(NOT_IN exprOrSubquery inRhs )
	| #(IS_NULL exprOrSubquery)
	| #(IS_NOT_NULL exprOrSubquery)
//	| #(IS_TRUE expr)
//	| #(IS_FALSE expr)
	| #(EXISTS ( expr | collectionFunctionOrSubselect ) )
	) {
		prepareLogicOperator( #comparisonExpr );
	}
	;

inRhs
	: #(IN_LIST ( collectionFunctionOrSubselect | ( (expr)* ) ) )
	;

exprOrSubquery
	: expr
	| query
	| #(ANY collectionFunctionOrSubselect)
	| #(ALL collectionFunctionOrSubselect)
	| #(SOME collectionFunctionOrSubselect)
	;

collectionFunctionOrSubselect
	: collectionFunction
	| query
	;

expr
	: ae:addrExpr [ true ] { resolve(#ae); }	// Resolve the top level 'address expression'
	| #( VECTOR_EXPR (expr)* )
	| constant
	| arithmeticExpr
	| functionCall	// Function call, not in the SELECT clause.
	| parameter
	| count	// Count, not in the SELECT clause.
	;

arithmeticExpr
	: #(PLUS expr expr)  { prepareArithmeticOperator( #arithmeticExpr ); }
	| #(MINUS expr expr) { prepareArithmeticOperator( #arithmeticExpr ); }
	| #(DIV expr expr)   { prepareArithmeticOperator( #arithmeticExpr ); }
	| #(STAR expr expr)  { prepareArithmeticOperator( #arithmeticExpr ); }
//	| #(CONCAT expr (expr)+ ) { prepareArithmeticOperator( #arithmeticExpr ); }
	| #(UNARY_MINUS expr) { prepareArithmeticOperator( #arithmeticExpr ); }
	| caseExpr
	;

caseExpr
	: #(CASE { inCase = true; } (#(WHEN logicalExpr expr))+ (#(ELSE expr))?) { inCase = false; }
	| #(CASE2 { inCase = true; } expr (#(WHEN expr expr))+ (#(ELSE expr))?) { inCase = false; }
	;

//TODO: I don't think we need this anymore .. how is it different to
//      maxelements, etc, which are handled by functionCall
collectionFunction
	: #(e:ELEMENTS {inFunctionCall=true;} p1:propertyRef { resolve(#p1); } )
		{ processFunction(#e,inSelect); } {inFunctionCall=false;}
	| #(i:INDICES {inFunctionCall=true;} p2:propertyRef { resolve(#p2); } )
		{ processFunction(#i,inSelect); } {inFunctionCall=false;}
	;

functionCall
	: #(METHOD_CALL {inFunctionCall=true;} pathAsIdent ( #(EXPR_LIST (expr)* ) )? )
		{ processFunction(#functionCall,inSelect); } {inFunctionCall=false;}
	| #(AGGREGATE aggregateExpr )
	;

constant
	: literal
	| NULL
	| TRUE { processBoolean(#constant); }
	| FALSE { processBoolean(#constant); }
	| JAVA_CONSTANT
	;

literal
	: NUM_INT { processNumericLiteral( #literal ); }
	| NUM_LONG { processNumericLiteral( #literal ); }
	| NUM_FLOAT { processNumericLiteral( #literal ); }
	| NUM_DOUBLE { processNumericLiteral( #literal ); }
	| QUOTED_STRING
	;

identifier
	: (IDENT | WEIRD_IDENT)
	;

addrExpr! [ boolean root ]
	: #(d:DOT lhs:addrExprLhs rhs:propertyName ) {
		// This gives lookupProperty() a chance to transform the tree
		// to process collection properties (.elements, etc).
		#addrExpr = #(#d, #lhs, #rhs);
		#addrExpr = lookupProperty(#addrExpr,root,false);
	}
	| #(i:INDEX_OP lhs2:addrExprLhs rhs2:expr) {
		#addrExpr = #(#i, #lhs2, #rhs2);
		processIndex(#addrExpr);
	}
	| p:identifier {
//		#addrExpr = #p;
//		resolve(#addrExpr);
		// In many cases, things other than property-refs are recognized
		// by this addrExpr rule. Some of those I have seen:
		//  1) select-clause from-aliases
		//  2) sql-functions
		if ( isNonQualifiedPropertyRef(#p) ) {
			#addrExpr = lookupNonQualifiedProperty(#p);
		}
		else {
			resolve(#p);
			#addrExpr = #p;
		}
	}
	;

addrExprLhs
	: addrExpr [ false ]
	;

propertyName
	: identifier
	| CLASS
	| ELEMENTS
	| INDICES
	;

propertyRef!
	: #(d:DOT lhs:propertyRefLhs rhs:propertyName ) {
		// This gives lookupProperty() a chance to transform the tree to process collection properties (.elements, etc).
		#propertyRef = #(#d, #lhs, #rhs);
		#propertyRef = lookupProperty(#propertyRef,false,true);
	}
	| p:identifier {
		// In many cases, things other than property-refs are recognized
		// by this propertyRef rule. Some of those I have seen:
		//  1) select-clause from-aliases
		//  2) sql-functions
		if ( isNonQualifiedPropertyRef(#p) ) {
			#propertyRef = lookupNonQualifiedProperty(#p);
		}
		else {
			resolve(#p);
			#propertyRef = #p;
		}
	}
	;

propertyRefLhs
	: propertyRef
	;

aliasRef!
	: i:identifier {
		#aliasRef = #([ALIAS_REF,i.getText()]);	// Create an ALIAS_REF node instead of an IDENT node.
		lookupAlias(#aliasRef);
	}
	;

parameter!
	: #(c:COLON a:identifier) {
		// Create a NAMED_PARAM node instead of (COLON IDENT).
		#parameter = generateNamedParameter( c, a );
//		#parameter = #([NAMED_PARAM,a.getText()]);
//		namedParameter(#parameter);
	}
	| #(p:PARAM (n:NUM_INT)?) {
		if ( n != null ) {
			// An ejb3-style "positional parameter", which we handle internally as a named-param
			#parameter = generateNamedParameter( p, n );
//			#parameter = #([NAMED_PARAM,n.getText()]);
//			namedParameter(#parameter);
		}
		else {
			#parameter = generatePositionalParameter( p );
//			#parameter = #([PARAM,"?"]);
//			positionalParameter(#parameter);
		}
	}
	;

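// Examples of how the parameter! rule above normalizes parameters (illustrative HQL fragments):
//   ":name" -> a NAMED_PARAM node with text "name" (via generateNamedParameter)
//   "?1"    -> also a NAMED_PARAM, since ejb3-style positional parameters are handled
//              internally as named parameters keyed by their number
//   "?"     -> a plain positional PARAM node (via generatePositionalParameter)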
numericInteger
	: NUM_INT
	;
@ -0,0 +1,883 @@
header
{
// $Id: hql.g 10163 2006-07-26 15:07:50Z steve.ebersole@jboss.com $

package org.hibernate.hql.antlr;

import org.hibernate.hql.ast.*;
import org.hibernate.hql.ast.util.*;

}
/**
 * Hibernate Query Language Grammar
 * <br>
 * This grammar parses the query language for Hibernate (an Open Source, Object-Relational
 * mapping library). A partial BNF grammar description is available for reference here:
 * http://www.hibernate.org/Documentation/HQLBNF
 *
 * Text from the original reference BNF is prefixed with '//##'.
 * @author Joshua Davis (pgmjsd@sourceforge.net)
 */
class HqlBaseParser extends Parser;

options
{
	exportVocab=Hql;
	buildAST=true;
	k=3;    // For 'not like', 'not in', etc.
}

tokens
{
	// -- HQL Keyword tokens --
	ALL="all";
	ANY="any";
	AND="and";
	AS="as";
	ASCENDING="asc";
	AVG="avg";
	BETWEEN="between";
	CLASS="class";
	COUNT="count";
	DELETE="delete";
	DESCENDING="desc";
	DOT;
	DISTINCT="distinct";
	ELEMENTS="elements";
	ESCAPE="escape";
	EXISTS="exists";
	FALSE="false";
	FETCH="fetch";
	FROM="from";
	FULL="full";
	GROUP="group";
	HAVING="having";
	IN="in";
	INDICES="indices";
	INNER="inner";
	INSERT="insert";
	INTO="into";
	IS="is";
	JOIN="join";
	LEFT="left";
	LIKE="like";
	MAX="max";
	MIN="min";
	NEW="new";
	NOT="not";
	NULL="null";
	OR="or";
	ORDER="order";
	OUTER="outer";
	PROPERTIES="properties";
	RIGHT="right";
	SELECT="select";
	SET="set";
	SOME="some";
	SUM="sum";
	TRUE="true";
	UNION="union";
	UPDATE="update";
	VERSIONED="versioned";
	WHERE="where";

	// -- SQL tokens --
	// These aren't part of HQL, but the SQL fragment parser uses the HQL lexer, so they need to be declared here.
	CASE="case";
	END="end";
	ELSE="else";
	THEN="then";
	WHEN="when";
	ON="on";
	WITH="with";

	// -- EJBQL tokens --
	BOTH="both";
	EMPTY="empty";
	LEADING="leading";
	MEMBER="member";
	OBJECT="object";
	OF="of";
	TRAILING="trailing";

	// -- Synthetic token types --
	AGGREGATE;	// One of the aggregate functions (e.g. min, max, avg)
	ALIAS;
	CONSTRUCTOR;
	CASE2;
	EXPR_LIST;
	FILTER_ENTITY;	// FROM element injected because of a filter expression (happens during compilation phase 2)
	IN_LIST;
	INDEX_OP;
	IS_NOT_NULL;
	IS_NULL;	// Unary 'is null' operator.
	METHOD_CALL;
	NOT_BETWEEN;
	NOT_IN;
	NOT_LIKE;
	ORDER_ELEMENT;
	QUERY;
	RANGE;
	ROW_STAR;
	SELECT_FROM;
	UNARY_MINUS;
	UNARY_PLUS;
	VECTOR_EXPR;	// ( x, y, z )
	WEIRD_IDENT;	// Identifiers that were keywords when they came in.

	// Literal tokens.
	CONSTANT;
	NUM_DOUBLE;
	NUM_FLOAT;
	NUM_LONG;
	JAVA_CONSTANT;
}

{
	/** True if this is a filter query (allow no FROM clause). **/
	private boolean filter = false;

	/**
	 * Sets the filter flag.
	 * @param f True for a filter query, false for a normal query.
	 */
	public void setFilter(boolean f) {
		filter = f;
	}

	/**
	 * Returns true if this is a filter query, false if not.
	 * @return true if this is a filter query, false if not.
	 */
	public boolean isFilter() {
		return filter;
	}

	/**
	 * This method is overridden in the subclass in order to provide the
	 * 'keyword as identifier' hack.
	 * @param token The token to retry as an identifier.
	 * @param ex The exception to throw if it cannot be retried as an identifier.
	 */
	public AST handleIdentifierError(Token token,RecognitionException ex) throws RecognitionException, TokenStreamException {
		// Base implementation: Just re-throw the exception.
		throw ex;
	}

	/**
	 * This method looks ahead and converts . <token> into . IDENT when
	 * appropriate.
	 */
	public void handleDotIdent() throws TokenStreamException {
	}

	/**
	 * Returns the negated equivalent of the expression.
	 * @param x The expression to negate.
	 */
	public AST negateNode(AST x) {
		// Just create a 'not' parent for the default behavior.
		return ASTUtil.createParent(astFactory, NOT, "not", x);
	}

	/**
	 * Returns the 'cleaned up' version of a comparison operator sub-tree.
	 * @param x The comparison operator to clean up.
	 */
	public AST processEqualityExpression(AST x) throws RecognitionException {
		return x;
	}

	public void weakKeywords() throws TokenStreamException { }

	public void processMemberOf(Token n,AST p,ASTPair currentAST) { }

}

statement
	: ( updateStatement | deleteStatement | selectStatement | insertStatement )
	;

updateStatement
	: UPDATE^ (VERSIONED)?
		optionalFromTokenFromClause
		setClause
		(whereClause)?
	;

setClause
	: (SET^ assignment (COMMA! assignment)*)
	;

assignment
	: stateField EQ^ newValue
	;

// "state_field" is the term used in the EJB3 sample grammar; used here for easy reference.
// it is basically a property ref
stateField
	: path
	;

// this still needs to be defined in the ejb3 spec; additiveExpression is currently just a best guess,
// although it is highly likely I would think that the spec may limit this even more tightly.
newValue
	: concatenation
	;

deleteStatement
	: DELETE^
		(optionalFromTokenFromClause)
		(whereClause)?
	;

optionalFromTokenFromClause!
	: (FROM!)? f:path (a:asAlias)? {
		AST #range = #([RANGE, "RANGE"], #f, #a);
		#optionalFromTokenFromClause = #([FROM, "FROM"], #range);
	}
	;

selectStatement
	: queryRule {
		#selectStatement = #([QUERY,"query"], #selectStatement);
	}
	;

insertStatement
	// Would be nice if we could abstract the FromClause/FromElement logic
	// out such that it could be reused here; something analogous to
	// a "table" rule in sql-grammars
	: INSERT^ intoClause selectStatement
	;

intoClause
	: INTO^ path { weakKeywords(); } insertablePropertySpec
	;

insertablePropertySpec
	: OPEN! primaryExpression ( COMMA! primaryExpression )* CLOSE! {
		// Just need *something* to distinguish this on the hql-sql.g side
		#insertablePropertySpec = #([RANGE, "column-spec"], #insertablePropertySpec);
	}
	;

union
	: queryRule (UNION queryRule)*
	;

//## query:
//##     [selectClause] fromClause [whereClause] [groupByClause] [havingClause] [orderByClause];

queryRule
	: selectFrom
		(whereClause)?
		(groupByClause)?
		(orderByClause)?
	;

selectFrom!
	: (s:selectClause)? (f:fromClause)? {
		// If there was no FROM clause and this is a filter query, create a from clause. Otherwise, throw
		// an exception because non-filter queries must have a FROM clause.
		if (#f == null) {
			if (filter) {
				#f = #([FROM,"{filter-implied FROM}"]);
			}
			else
				throw new SemanticException("FROM expected (non-filter queries must contain a FROM clause)");
		}

		// Create an artificial token so the 'FROM' can be placed
		// before the SELECT in the tree to make tree processing
		// simpler.
		#selectFrom = #([SELECT_FROM,"SELECT_FROM"],f,s);
	}
	;

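// Example (illustrative): a collection filter such as "where this.name = :name" has no
// FROM clause of its own; with the filter flag set, selectFrom above synthesizes the
// "{filter-implied FROM}" node instead of failing with the SemanticException.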
//## selectClause:
//##     SELECT DISTINCT? selectedPropertiesList | ( NEW className OPEN selectedPropertiesList CLOSE );

selectClause
	: SELECT^	// NOTE: The '^' after a token causes the corresponding AST node to be the root of the sub-tree.
		{ weakKeywords(); }	// Weak keywords can appear immediately after a SELECT token.
		(DISTINCT)? ( selectedPropertiesList | newExpression | selectObject )
	;

newExpression
	: (NEW! path) op:OPEN^ {#op.setType(CONSTRUCTOR);} selectedPropertiesList CLOSE!
	;

selectObject
	: OBJECT^ OPEN! identifier CLOSE!
	;

//## fromClause:
//##     FROM className AS? identifier ( ( COMMA className AS? identifier ) | ( joinType path AS? identifier ) )*;

// NOTE: This *must* begin with the "FROM" token, otherwise the sub-query rule will be ambiguous
// with the expression rule.
// Also note: after a comma weak keywords are allowed and should be treated as identifiers.

fromClause
	: FROM^ { weakKeywords(); } fromRange ( fromJoin | COMMA! { weakKeywords(); } fromRange )*
	;

//## joinType:
//##     ( ( 'left'|'right' 'outer'? ) | 'full' | 'inner' )? JOIN FETCH?;

fromJoin
	: ( ( ( LEFT | RIGHT ) (OUTER)? ) | FULL | INNER )? JOIN^ (FETCH)?
		path (asAlias)? (propertyFetch)? (withClause)?
	;

withClause
	: WITH^ logicalExpression
	;

fromRange
	: fromClassOrOuterQueryPath
	| inClassDeclaration
	| inCollectionDeclaration
	| inCollectionElementsDeclaration
	;

fromClassOrOuterQueryPath!
	: c:path { weakKeywords(); } (a:asAlias)? (p:propertyFetch)? {
		#fromClassOrOuterQueryPath = #([RANGE, "RANGE"], #c, #a, #p);
	}
	;

inClassDeclaration!
	: a:alias IN! CLASS! c:path {
		#inClassDeclaration = #([RANGE, "RANGE"], #c, #a);
	}
	;

inCollectionDeclaration!
	: IN! OPEN! p:path CLOSE! a:alias {
		#inCollectionDeclaration = #([JOIN, "join"], [INNER, "inner"], #p, #a);
	}
	;

inCollectionElementsDeclaration!
	: a:alias IN! ELEMENTS! OPEN! p:path CLOSE! {
		#inCollectionElementsDeclaration = #([JOIN, "join"], [INNER, "inner"], #p, #a);
	}
	;

// Alias rule - Parses the optional 'as' token and forces an AST identifier node.
asAlias
	: (AS!)? alias
	;

alias
	: a:identifier { #a.setType(ALIAS); }
	;

propertyFetch
	: FETCH ALL! PROPERTIES!
	;

//## groupByClause:
//##     GROUP_BY path ( COMMA path )*;

groupByClause
	: GROUP^
		"by"! expression ( COMMA! expression )*
		(havingClause)?
	;

//## orderByClause:
//##     ORDER_BY selectedPropertiesList;

orderByClause
	: ORDER^ "by"! orderElement ( COMMA! orderElement )*
	;

orderElement
	: expression ( ascendingOrDescending )?
	;

ascendingOrDescending
	: ( "asc" | "ascending" )  { #ascendingOrDescending.setType(ASCENDING); }
	| ( "desc" | "descending") { #ascendingOrDescending.setType(DESCENDING); }
	;

//## havingClause:
//##     HAVING logicalExpression;

havingClause
	: HAVING^ logicalExpression
	;

//## whereClause:
//##     WHERE logicalExpression;

whereClause
	: WHERE^ logicalExpression
	;

//## selectedPropertiesList:
//##     ( path | aggregate ) ( COMMA path | aggregate )*;

selectedPropertiesList
	: aliasedExpression ( COMMA! aliasedExpression )*
	;

aliasedExpression
	: expression ( AS^ identifier )?
	;

// expressions
// Note that most of these expressions follow the pattern
//   thisLevelExpression :
//       nextHigherPrecedenceExpression
//           (OPERATOR nextHigherPrecedenceExpression)*
// which is a standard recursive definition for parsing an expression.
//
// Operator precedence in HQL
// lowest  --> ( 7)  OR
//             ( 6)  AND, NOT
//             ( 5)  equality: ==, <>, !=, is
//             ( 4)  relational: <, <=, >, >=,
//                   LIKE, NOT LIKE, BETWEEN, NOT BETWEEN, IN, NOT IN
//             ( 3)  addition and subtraction: +(binary) -(binary)
//             ( 2)  multiplication: * / %, concatenate: ||
// highest --> ( 1)  +(unary) -(unary)
//                   [] () (method call)  . (dot -- identifier qualification)
//                   aggregate function
//                   () (explicit parenthesis)
//
// Note that the above precedence levels map to the rules below...
// Once you have a precedence chart, writing the appropriate rules as below
// is usually very straightforward

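// Worked example (illustrative query fragment): given the precedence table above,
//   "not o.weight + 2 > 10 or o.name like 'x%'"
// groups as
//   (not ((o.weight + 2) > 10)) or (o.name like 'x%')
// because binary + binds tighter than the relational/LIKE level, which binds tighter
// than NOT, which in turn binds tighter than OR.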
logicalExpression
	: expression
	;

// Main expression rule
expression
	: logicalOrExpression
	;

// level 7 - OR
logicalOrExpression
	: logicalAndExpression ( OR^ logicalAndExpression )*
	;

// level 6 - AND, NOT
logicalAndExpression
	: negatedExpression ( AND^ negatedExpression )*
	;

// NOT nodes aren't generated.  Instead, the operator in the sub-tree will be
// negated, if possible.   Expressions without a NOT parent are passed through.
negatedExpression!
	{ weakKeywords(); } // Weak keywords can appear in an expression, so look ahead.
	: NOT^ x:negatedExpression { #negatedExpression = negateNode(#x); }
	| y:equalityExpression { #negatedExpression = #y; }
	;

//## OP: EQ | LT | GT | LE | GE | NE | SQL_NE | LIKE;

// level 5 - EQ, NE
equalityExpression
	: x:relationalExpression (
		( EQ^
		| is:IS^	{ #is.setType(EQ); } (NOT! { #is.setType(NE); } )?
		| NE^
		| ne:SQL_NE^	{ #ne.setType(NE); }
		) y:relationalExpression)* {
			// Post process the equality expression to clean up 'is null', etc.
			#equalityExpression = processEqualityExpression(#equalityExpression);
	}
	;

// level 4 - LT, GT, LE, GE, LIKE, NOT LIKE, BETWEEN, NOT BETWEEN
// NOTE: The NOT prefix for LIKE and BETWEEN will be represented in the
// token type.  When traversing the AST, use the token type, and not the
// token text to interpret the semantics of these nodes.
relationalExpression
	: concatenation (
		( ( ( LT^ | GT^ | LE^ | GE^ ) additiveExpression )* )
		// Disable node production for the optional 'not'.
		| (n:NOT!)? (
			// Represent the optional NOT prefix using the token type by
			// testing 'n' and setting the token type accordingly.
			(i:IN^ {
				#i.setType( (n == null) ? IN : NOT_IN);
				#i.setText( (n == null) ? "in" : "not in");
				}
				inList)
			| (b:BETWEEN^ {
				#b.setType( (n == null) ? BETWEEN : NOT_BETWEEN);
				#b.setText( (n == null) ? "between" : "not between");
				}
				betweenList )
			| (l:LIKE^ {
				#l.setType( (n == null) ? LIKE : NOT_LIKE);
				#l.setText( (n == null) ? "like" : "not like");
				}
				concatenation likeEscape)
			| (MEMBER! (OF!)? p:path! {
				processMemberOf(n,#p,currentAST);
				} ) )
		)
	;

likeEscape
	: (ESCAPE^ concatenation)?
	;

inList
	: x:compoundExpr
	{ #inList = #([IN_LIST,"inList"], #inList); }
	;

betweenList
	: concatenation AND! concatenation
	;

//level 4 - string concatenation
concatenation
	: additiveExpression
	( c:CONCAT^ { #c.setType(EXPR_LIST); #c.setText("concatList"); }
		additiveExpression
		( CONCAT! additiveExpression )*
		{ #concatenation = #([METHOD_CALL, "||"], #([IDENT, "concat"]), #c ); } )?
	;

// level 3 - binary plus and minus
additiveExpression
	: multiplyExpression ( ( PLUS^ | MINUS^ ) multiplyExpression )*
	;

// level 2 - binary multiply and divide
multiplyExpression
	: unaryExpression ( ( STAR^ | DIV^ ) unaryExpression )*
	;

// level 1 - unary minus, unary plus, not
unaryExpression
	: MINUS^ {#MINUS.setType(UNARY_MINUS);} unaryExpression
	| PLUS^ {#PLUS.setType(UNARY_PLUS);} unaryExpression
	| caseExpression
	| quantifiedExpression
	| atom
	;

caseExpression
	: CASE^ (whenClause)+ (elseClause)? END!
	| CASE^ { #CASE.setType(CASE2); } unaryExpression (altWhenClause)+ (elseClause)? END!
	;

whenClause
	: (WHEN^ logicalExpression THEN! unaryExpression)
	;

altWhenClause
	: (WHEN^ unaryExpression THEN! unaryExpression)
	;

elseClause
	: (ELSE^ unaryExpression)
	;

quantifiedExpression
	: ( SOME^ | EXISTS^ | ALL^ | ANY^ )
	( identifier | collectionExpr | (OPEN! ( subQuery ) CLOSE!) )
	;

// level 0 - expression atom
// ident qualifier ('.' ident ), array index ( [ expr ] ),
// method call ( '.' ident '(' exprList ') )
atom
	: primaryExpression
		(
			DOT^ identifier
				( options { greedy=true; } :
					( op:OPEN^ {#op.setType(METHOD_CALL);} exprList CLOSE! ) )?
		| lb:OPEN_BRACKET^ {#lb.setType(INDEX_OP);} expression CLOSE_BRACKET!
		)*
	;

// level 0 - the basic element of an expression
primaryExpression
	: identPrimary ( options {greedy=true;} : DOT^ "class" )?
	| constant
	| COLON^ identifier
	// TODO: Add parens to the tree so the user can control the operator evaluation order.
	| OPEN! (expressionOrVector | subQuery) CLOSE!
	| PARAM^ (NUM_INT)?
	;

// This parses a normal expression or a list of expressions separated by commas.  If a comma is encountered
// a parent VECTOR_EXPR node will be created for the list.
expressionOrVector!
	: e:expression ( v:vectorExpr )? {
		// If this is a vector expression, create a parent node for it.
		if (#v != null)
			#expressionOrVector = #([VECTOR_EXPR,"{vector}"], #e, #v);
		else
			#expressionOrVector = #e;
	}
	;

vectorExpr
	: COMMA! expression (COMMA! expression)*
	;

// identifier, followed by member refs (dot ident), or method calls.
// NOTE: handleDotIdent() is called immediately after the first IDENT is recognized because
// the method looks ahead to find keywords after DOT and turns them into identifiers.
identPrimary
	: identifier { handleDotIdent(); }
			( options { greedy=true; } : DOT^ ( identifier | ELEMENTS | o:OBJECT { #o.setType(IDENT); } ) )*
			( options { greedy=true; } :
				( op:OPEN^ { #op.setType(METHOD_CALL);} exprList CLOSE! )
			)?
	// Also allow special 'aggregate functions' such as count(), avg(), etc.
	| aggregate
	;

//## aggregate:
//##     ( aggregateFunction OPEN path CLOSE ) | ( COUNT OPEN STAR CLOSE ) | ( COUNT OPEN (DISTINCT | ALL) path CLOSE );

//## aggregateFunction:
//##     COUNT | 'sum' | 'avg' | 'max' | 'min';

aggregate
	: ( SUM^ | AVG^ | MAX^ | MIN^ ) OPEN! additiveExpression CLOSE! { #aggregate.setType(AGGREGATE); }
	// Special case for count - Its 'parameters' can be keywords.
	| COUNT^ OPEN! ( STAR { #STAR.setType(ROW_STAR); } | ( ( DISTINCT | ALL )? ( path | collectionExpr ) ) ) CLOSE!
	| collectionExpr
	;

//## collection: ( OPEN query CLOSE ) | ( 'elements'|'indices' OPEN path CLOSE );

collectionExpr
	: (ELEMENTS^ | INDICES^) OPEN! path CLOSE!
	;

// NOTE: compoundExpr can be a 'path' where the last token in the path is '.elements' or '.indices'
compoundExpr
	: collectionExpr
	| path
	| (OPEN! ( (expression (COMMA! expression)*) | subQuery ) CLOSE!)
	;

subQuery
	: union
	{ #subQuery = #([QUERY,"query"], #subQuery); }
	;

exprList
{
	AST trimSpec = null;
}
	: (t:TRAILING {#trimSpec = #t;} | l:LEADING {#trimSpec = #l;} | b:BOTH {#trimSpec = #b;})?
		{ if(#trimSpec != null) #trimSpec.setType(IDENT); }
	(
		expression ( (COMMA! expression)+ | FROM { #FROM.setType(IDENT); } expression | AS! identifier )?
		| FROM { #FROM.setType(IDENT); } expression
	)?
	{ #exprList = #([EXPR_LIST,"exprList"], #exprList); }
	;

constant
	: NUM_INT
	| NUM_FLOAT
	| NUM_LONG
	| NUM_DOUBLE
	| QUOTED_STRING
	| NULL
	| TRUE
	| FALSE
	| EMPTY
	;

//## quantifiedExpression: 'exists' | ( expression 'in' ) | ( expression OP 'any' | 'some' ) collection;

//## compoundPath: path ( OPEN_BRACKET expression CLOSE_BRACKET ( '.' path )? )*;

//## path: identifier ( '.' identifier )*;

path
	: identifier ( DOT^ { weakKeywords(); } identifier )*
	;

// Wraps the IDENT token from the lexer, in order to provide
// 'keyword as identifier' trickery.
identifier
	: IDENT
	exception
	catch [RecognitionException ex]
	{
		identifier_AST = handleIdentifierError(LT(1),ex);
	}
	;

// **** LEXER ******************************************************************

/**
 * Hibernate Query Language Lexer
 * <br>
 * This lexer provides the HQL parser with tokens.
 * @author Joshua Davis (pgmjsd@sourceforge.net)
 */
class HqlBaseLexer extends Lexer;

options {
	exportVocab=Hql;      // call the vocabulary "Hql"
	testLiterals = false;
	k=2; // needed for newline, and to distinguish '>' from '>='.
	// HHH-241 : Quoted strings don't allow unicode chars - This should fix it.
	charVocabulary='\u0000'..'\uFFFE';	// Allow any char but \uFFFF (16 bit -1, ANTLR's EOF character)
	caseSensitive = false;
	caseSensitiveLiterals = false;
}

// -- Declarations --
{
	// NOTE: The real implementations are in the subclass.
	protected void setPossibleID(boolean possibleID) {}
}

// -- Keywords --

EQ: '=';
LT: '<';
GT: '>';
SQL_NE: "<>";
NE: "!=" | "^=";
LE: "<=";
GE: ">=";

COMMA: ',';

OPEN: '(';
CLOSE: ')';
OPEN_BRACKET: '[';
CLOSE_BRACKET: ']';

CONCAT: "||";
PLUS: '+';
MINUS: '-';
STAR: '*';
DIV: '/';
COLON: ':';
PARAM: '?';

IDENT options { testLiterals=true; }
	: ID_START_LETTER ( ID_LETTER )*
		{
			// Setting this flag allows the grammar to use keywords as identifiers, if necessary.
			setPossibleID(true);
		}
	;

protected
ID_START_LETTER
	: '_'
	| '$'
	| 'a'..'z'
	| '\u0080'..'\ufffe'	// HHH-558 : Allow unicode chars in identifiers
	;

protected
ID_LETTER
	: ID_START_LETTER
	| '0'..'9'
	;

QUOTED_STRING
	: '\'' ( (ESCqs)=> ESCqs | ~'\'' )* '\''
	;

protected
ESCqs
	:
		'\'' '\''
	;

WS : ( ' '
	| '\t'
	| '\r' '\n' { newline(); }
	| '\n' { newline(); }
	| '\r' { newline(); }
	)
	{$setType(Token.SKIP);} //ignore this token
	;

//--- From the Java example grammar ---
// a numeric literal
NUM_INT
	{boolean isDecimal=false; Token t=null;}
	: '.' {_ttype = DOT;}
		( ('0'..'9')+ (EXPONENT)? (f1:FLOAT_SUFFIX {t=f1;})?
			{
				if (t != null && t.getText().toUpperCase().indexOf('F')>=0)
				{
					_ttype = NUM_FLOAT;
				}
				else
				{
					_ttype = NUM_DOUBLE; // assume double
				}
			}
		)?
	| ( '0' {isDecimal = true;} // special case for just '0'
			( ('x')
				( // hex
					// the 'e'|'E' and float suffix stuff look
					// like hex digits, hence the (...)+ doesn't
					// know when to stop: ambig.  ANTLR resolves
					// it correctly by matching immediately.  It
					// is therefore ok to hush warning.
					options { warnWhenFollowAmbig=false; }
					: HEX_DIGIT
				)+
			| ('0'..'7')+ // octal
			)?
		| ('1'..'9') ('0'..'9')* {isDecimal=true;} // non-zero decimal
		)
		( ('l') { _ttype = NUM_LONG; }

		// only check to see if it's a float if looks like decimal so far
		| {isDecimal}?
			( '.' ('0'..'9')* (EXPONENT)? (f2:FLOAT_SUFFIX {t=f2;})?
			| EXPONENT (f3:FLOAT_SUFFIX {t=f3;})?
			| f4:FLOAT_SUFFIX {t=f4;}
			)
			{
				if (t != null && t.getText().toUpperCase() .indexOf('F') >= 0)
				{
					_ttype = NUM_FLOAT;
				}
				else
				{
					_ttype = NUM_DOUBLE; // assume double
				}
			}
		)?
	;

|
||||||
|
protected
|
||||||
|
HEX_DIGIT
|
||||||
|
: ('0'..'9'|'a'..'f')
|
||||||
|
;
|
||||||
|
|
||||||
|
// a couple protected methods to assist in matching floating point numbers
|
||||||
|
protected
|
||||||
|
EXPONENT
|
||||||
|
: ('e') ('+'|'-')? ('0'..'9')+
|
||||||
|
;
|
||||||
|
|
||||||
|
protected
|
||||||
|
FLOAT_SUFFIX
|
||||||
|
: 'f'|'d'
|
||||||
|
;
|
||||||
|
|
|
@ -0,0 +1,427 @@
header
{
// $Id: sql-gen.g 10001 2006-06-08 21:08:04Z steve.ebersole@jboss.com $
package org.hibernate.hql.antlr;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

}
/**
 * SQL Generator Tree Parser, providing SQL rendering of SQL ASTs produced by the previous phase, HqlSqlWalker.  All
 * syntax decoration such as extra spaces, lack of spaces, extra parens, etc. should be added by this class.
 * <br>
 * This grammar processes the HQL/SQL AST and produces an SQL string.  The intent is to move dialect-specific
 * code into a sub-class that will override some of the methods, just like the other two grammars in this system.
 * @author Joshua Davis (joshua@hibernate.org)
 */
class SqlGeneratorBase extends TreeParser;

options {
	// Note: importVocab and exportVocab cause ANTLR to share the token type numbers between the
	// two grammars.  This means that the token type constants from the source tree are the same
	// as those in the target tree.  If this is not the case, tree translation can result in
	// token types from the *source* tree being present in the target tree.
	importVocab=HqlSql;         // import definitions from "HqlSql"
	exportVocab=Sql;            // Call the resulting definitions "Sql"
	buildAST=false;             // Don't build an AST.
}

{
	private static Log log = LogFactory.getLog(SqlGeneratorBase.class);

	/** the buffer the resulting SQL statement is written to */
	private StringBuffer buf = new StringBuffer();

	protected void out(String s) {
		buf.append(s);
	}

	/**
	 * Returns the last character written to the output, or -1 if there isn't one.
	 */
	protected int getLastChar() {
		int len = buf.length();
		if ( len == 0 )
			return -1;
		else
			return buf.charAt( len - 1 );
	}

	/**
	 * Add a space if the previous token was not a space or a parenthesis.
	 */
	protected void optionalSpace() {
		// Implemented in the sub-class.
	}

	protected void out(AST n) {
		out(n.getText());
	}

	protected void separator(AST n, String sep) {
		if (n.getNextSibling() != null)
			out(sep);
	}

	protected boolean hasText(AST a) {
		String t = a.getText();
		return t != null && t.length() > 0;
	}

	protected void fromFragmentSeparator(AST a) {
		// moved this impl into the subclass...
	}

	protected void nestedFromFragment(AST d,AST parent) {
		// moved this impl into the subclass...
	}

	protected StringBuffer getStringBuffer() {
		return buf;
	}

	protected void nyi(AST n) {
		throw new UnsupportedOperationException("Unsupported node: " + n);
	}

	protected void beginFunctionTemplate(AST m,AST i) {
		// if template is null we just write the function out as it appears in the hql statement
		out(i);
		out("(");
	}

	protected void endFunctionTemplate(AST m) {
		out(")");
	}

	protected void commaBetweenParameters(String comma) {
		out(comma);
	}
}

statement
	: selectStatement
	| updateStatement
	| deleteStatement
	| insertStatement
	;

selectStatement
	: #(SELECT { out("select "); }
		selectClause
		from
		( #(WHERE { out(" where "); } whereExpr ) )?
		( #(GROUP { out(" group by "); } groupExprs ( #(HAVING { out(" having "); } booleanExpr[false]) )? ) )?
		( #(ORDER { out(" order by "); } orderExprs ) )?
	)
	;

// Note: eats the FROM token node, as it is not valid in an update statement.
// It's outlived its usefulness after the analysis phase :)
// TODO : needed to use conditionList directly here and in deleteStatement, as whereExprs no longer works for this stuff
updateStatement
	: #(UPDATE { out("update "); }
		#( FROM fromTable )
		setClause
		(whereClause)?
	)
	;

deleteStatement
	// Note: no space needed at the end of "delete" because the from rule includes one before the "from" it outputs
	: #(DELETE { out("delete"); }
		from
		(whereClause)?
	)
	;

insertStatement
	: #(INSERT { out( "insert " ); }
		i:INTO { out( i ); out( " " ); }
		selectStatement
	)
	;

setClause
	// Simply re-use comparisonExpr, because it already correctly defines the EQ rule the
	// way it is needed here; not the most aptly named, but ah
	: #( SET { out(" set "); } comparisonExpr[false] ( { out(", "); } comparisonExpr[false] )* )
	;

whereClause
	: #(WHERE { out(" where "); } whereClauseExpr )
	;

whereClauseExpr
	: (SQL_TOKEN) => conditionList
	| booleanExpr[ false ]
	;

orderExprs
	// TODO: remove goofy space before the comma when we don't have to regression test anymore.
	: ( expr ) (dir:orderDirection { out(" "); out(dir); })? ( {out(", "); } orderExprs)?
	;

groupExprs
	// TODO: remove goofy space before the comma when we don't have to regression test anymore.
	: expr ( {out(" , "); } groupExprs)?
	;

orderDirection
	: ASCENDING
	| DESCENDING
	;

whereExpr
	// Expect the filter subtree, followed by the theta join subtree, followed by the HQL condition subtree.
	// Might need parens around the HQL condition if there is more than one subtree.
	// Put 'and' between each subtree.
	: filters
		( { out(" and "); } thetaJoins )?
		( { out(" and "); } booleanExpr [ true ] )?
	| thetaJoins
		( { out(" and "); } booleanExpr [ true ] )?
	| booleanExpr[false]
	;

filters
	: #(FILTERS conditionList )
	;

thetaJoins
	: #(THETA_JOINS conditionList )
	;

conditionList
	: sqlToken ( { out(" and "); } conditionList )?
	;

selectClause
	: #(SELECT_CLAUSE (distinctOrAll)? ( selectColumn )+ )
	;

selectColumn
	: p:selectExpr (sc:SELECT_COLUMNS { out(sc); } )? { separator( (sc != null) ? sc : p,", "); }
	;

selectExpr
	: e:selectAtom { out(e); }
	| count
	| #(CONSTRUCTOR (DOT | IDENT) ( selectColumn )+ )
	| methodCall
	| aggregate
	| c:constant { out(c); }
	| arithmeticExpr
	| PARAM { out("?"); }
	| sn:SQL_NODE { out(sn); }
	| { out("("); } selectStatement { out(")"); }
	;

count
	: #(COUNT { out("count("); }  ( distinctOrAll ) ? countExpr { out(")"); } )
	;

distinctOrAll
	: DISTINCT { out("distinct "); }
	| ALL { out("all "); }
	;

countExpr
	// Syntactic predicate resolves star all by itself, avoiding a conflict with STAR in expr.
	: ROW_STAR { out("*"); }
	| simpleExpr
	;

selectAtom
	: DOT
	| SQL_TOKEN
	| ALIAS_REF
	| SELECT_EXPR
	;

// The from-clause piece is all goofed up.  Currently, nodes of type FROM_FRAGMENT
// and JOIN_FRAGMENT can occur at any level in the FromClause sub-tree.  We really
// should come back and clean this up at some point; which I think will require
// a post-HqlSqlWalker phase to "re-align" the FromElements in a more sensible
// manner.
from
	: #(f:FROM { out(" from "); }
		(fromTable)* )
	;

fromTable
	// Write the table node (from fragment) and all the join fragments associated with it.
	: #( a:FROM_FRAGMENT { out(a); } (tableJoin [ a ])* { fromFragmentSeparator(a); } )
	| #( b:JOIN_FRAGMENT { out(b); } (tableJoin [ b ])* { fromFragmentSeparator(b); } )
	;

tableJoin [ AST parent ]
	: #( c:JOIN_FRAGMENT { out(" "); out(c); } (tableJoin [ c ] )* )
	| #( d:FROM_FRAGMENT { nestedFromFragment(d,parent); } (tableJoin [ d ] )* )
	;

booleanOp[ boolean parens ]
	: #(AND booleanExpr[true] { out(" and "); } booleanExpr[true])
	| #(OR { if (parens) out("("); } booleanExpr[false] { out(" or "); } booleanExpr[false] { if (parens) out(")"); })
	| #(NOT { out(" not ("); } booleanExpr[false] { out(")"); } )
	;

booleanExpr[ boolean parens ]
	: booleanOp [ parens ]
	| comparisonExpr [ parens ]
	| st:SQL_TOKEN { out(st); } // solely for the purpose of mapping-defined where-fragments
	;

comparisonExpr[ boolean parens ]
	: binaryComparisonExpression
	| { if (parens) out("("); } exoticComparisonExpression { if (parens) out(")"); }
	;

binaryComparisonExpression
	: #(EQ expr { out("="); } expr)
	| #(NE expr { out("<>"); } expr)
	| #(GT expr { out(">"); } expr)
	| #(GE expr { out(">="); } expr)
	| #(LT expr { out("<"); } expr)
	| #(LE expr { out("<="); } expr)
	;

exoticComparisonExpression
	: #(LIKE expr { out(" like "); } expr likeEscape )
	| #(NOT_LIKE expr { out(" not like "); } expr likeEscape)
	| #(BETWEEN expr { out(" between "); } expr { out(" and "); } expr)
	| #(NOT_BETWEEN expr { out(" not between "); } expr { out(" and "); } expr)
	| #(IN expr { out(" in"); } inList )
	| #(NOT_IN expr { out(" not in "); } inList )
	| #(EXISTS { optionalSpace(); out("exists "); } quantified )
	| #(IS_NULL expr) { out(" is null"); }
	| #(IS_NOT_NULL expr) { out(" is not null"); }
	;

likeEscape
	: ( #(ESCAPE { out(" escape "); } expr) )?
	;

inList
	: #(IN_LIST { out(" "); } ( parenSelect | simpleExprList ) )
	;

simpleExprList
	: { out("("); } (e:simpleExpr { separator(e," , "); } )* { out(")"); }
	;

// A simple expression, or a sub-select with parens around it.
expr
	: simpleExpr
	| #( VECTOR_EXPR { out("("); } (e:expr { separator(e," , "); } )* { out(")"); } )
	| parenSelect
	| #(ANY { out("any "); } quantified )
	| #(ALL { out("all "); } quantified )
	| #(SOME { out("some "); } quantified )
	;

quantified
	: { out("("); } ( sqlToken | selectStatement ) { out(")"); }
	;

parenSelect
	: { out("("); } selectStatement { out(")"); }
	;

simpleExpr
	: c:constant { out(c); }
	| NULL { out("null"); }
	| addrExpr
	| sqlToken
	| aggregate
	| methodCall
	| count
	| parameter
	| arithmeticExpr
	;

constant
	: NUM_DOUBLE
	| NUM_FLOAT
	| NUM_INT
	| NUM_LONG
	| QUOTED_STRING
	| CONSTANT
	| JAVA_CONSTANT
	| TRUE
	| FALSE
	| IDENT
	;

arithmeticExpr
	: additiveExpr
	| multiplicativeExpr
//	| #(CONCAT { out("("); } expr ( { out("||"); } expr )+ { out(")"); } )
	| #(UNARY_MINUS { out("-"); } expr)
	| caseExpr
	;

additiveExpr
	: #(PLUS expr { out("+"); } expr)
	| #(MINUS expr { out("-"); } nestedExprAfterMinusDiv)
	;

multiplicativeExpr
	: #(STAR nestedExpr { out("*"); } nestedExpr)
	| #(DIV nestedExpr { out("/"); } nestedExprAfterMinusDiv)
	;

nestedExpr
	// Generate parens around nested additive expressions, use a syntactic predicate to avoid conflicts with 'expr'.
	: (additiveExpr) => { out("("); } additiveExpr { out(")"); }
	| expr
	;

nestedExprAfterMinusDiv
	// Generate parens around nested arithmetic expressions, use a syntactic predicate to avoid conflicts with 'expr'.
	: (arithmeticExpr) => { out("("); } arithmeticExpr { out(")"); }
	| expr
	;

caseExpr
	: #(CASE { out("case"); }
		( #(WHEN { out( " when "); } booleanExpr[false] { out(" then "); } expr) )+
		( #(ELSE { out(" else "); } expr) )?
		{ out(" end"); } )
	| #(CASE2 { out("case "); } expr
		( #(WHEN { out( " when "); } expr { out(" then "); } expr) )+
		( #(ELSE { out(" else "); } expr) )?
		{ out(" end"); } )
	;

aggregate
	: #(a:AGGREGATE { out(a); out("("); }  expr { out(")"); } )
	;


methodCall
	: #(m:METHOD_CALL i:METHOD_NAME { beginFunctionTemplate(m,i); }
		( #(EXPR_LIST (arguments)? ) )?
		{ endFunctionTemplate(m); } )
	;

arguments
	: expr ( { commaBetweenParameters(", "); } expr )*
	;

parameter
	: n:NAMED_PARAM { out(n); }
	| p:PARAM { out(p); }
	;

addrExpr
	: #(r:DOT . .) { out(r); }
	| i:ALIAS_REF { out(i); }
	| j:INDEX_OP { out(j); }
	;

sqlToken
	: t:SQL_TOKEN { out(t); }
	;

@ -0,0 +1,36 @@
//$Id: AssertionFailure.java 3890 2004-06-03 16:31:32Z steveebersole $
package org.hibernate;

import org.hibernate.exception.NestableRuntimeException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Indicates failure of an assertion: a possible bug in Hibernate.
 *
 * @author Gavin King
 */

public class AssertionFailure extends NestableRuntimeException {

	private static final Log log = LogFactory.getLog(AssertionFailure.class);

	private static final String MESSAGE = "an assertion failure occured (this may indicate a bug in Hibernate, but is more likely due to unsafe use of the session)";

	public AssertionFailure(String s) {
		super(s);
		log.error(MESSAGE, this);
	}

	public AssertionFailure(String s, Throwable t) {
		super(s, t);
		log.error(MESSAGE, t);
	}

}

@ -0,0 +1,78 @@
//$Id: CacheMode.java 9194 2006-02-01 19:59:07Z steveebersole $
package org.hibernate;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

/**
 * Controls how the session interacts with the second-level
 * cache and query cache.
 *
 * @see Session#setCacheMode(CacheMode)
 * @author Gavin King
 */
public final class CacheMode implements Serializable {
	private final String name;
	private final boolean isPutEnabled;
	private final boolean isGetEnabled;
	private static final Map INSTANCES = new HashMap();

	private CacheMode(String name, boolean isPutEnabled, boolean isGetEnabled) {
		this.name=name;
		this.isPutEnabled = isPutEnabled;
		this.isGetEnabled = isGetEnabled;
	}
	public String toString() {
		return name;
	}
	public boolean isPutEnabled() {
		return isPutEnabled;
	}
	public boolean isGetEnabled() {
		return isGetEnabled;
	}
	/**
	 * The session may read items from the cache, and add items to the cache
	 */
	public static final CacheMode NORMAL = new CacheMode("NORMAL", true, true);
	/**
	 * The session will never interact with the cache, except to invalidate
	 * cache items when updates occur
	 */
	public static final CacheMode IGNORE = new CacheMode("IGNORE", false, false);
	/**
	 * The session may read items from the cache, but will not add items,
	 * except to invalidate items when updates occur
	 */
	public static final CacheMode GET = new CacheMode("GET", false, true);
	/**
	 * The session will never read items from the cache, but will add items
	 * to the cache as it reads them from the database.
	 */
	public static final CacheMode PUT = new CacheMode("PUT", true, false);

	/**
	 * The session will never read items from the cache, but will add items
	 * to the cache as it reads them from the database. In this mode, the
	 * effect of <tt>hibernate.cache.use_minimal_puts</tt> is bypassed, in
	 * order to <em>force</em> a cache refresh
	 */
	public static final CacheMode REFRESH = new CacheMode("REFRESH", true, false);

	static {
		INSTANCES.put( NORMAL.name, NORMAL );
		INSTANCES.put( IGNORE.name, IGNORE );
		INSTANCES.put( GET.name, GET );
		INSTANCES.put( PUT.name, PUT );
		INSTANCES.put( REFRESH.name, REFRESH );
	}

	private Object readResolve() {
		return INSTANCES.get( name );
	}

	public static CacheMode parse(String name) {
		return ( CacheMode ) INSTANCES.get( name );
	}
}

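A minimal usage sketch (not part of this commit) of how a CacheMode is typically selected on a Session; it assumes a SessionFactory named sessionFactory is already built and that the Session API referenced in the Javadoc above is available:

	// Hypothetical example: read from the second-level cache without adding new entries.
	Session session = sessionFactory.openSession();
	session.setCacheMode( CacheMode.GET );
	// A mode can also be resolved from its registered name via the parse() shown above.
	CacheMode refresh = CacheMode.parse( "REFRESH" );
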
@ -0,0 +1,34 @@
//$Id: CallbackException.java 4242 2004-08-11 09:10:45Z oneovthafew $
package org.hibernate;


/**
 * Should be thrown by persistent objects from <tt>Lifecycle</tt>
 * or <tt>Interceptor</tt> callbacks.
 *
 * @see Lifecycle
 * @see Interceptor
 * @author Gavin King
 */

public class CallbackException extends HibernateException {

	public CallbackException(Exception root) {
		super("An exception occurred in a callback", root);
	}

	public CallbackException(String message) {
		super(message);
	}

	public CallbackException(String message, Exception e) {
		super(message, e);
	}

}

@ -0,0 +1,77 @@
// $Id: ConnectionReleaseMode.java 8409 2005-10-14 20:28:18Z steveebersole $
package org.hibernate;

import java.io.Serializable;

/**
 * Defines the various policies by which Hibernate might release its underlying
 * JDBC connection.
 *
 * @author Steve Ebersole
 */
public class ConnectionReleaseMode implements Serializable {

	/**
	 * Indicates that JDBC connections should be aggressively released after each
	 * SQL statement is executed. In this mode, the application <em>must</em>
	 * explicitly close all iterators and scrollable results. This mode may
	 * only be used with a JTA datasource.
	 */
	public static final ConnectionReleaseMode AFTER_STATEMENT = new ConnectionReleaseMode( "after_statement" );

	/**
	 * Indicates that JDBC connections should be released after each transaction
	 * ends (works with both JTA-registered synch and HibernateTransaction API).
	 * This mode may not be used with an application server JTA datasource.
	 * <p/>
	 * This is the default mode starting in 3.1; was previously {@link #ON_CLOSE}.
	 */
	public static final ConnectionReleaseMode AFTER_TRANSACTION = new ConnectionReleaseMode( "after_transaction" );

	/**
	 * Indicates that connections should only be released when the Session is explicitly closed
	 * or disconnected; this is the legacy (Hibernate2 and pre-3.1) behavior.
	 */
	public static final ConnectionReleaseMode ON_CLOSE = new ConnectionReleaseMode( "on_close" );


	private String name;

	private ConnectionReleaseMode(String name) {
		this.name = name;
	}

	/**
	 * Override of Object.toString(). Returns the release mode name.
	 *
	 * @return The release mode name.
	 */
	public String toString() {
		return name;
	}

	/**
	 * Determine the correct ConnectionReleaseMode instance based on the given
	 * name.
	 *
	 * @param modeName The release mode name.
	 * @return The appropriate ConnectionReleaseMode instance
	 * @throws HibernateException Indicates the modeName param did not match any known modes.
	 */
	public static ConnectionReleaseMode parse(String modeName) throws HibernateException {
		if ( AFTER_STATEMENT.name.equals( modeName ) ) {
			return AFTER_STATEMENT;
		}
		else if ( AFTER_TRANSACTION.name.equals( modeName ) ) {
			return AFTER_TRANSACTION;
		}
		else if ( ON_CLOSE.name.equals( modeName ) ) {
			return ON_CLOSE;
		}
		throw new HibernateException( "could not determine appropriate connection release mode [" + modeName + "]" );
	}

	private Object readResolve() {
		return parse( name );
	}
}

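A short sketch (an illustration, not part of this commit) of how parse() resolves a configured mode string back to one of the canonical singletons defined above; the configuration-string origin is assumed, only parse() itself comes from the class:

	// Hypothetical example: map a textual setting onto a release mode.
	ConnectionReleaseMode mode = ConnectionReleaseMode.parse( "after_transaction" );
	// parse() hands back the shared constant, so identity comparison is safe here.
	assert mode == ConnectionReleaseMode.AFTER_TRANSACTION;
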
@ -0,0 +1,338 @@
//$Id: Criteria.java 9116 2006-01-23 21:21:01Z steveebersole $
package org.hibernate;

import java.util.List;

import org.hibernate.criterion.CriteriaSpecification;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Projection;
import org.hibernate.transform.ResultTransformer;

/**
 * <tt>Criteria</tt> is a simplified API for retrieving entities
 * by composing <tt>Criterion</tt> objects. This is a very
 * convenient approach for functionality like "search" screens
 * where there is a variable number of conditions to be placed
 * upon the result set.<br>
 * <br>
 * The <tt>Session</tt> is a factory for <tt>Criteria</tt>.
 * <tt>Criterion</tt> instances are usually obtained via
 * the factory methods on <tt>Restrictions</tt>. eg.
 * <pre>
 * List cats = session.createCriteria(Cat.class)
 *     .add( Restrictions.like("name", "Iz%") )
 *     .add( Restrictions.gt( "weight", new Float(minWeight) ) )
 *     .addOrder( Order.asc("age") )
 *     .list();
 * </pre>
 * You may navigate associations using <tt>createAlias()</tt> or
 * <tt>createCriteria()</tt>.
 * <pre>
 * List cats = session.createCriteria(Cat.class)
 *     .createCriteria("kittens")
 *     .add( Restrictions.like("name", "Iz%") )
 *     .list();
 * </pre>
 * <pre>
 * List cats = session.createCriteria(Cat.class)
 *     .createAlias("kittens", "kit")
 *     .add( Restrictions.like("kit.name", "Iz%") )
 *     .list();
 * </pre>
 * You may specify projection and aggregation using <tt>Projection</tt>
 * instances obtained via the factory methods on <tt>Projections</tt>.
 * <pre>
 * List cats = session.createCriteria(Cat.class)
 *     .setProjection( Projections.projectionList()
 *         .add( Projections.rowCount() )
 *         .add( Projections.avg("weight") )
 *         .add( Projections.max("weight") )
 *         .add( Projections.min("weight") )
 *         .add( Projections.groupProperty("color") )
 *     )
 *     .addOrder( Order.asc("color") )
 *     .list();
 * </pre>
 *
 * @see Session#createCriteria(java.lang.Class)
 * @see org.hibernate.criterion.Restrictions
 * @see org.hibernate.criterion.Projections
 * @see org.hibernate.criterion.Order
 * @see org.hibernate.criterion.Criterion
 * @see org.hibernate.criterion.Projection
 * @see org.hibernate.criterion.DetachedCriteria a disconnected version of this API
 * @author Gavin King
 */
public interface Criteria extends CriteriaSpecification {

	/**
	 * Get the alias of the entity encapsulated by this criteria instance.
	 *
	 * @return The alias for the encapsulated entity.
	 */
	public String getAlias();

	/**
	 * Used to specify that the query results will be a projection (scalar in
	 * nature). Implicitly specifies the {@link #PROJECTION} result transformer.
	 * <p/>
	 * The individual components contained within the given
	 * {@link Projection projection} determine the overall "shape" of the
	 * query result.
	 *
	 * @param projection The projection representing the overall "shape" of the
	 * query results.
	 * @return this (for method chaining)
	 */
	public Criteria setProjection(Projection projection);

	/**
	 * Add a {@link Criterion restriction} to constrain the results to be
	 * retrieved.
	 *
	 * @param criterion The {@link Criterion criterion} object representing the
	 * restriction to be applied.
	 * @return this (for method chaining)
	 */
	public Criteria add(Criterion criterion);

	/**
	 * Add an {@link Order ordering} to the result set.
	 *
	 * @param order The {@link Order order} object representing an ordering
	 * to be applied to the results.
	 * @return this (for method chaining)
	 */
	public Criteria addOrder(Order order);

	/**
	 * Specify an association fetching strategy for an association or a
	 * collection of values.
	 *
	 * @param associationPath a dot-separated property path
	 * @param mode The fetch mode for the referenced association
	 * @return this (for method chaining)
	 */
	public Criteria setFetchMode(String associationPath, FetchMode mode) throws HibernateException;

	/**
	 * Set the lock mode of the current entity
	 *
	 * @param lockMode The lock mode to be applied
	 * @return this (for method chaining)
	 */
	public Criteria setLockMode(LockMode lockMode);

	/**
	 * Set the lock mode of the aliased entity
	 *
	 * @param alias The previously assigned alias representing the entity to
	 * which the given lock mode should apply.
	 * @param lockMode The lock mode to be applied
	 * @return this (for method chaining)
	 */
	public Criteria setLockMode(String alias, LockMode lockMode);

	/**
	 * Join an association, assigning an alias to the joined association.
	 * <p/>
	 * Functionally equivalent to {@link #createAlias(String, String, int)} using
	 * {@link #INNER_JOIN} for the joinType.
	 *
	 * @param associationPath A dot-separated property path
	 * @param alias The alias to assign to the joined association (for later reference).
	 * @return this (for method chaining)
	 */
	public Criteria createAlias(String associationPath, String alias) throws HibernateException;

	/**
	 * Join an association using the specified join-type, assigning an alias
	 * to the joined association.
	 * <p/>
	 * The joinType is expected to be one of {@link #INNER_JOIN} (the default),
	 * {@link #FULL_JOIN}, or {@link #LEFT_JOIN}.
	 *
	 * @param associationPath A dot-separated property path
	 * @param alias The alias to assign to the joined association (for later reference).
	 * @param joinType The type of join to use.
	 * @return this (for method chaining)
	 */
	public Criteria createAlias(String associationPath, String alias, int joinType) throws HibernateException;

	/**
	 * Create a new <tt>Criteria</tt>, "rooted" at the associated entity.
	 * <p/>
	 * Functionally equivalent to {@link #createCriteria(String, int)} using
	 * {@link #INNER_JOIN} for the joinType.
	 *
	 * @param associationPath A dot-separated property path
	 * @return the created "sub criteria"
	 */
	public Criteria createCriteria(String associationPath) throws HibernateException;

	/**
	 * Create a new <tt>Criteria</tt>, "rooted" at the associated entity, using the
	 * specified join type.
	 *
	 * @param associationPath A dot-separated property path
	 * @param joinType The type of join to use.
	 * @return the created "sub criteria"
	 */
	public Criteria createCriteria(String associationPath, int joinType) throws HibernateException;

	/**
	 * Create a new <tt>Criteria</tt>, "rooted" at the associated entity,
	 * assigning the given alias.
	 * <p/>
	 * Functionally equivalent to {@link #createCriteria(String, String, int)} using
	 * {@link #INNER_JOIN} for the joinType.
	 *
	 * @param associationPath A dot-separated property path
	 * @param alias The alias to assign to the joined association (for later reference).
	 * @return the created "sub criteria"
	 */
	public Criteria createCriteria(String associationPath, String alias) throws HibernateException;

	/**
	 * Create a new <tt>Criteria</tt>, "rooted" at the associated entity,
	 * assigning the given alias and using the specified join type.
	 *
	 * @param associationPath A dot-separated property path
	 * @param alias The alias to assign to the joined association (for later reference).
	 * @param joinType The type of join to use.
	 * @return the created "sub criteria"
	 */
	public Criteria createCriteria(String associationPath, String alias, int joinType) throws HibernateException;

	/**
	 * Set a strategy for handling the query results. This determines the
	 * "shape" of the query result.
	 *
	 * @param resultTransformer The transformer to apply
	 * @return this (for method chaining)
	 *
	 * @see #ROOT_ENTITY
	 * @see #DISTINCT_ROOT_ENTITY
	 * @see #ALIAS_TO_ENTITY_MAP
	 * @see #PROJECTION
	 */
	public Criteria setResultTransformer(ResultTransformer resultTransformer);

	/**
	 * Set a limit upon the number of objects to be retrieved.
	 *
	 * @param maxResults the maximum number of results
	 * @return this (for method chaining)
	 */
	public Criteria setMaxResults(int maxResults);

	/**
	 * Set the first result to be retrieved.
	 *
	 * @param firstResult the first result to retrieve, numbered from <tt>0</tt>
	 * @return this (for method chaining)
	 */
	public Criteria setFirstResult(int firstResult);

	/**
	 * Set a fetch size for the underlying JDBC query.
	 *
	 * @param fetchSize the fetch size
	 * @return this (for method chaining)
	 *
	 * @see java.sql.Statement#setFetchSize
	 */
	public Criteria setFetchSize(int fetchSize);

	/**
	 * Set a timeout for the underlying JDBC query.
	 *
	 * @param timeout The timeout value to apply.
	 * @return this (for method chaining)
	 *
	 * @see java.sql.Statement#setQueryTimeout
	 */
	public Criteria setTimeout(int timeout);

	/**
	 * Enable caching of this query result, provided query caching is enabled
	 * for the underlying session factory.
	 *
	 * @param cacheable Should the result be considered cacheable; default is
	 * to not cache (false).
	 * @return this (for method chaining)
	 */
	public Criteria setCacheable(boolean cacheable);

	/**
	 * Set the name of the cache region to use for query result caching.
	 *
	 * @param cacheRegion the name of a query cache region, or <tt>null</tt>
	 * for the default query cache
	 * @return this (for method chaining)
	 *
	 * @see #setCacheable
	 */
	public Criteria setCacheRegion(String cacheRegion);

	/**
	 * Add a comment to the generated SQL.
	 *
	 * @param comment a human-readable string
	 * @return this (for method chaining)
	 */
	public Criteria setComment(String comment);

	/**
	 * Override the flush mode for this particular query.
	 *
	 * @param flushMode The flush mode to use.
	 * @return this (for method chaining)
	 */
	public Criteria setFlushMode(FlushMode flushMode);

	/**
	 * Override the cache mode for this particular query.
	 *
	 * @param cacheMode The cache mode to use.
	 * @return this (for method chaining)
	 */
	public Criteria setCacheMode(CacheMode cacheMode);

	/**
	 * Get the results.
	 *
	 * @return The list of matched query results.
	 */
	public List list() throws HibernateException;

	/**
	 * Get the results as an instance of {@link ScrollableResults}
	 *
	 * @return The {@link ScrollableResults} representing the matched
	 * query results.
	 */
	public ScrollableResults scroll() throws HibernateException;

	/**
	 * Get the results as an instance of {@link ScrollableResults} based on the
	 * given scroll mode.
	 *
	 * @param scrollMode Indicates the type of underlying database cursor to
	 * request.
	 * @return The {@link ScrollableResults} representing the matched
	 * query results.
	 */
	public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException;

	/**
	 * Convenience method to return a single instance that matches
	 * the query, or null if the query returns no results.
	 *
	 * @return the single result or <tt>null</tt>
	 * @throws HibernateException if there is more than one matching result
	 */
	public Object uniqueResult() throws HibernateException;

}

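A small paging sketch (illustrative only, not part of this commit) combining setFirstResult() and setMaxResults() from the interface above; the Cat entity and an open Session named session are assumed, mirroring the Javadoc examples:

	// Hypothetical example: fetch rows 20..29, ordered by name.
	List page = session.createCriteria( Cat.class )
			.addOrder( Order.asc( "name" ) )
			.setFirstResult( 20 )   // skip the first 20 rows
			.setMaxResults( 10 )    // return at most 10 rows
			.list();
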
@ -0,0 +1,32 @@
package org.hibernate;

/**
 * Raised whenever a duplicate for a certain type occurs,
 * e.g. a duplicate class, table, or property name.
 *
 * @author Max Rydahl Andersen
 *
 */
public class DuplicateMappingException extends MappingException {

	private final String name;
	private final String type;

	public DuplicateMappingException(String customMessage, String type, String name) {
		super(customMessage);
		this.type=type;
		this.name=name;
	}

	public DuplicateMappingException(String type, String name) {
		this("Duplicate " + type + " mapping " + name, type, name);
	}

	public String getType() {
		return type;
	}

	public String getName() {
		return name;
	}
}

@ -0,0 +1,98 @@
//$Id: EmptyInterceptor.java 7859 2005-08-11 21:57:33Z oneovthafew $
package org.hibernate;

import java.io.Serializable;
import java.util.Iterator;

import org.hibernate.type.Type;

/**
 * An interceptor that does nothing. May be used as a base class
 * for application-defined custom interceptors.
 *
 * @author Gavin King
 */
public class EmptyInterceptor implements Interceptor, Serializable {

	public static final Interceptor INSTANCE = new EmptyInterceptor();

	protected EmptyInterceptor() {}

	public void onDelete(
			Object entity,
			Serializable id,
			Object[] state,
			String[] propertyNames,
			Type[] types) {}

	public boolean onFlushDirty(
			Object entity,
			Serializable id,
			Object[] currentState,
			Object[] previousState,
			String[] propertyNames,
			Type[] types) {
		return false;
	}

	public boolean onLoad(
			Object entity,
			Serializable id,
			Object[] state,
			String[] propertyNames,
			Type[] types) {
		return false;
	}

	public boolean onSave(
			Object entity,
			Serializable id,
			Object[] state,
			String[] propertyNames,
			Type[] types) {
		return false;
	}

	public void postFlush(Iterator entities) {}
	public void preFlush(Iterator entities) {}

	public Boolean isTransient(Object entity) {
		return null;
	}

	public Object instantiate(String entityName, EntityMode entityMode, Serializable id) {
		return null;
	}

	public int[] findDirty(Object entity,
			Serializable id,
			Object[] currentState,
			Object[] previousState,
			String[] propertyNames,
			Type[] types) {
		return null;
	}

	public String getEntityName(Object object) {
		return null;
	}

	public Object getEntity(String entityName, Serializable id) {
		return null;
	}

	public void afterTransactionBegin(Transaction tx) {}
	public void afterTransactionCompletion(Transaction tx) {}
	public void beforeTransactionCompletion(Transaction tx) {}

	public String onPrepareStatement(String sql) {
		return sql;
	}

	public void onCollectionRemove(Object collection, Serializable key) throws CallbackException {}

	public void onCollectionRecreate(Object collection, Serializable key) throws CallbackException {}

	public void onCollectionUpdate(Object collection, Serializable key) throws CallbackException {}

}

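A minimal sketch (not part of this commit) of the intended usage pattern for the class above: an application interceptor extends EmptyInterceptor and overrides only the callbacks it cares about, inheriting the no-op defaults for the rest. The AuditInterceptor name is hypothetical; the onSave signature is the one declared above:

	// Hypothetical example: inspect entity state as it is saved.
	public class AuditInterceptor extends EmptyInterceptor {
		public boolean onSave(Object entity, Serializable id, Object[] state,
				String[] propertyNames, Type[] types) {
			// examine or adjust state here; return true only if state was modified
			return false;
		}
	}
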
@ -0,0 +1,49 @@
// $Id: EntityMode.java 8697 2005-11-29 14:29:24Z steveebersole $
package org.hibernate;

import java.util.Map;
import java.util.HashMap;
import java.io.Serializable;

/**
 * Defines the representation modes available for entities.
 *
 * @author Steve Ebersole
 */
public class EntityMode implements Serializable {

	private static final Map INSTANCES = new HashMap();

	public static final EntityMode POJO = new EntityMode( "pojo" );
	public static final EntityMode DOM4J = new EntityMode( "dom4j" );
	public static final EntityMode MAP = new EntityMode( "dynamic-map" );

	static {
		INSTANCES.put( POJO.name, POJO );
		INSTANCES.put( DOM4J.name, DOM4J );
		INSTANCES.put( MAP.name, MAP );
	}

	private final String name;

	public EntityMode(String name) {
		this.name = name;
	}

	public String toString() {
		return name;
	}

	private Object readResolve() {
		return INSTANCES.get( name );
	}

	public static EntityMode parse(String name) {
		EntityMode rtn = ( EntityMode ) INSTANCES.get( name );
		if ( rtn == null ) {
			// default is POJO
			rtn = POJO;
		}
		return rtn;
	}
}

@ -0,0 +1,70 @@
//$Id: FetchMode.java 5060 2004-12-24 03:11:05Z oneovthafew $
package org.hibernate;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

/**
 * Represents an association fetching strategy. This is used
 * together with the <tt>Criteria</tt> API to specify runtime
 * fetching strategies.<br>
 * <br>
 * For HQL queries, use the <tt>FETCH</tt> keyword instead.
 *
 * @see Criteria#setFetchMode(java.lang.String, FetchMode)
 * @author Gavin King
 */
public final class FetchMode implements Serializable {
	private final String name;
	private static final Map INSTANCES = new HashMap();

	private FetchMode(String name) {
		this.name=name;
	}
	public String toString() {
		return name;
	}
	/**
	 * Default to the setting configured in the mapping file.
	 */
	public static final FetchMode DEFAULT = new FetchMode("DEFAULT");

	/**
	 * Fetch using an outer join. Equivalent to <tt>fetch="join"</tt>.
	 */
	public static final FetchMode JOIN = new FetchMode("JOIN");
	/**
	 * Fetch eagerly, using a separate select. Equivalent to
	 * <tt>fetch="select"</tt>.
	 */
	public static final FetchMode SELECT = new FetchMode("SELECT");

	/**
	 * Fetch lazily. Equivalent to <tt>outer-join="false"</tt>.
	 * @deprecated use <tt>FetchMode.SELECT</tt>
	 */
	public static final FetchMode LAZY = SELECT;
	/**
	 * Fetch eagerly, using an outer join. Equivalent to
	 * <tt>outer-join="true"</tt>.
	 * @deprecated use <tt>FetchMode.JOIN</tt>
	 */
	public static final FetchMode EAGER = JOIN;

	static {
		INSTANCES.put( JOIN.name, JOIN );
		INSTANCES.put( SELECT.name, SELECT );
		INSTANCES.put( DEFAULT.name, DEFAULT );
	}

	private Object readResolve() {
		return INSTANCES.get(name);
	}

}

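A brief sketch (illustrative only, not part of this commit) tying FetchMode to the Criteria.setFetchMode() method defined earlier in this commit; the Cat entity, its "kittens" collection, and an open Session named session are assumed:

	// Hypothetical example: override the mapped strategy and outer-join fetch "kittens" for this query.
	List cats = session.createCriteria( Cat.class )
			.setFetchMode( "kittens", FetchMode.JOIN )
			.list();
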
@ -0,0 +1,68 @@
// $Id: Filter.java 8754 2005-12-05 23:36:59Z steveebersole $
package org.hibernate;

import org.hibernate.engine.FilterDefinition;

import java.util.Collection;

/**
 * Type definition of Filter. Filter defines the user's view into enabled dynamic filters,
 * allowing them to set filter parameter values.
 *
 * @author Steve Ebersole
 */
public interface Filter {

	/**
	 * Get the name of this filter.
	 *
	 * @return This filter's name.
	 */
	public String getName();

	/**
	 * Get the filter definition containing additional information about the
	 * filter (such as default-condition and expected parameter names/types).
	 *
	 * @return The filter definition
	 */
	public FilterDefinition getFilterDefinition();


	/**
	 * Set the named parameter's value for this filter.
	 *
	 * @param name The parameter's name.
	 * @param value The value to be applied.
	 * @return This FilterImpl instance (for method chaining).
	 */
	public Filter setParameter(String name, Object value);

	/**
	 * Set the named parameter's value list for this filter. Used
	 * in conjunction with IN-style filter criteria.
	 *
	 * @param name The parameter's name.
	 * @param values The values to be expanded into an SQL IN list.
	 * @return This FilterImpl instance (for method chaining).
	 */
	public Filter setParameterList(String name, Collection values);

	/**
	 * Set the named parameter's value list for this filter. Used
	 * in conjunction with IN-style filter criteria.
	 *
	 * @param name The parameter's name.
	 * @param values The values to be expanded into an SQL IN list.
	 * @return This FilterImpl instance (for method chaining).
	 */
	public Filter setParameterList(String name, Object[] values);

	/**
	 * Perform validation of the filter state. This is used to verify the
	 * state of the filter after its enablement and before its use.
	 *
	 * @throws HibernateException If the state is not currently valid.
	 */
	public void validate() throws HibernateException;
}

@ -0,0 +1,92 @@
//$Id: FlushMode.java 10469 2006-09-08 12:23:18Z steve.ebersole@jboss.com $
package org.hibernate;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

/**
 * Represents a flushing strategy. The flush process synchronizes
 * database state with session state by detecting state changes
 * and executing SQL statements.
 *
 * @see Session#setFlushMode(FlushMode)
 * @see Query#setFlushMode(FlushMode)
 * @see Criteria#setFlushMode(FlushMode)
 *
 * @author Gavin King
 */
public final class FlushMode implements Serializable {
	private static final Map INSTANCES = new HashMap();

	private final int level;
	private final String name;

	private FlushMode(int level, String name) {
		this.level = level;
		this.name = name;
	}

	public String toString() {
		return name;
	}

	/**
	 * The {@link Session} is never flushed unless {@link Session#flush}
	 * is explicitly called by the application. This mode is very
	 * efficient for read only transactions.
	 *
	 * @deprecated use {@link #MANUAL} instead.
	 */
	public static final FlushMode NEVER = new FlushMode( 0, "NEVER" );

	/**
	 * The {@link Session} is only ever flushed when {@link Session#flush}
	 * is explicitly called by the application. This mode is very
	 * efficient for read only transactions.
	 */
	public static final FlushMode MANUAL = new FlushMode( 0, "MANUAL" );

	/**
	 * The {@link Session} is flushed when {@link Transaction#commit}
	 * is called.
	 */
	public static final FlushMode COMMIT = new FlushMode(5, "COMMIT");

	/**
	 * The {@link Session} is sometimes flushed before query execution
	 * in order to ensure that queries never return stale state. This
	 * is the default flush mode.
	 */
	public static final FlushMode AUTO = new FlushMode(10, "AUTO");

	/**
	 * The {@link Session} is flushed before every query. This is
	 * almost always unnecessary and inefficient.
	 */
	public static final FlushMode ALWAYS = new FlushMode(20, "ALWAYS");

	public boolean lessThan(FlushMode other) {
		return this.level<other.level;
	}

	static {
		INSTANCES.put( NEVER.name, NEVER );
		INSTANCES.put( MANUAL.name, MANUAL );
		INSTANCES.put( AUTO.name, AUTO );
		INSTANCES.put( ALWAYS.name, ALWAYS );
		INSTANCES.put( COMMIT.name, COMMIT );
	}

	public static boolean isManualFlushMode(FlushMode mode) {
		return MANUAL.level == mode.level;
	}

	private Object readResolve() {
		return INSTANCES.get( name );
	}

	public static FlushMode parse(String name) {
		return ( FlushMode ) INSTANCES.get( name );
	}
}

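A short sketch (illustrative only, not part of this commit) of a read-only unit of work using the MANUAL mode defined above; it assumes a built SessionFactory named sessionFactory and relies on the Session.setFlushMode(FlushMode) method referenced in the Javadoc:

	// Hypothetical example: suppress automatic flushing for a read-only pass over the data.
	Session session = sessionFactory.openSession();
	session.setFlushMode( FlushMode.MANUAL );   // flush only when flush() is called explicitly
	// ... run queries here; no dirty-checking flush will be triggered ...
	session.close();
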
@ -0,0 +1,454 @@
//$Id: Hibernate.java 10009 2006-06-10 03:24:05Z epbernard $
package org.hibernate;

import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.Serializable;
import java.sql.Blob;
import java.sql.Clob;
import java.util.Iterator;
import java.util.Properties;

import org.hibernate.collection.PersistentCollection;
import org.hibernate.engine.HibernateIterator;
import org.hibernate.intercept.FieldInterceptionHelper;
import org.hibernate.intercept.FieldInterceptor;
import org.hibernate.lob.BlobImpl;
import org.hibernate.lob.ClobImpl;
import org.hibernate.lob.SerializableBlob;
import org.hibernate.lob.SerializableClob;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.proxy.LazyInitializer;
import org.hibernate.type.AnyType;
import org.hibernate.type.BigDecimalType;
import org.hibernate.type.BigIntegerType;
import org.hibernate.type.BinaryType;
import org.hibernate.type.BlobType;
import org.hibernate.type.BooleanType;
import org.hibernate.type.ByteType;
import org.hibernate.type.CalendarDateType;
import org.hibernate.type.CalendarType;
import org.hibernate.type.CharacterType;
import org.hibernate.type.ClassType;
import org.hibernate.type.ClobType;
import org.hibernate.type.CompositeCustomType;
import org.hibernate.type.CurrencyType;
import org.hibernate.type.CustomType;
import org.hibernate.type.DateType;
import org.hibernate.type.DoubleType;
import org.hibernate.type.FloatType;
import org.hibernate.type.IntegerType;
import org.hibernate.type.LocaleType;
import org.hibernate.type.LongType;
import org.hibernate.type.ManyToOneType;
import org.hibernate.type.NullableType;
import org.hibernate.type.SerializableType;
import org.hibernate.type.ShortType;
import org.hibernate.type.StringType;
import org.hibernate.type.TextType;
import org.hibernate.type.TimeType;
import org.hibernate.type.TimeZoneType;
import org.hibernate.type.TimestampType;
import org.hibernate.type.TrueFalseType;
import org.hibernate.type.Type;
import org.hibernate.type.YesNoType;
import org.hibernate.type.CharArrayType;
import org.hibernate.type.WrapperBinaryType;
import org.hibernate.type.CharacterArrayType;
import org.hibernate.usertype.CompositeUserType;

/**
 * <ul>
 * <li>Provides access to the full range of Hibernate built-in types. <tt>Type</tt>
 * instances may be used to bind values to query parameters.
 * <li>A factory for new <tt>Blob</tt>s and <tt>Clob</tt>s.
 * <li>Defines static methods for manipulation of proxies.
 * </ul>
 *
 * @author Gavin King
 * @see java.sql.Clob
 * @see java.sql.Blob
 * @see org.hibernate.type.Type
 */

public final class Hibernate {

	/**
	 * Hibernate <tt>long</tt> type.
	 */
	public static final NullableType LONG = new LongType();
	/**
	 * Hibernate <tt>short</tt> type.
	 */
	public static final NullableType SHORT = new ShortType();
	/**
	 * Hibernate <tt>integer</tt> type.
	 */
	public static final NullableType INTEGER = new IntegerType();
	/**
	 * Hibernate <tt>byte</tt> type.
	 */
	public static final NullableType BYTE = new ByteType();
	/**
	 * Hibernate <tt>float</tt> type.
	 */
	public static final NullableType FLOAT = new FloatType();
	/**
	 * Hibernate <tt>double</tt> type.
	 */
	public static final NullableType DOUBLE = new DoubleType();
	/**
	 * Hibernate <tt>character</tt> type.
	 */
	public static final NullableType CHARACTER = new CharacterType();
	/**
	 * Hibernate <tt>string</tt> type.
	 */
	public static final NullableType STRING = new StringType();
	/**
	 * Hibernate <tt>time</tt> type.
	 */
	public static final NullableType TIME = new TimeType();
	/**
	 * Hibernate <tt>date</tt> type.
	 */
	public static final NullableType DATE = new DateType();
	/**
	 * Hibernate <tt>timestamp</tt> type.
	 */
	public static final NullableType TIMESTAMP = new TimestampType();
	/**
	 * Hibernate <tt>boolean</tt> type.
	 */
	public static final NullableType BOOLEAN = new BooleanType();
	/**
	 * Hibernate <tt>true_false</tt> type.
	 */
	public static final NullableType TRUE_FALSE = new TrueFalseType();
	/**
	 * Hibernate <tt>yes_no</tt> type.
	 */
	public static final NullableType YES_NO = new YesNoType();
	/**
	 * Hibernate <tt>big_decimal</tt> type.
	 */
	public static final NullableType BIG_DECIMAL = new BigDecimalType();
	/**
	 * Hibernate <tt>big_integer</tt> type.
	 */
	public static final NullableType BIG_INTEGER = new BigIntegerType();
	/**
	 * Hibernate <tt>binary</tt> type.
	 */
	public static final NullableType BINARY = new BinaryType();
	/**
	 * Hibernate <tt>wrapper-binary</tt> type.
	 */
	public static final NullableType WRAPPER_BINARY = new WrapperBinaryType();
	/**
	 * Hibernate char[] type.
	 */
	public static final NullableType CHAR_ARRAY = new CharArrayType();
	/**
	 * Hibernate Character[] type.
	 */
	public static final NullableType CHARACTER_ARRAY = new CharacterArrayType();
	/**
	 * Hibernate <tt>text</tt> type.
	 */
	public static final NullableType TEXT = new TextType();
	/**
	 * Hibernate <tt>blob</tt> type.
	 */
	public static final Type BLOB = new BlobType();
/**
|
||||||
|
* Hibernate <tt>clob</tt> type.
|
||||||
|
*/
|
||||||
|
public static final Type CLOB = new ClobType();
|
||||||
|
/**
|
||||||
|
* Hibernate <tt>calendar</tt> type.
|
||||||
|
*/
|
||||||
|
public static final NullableType CALENDAR = new CalendarType();
|
||||||
|
/**
|
||||||
|
* Hibernate <tt>calendar_date</tt> type.
|
||||||
|
*/
|
||||||
|
public static final NullableType CALENDAR_DATE = new CalendarDateType();
|
||||||
|
/**
|
||||||
|
* Hibernate <tt>locale</tt> type.
|
||||||
|
*/
|
||||||
|
public static final NullableType LOCALE = new LocaleType();
|
||||||
|
/**
|
||||||
|
* Hibernate <tt>currency</tt> type.
|
||||||
|
*/
|
||||||
|
public static final NullableType CURRENCY = new CurrencyType();
|
||||||
|
/**
|
||||||
|
* Hibernate <tt>timezone</tt> type.
|
||||||
|
*/
|
||||||
|
public static final NullableType TIMEZONE = new TimeZoneType();
|
||||||
|
/**
|
||||||
|
* Hibernate <tt>class</tt> type.
|
||||||
|
*/
|
||||||
|
public static final NullableType CLASS = new ClassType();
|
||||||
|
/**
|
||||||
|
* Hibernate <tt>serializable</tt> type.
|
||||||
|
*/
|
||||||
|
public static final NullableType SERIALIZABLE = new SerializableType( Serializable.class );
|
||||||
|
/**
|
||||||
|
* Hibernate <tt>object</tt> type.
|
||||||
|
*/
|
||||||
|
public static final Type OBJECT = new AnyType();
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cannot be instantiated.
|
||||||
|
*/
|
||||||
|
private Hibernate() {
|
||||||
|
throw new UnsupportedOperationException();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A Hibernate <tt>serializable</tt> type.
|
||||||
|
*/
|
||||||
|
public static Type serializable(Class serializableClass) {
|
||||||
|
return new SerializableType( serializableClass );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A Hibernate <tt>any</tt> type.
|
||||||
|
*
|
||||||
|
* @param metaType a type mapping <tt>java.lang.Class</tt> to a single column
|
||||||
|
* @param identifierType the entity identifier type
|
||||||
|
* @return the Type
|
||||||
|
*/
|
||||||
|
public static Type any(Type metaType, Type identifierType) {
|
||||||
|
return new AnyType( metaType, identifierType );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A Hibernate persistent object (entity) type.
|
||||||
|
*
|
||||||
|
* @param persistentClass a mapped entity class
|
||||||
|
*/
|
||||||
|
public static Type entity(Class persistentClass) {
|
||||||
|
// not really a many-to-one association *necessarily*
|
||||||
|
return new ManyToOneType( persistentClass.getName() );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A Hibernate persistent object (entity) type.
|
||||||
|
*
|
||||||
|
* @param entityName a mapped entity class
|
||||||
|
*/
|
||||||
|
public static Type entity(String entityName) {
|
||||||
|
// not really a many-to-one association *necessarily*
|
||||||
|
return new ManyToOneType( entityName );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A Hibernate custom type.
|
||||||
|
*
|
||||||
|
* @param userTypeClass a class that implements <tt>UserType</tt>
|
||||||
|
*/
|
||||||
|
public static Type custom(Class userTypeClass) throws HibernateException {
|
||||||
|
return custom( userTypeClass, null );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A Hibernate parameterizable custom type.
|
||||||
|
*
|
||||||
|
* @param userTypeClass a class that implements <tt>UserType and ParameterizableType</tt>
|
||||||
|
* @param parameterNames the names of the parameters passed to the type
|
||||||
|
* @param parameterValues the values of the parameters passed to the type. They must match
|
||||||
|
* up with the order and length of the parameterNames array.
|
||||||
|
*/
|
||||||
|
public static Type custom(Class userTypeClass, String[] parameterNames, String[] parameterValues)
|
||||||
|
throws HibernateException {
|
||||||
|
Properties parameters = new Properties();
|
||||||
|
for ( int i = 0; i < parameterNames.length; i++ ) {
|
||||||
|
parameters.setProperty( parameterNames[i], parameterValues[i] );
|
||||||
|
}
|
||||||
|
return custom( userTypeClass, parameters );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A Hibernate parameterizable custom type.
|
||||||
|
*
|
||||||
|
* @param userTypeClass a class that implements <tt>UserType and ParameterizableType</tt>
|
||||||
|
* @param parameters the parameters as a collection of name/value pairs
|
||||||
|
*/
|
||||||
|
public static Type custom(Class userTypeClass, Properties parameters)
|
||||||
|
throws HibernateException {
|
||||||
|
if ( CompositeUserType.class.isAssignableFrom( userTypeClass ) ) {
|
||||||
|
CompositeCustomType type = new CompositeCustomType( userTypeClass, parameters );
|
||||||
|
return type;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
CustomType type = new CustomType( userTypeClass, parameters );
|
||||||
|
return type;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Force initialization of a proxy or persistent collection.
|
||||||
|
* <p/>
|
||||||
|
 * Note: This only ensures initialization of a proxy object or collection;
|
||||||
|
* it is not guaranteed that the elements INSIDE the collection will be initialized/materialized.
|
||||||
|
*
|
||||||
|
* @param proxy a persistable object, proxy, persistent collection or <tt>null</tt>
|
||||||
|
 * @throws HibernateException if we can't initialize the proxy at this time, e.g. the <tt>Session</tt> was closed
|
||||||
|
*/
|
||||||
|
public static void initialize(Object proxy) throws HibernateException {
|
||||||
|
if ( proxy == null ) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
else if ( proxy instanceof HibernateProxy ) {
|
||||||
|
( ( HibernateProxy ) proxy ).getHibernateLazyInitializer().initialize();
|
||||||
|
}
|
||||||
|
else if ( proxy instanceof PersistentCollection ) {
|
||||||
|
( ( PersistentCollection ) proxy ).forceInitialization();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the proxy or persistent collection is initialized.
|
||||||
|
*
|
||||||
|
* @param proxy a persistable object, proxy, persistent collection or <tt>null</tt>
|
||||||
|
* @return true if the argument is already initialized, or is not a proxy or collection
|
||||||
|
*/
|
||||||
|
public static boolean isInitialized(Object proxy) {
|
||||||
|
if ( proxy instanceof HibernateProxy ) {
|
||||||
|
return !( ( HibernateProxy ) proxy ).getHibernateLazyInitializer().isUninitialized();
|
||||||
|
}
|
||||||
|
else if ( proxy instanceof PersistentCollection ) {
|
||||||
|
return ( ( PersistentCollection ) proxy ).wasInitialized();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the true, underlying class of a proxied persistent class. This operation
|
||||||
|
* will initialize a proxy by side-effect.
|
||||||
|
*
|
||||||
|
* @param proxy a persistable object or proxy
|
||||||
|
* @return the true class of the instance
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public static Class getClass(Object proxy) {
|
||||||
|
if ( proxy instanceof HibernateProxy ) {
|
||||||
|
return ( ( HibernateProxy ) proxy ).getHibernateLazyInitializer()
|
||||||
|
.getImplementation()
|
||||||
|
.getClass();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return proxy.getClass();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Blob</tt>. The returned object will be initially immutable.
|
||||||
|
*
|
||||||
|
* @param bytes a byte array
|
||||||
|
* @return the Blob
|
||||||
|
*/
|
||||||
|
public static Blob createBlob(byte[] bytes) {
|
||||||
|
return new SerializableBlob( new BlobImpl( bytes ) );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Blob</tt>. The returned object will be initially immutable.
|
||||||
|
*
|
||||||
|
* @param stream a binary stream
|
||||||
|
* @param length the number of bytes in the stream
|
||||||
|
* @return the Blob
|
||||||
|
*/
|
||||||
|
public static Blob createBlob(InputStream stream, int length) {
|
||||||
|
return new SerializableBlob( new BlobImpl( stream, length ) );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Blob</tt>. The returned object will be initially immutable.
|
||||||
|
*
|
||||||
|
* @param stream a binary stream
|
||||||
|
* @return the Blob
|
||||||
|
* @throws IOException
|
||||||
|
*/
|
||||||
|
public static Blob createBlob(InputStream stream) throws IOException {
|
||||||
|
return new SerializableBlob( new BlobImpl( stream, stream.available() ) );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Clob</tt>. The returned object will be initially immutable.
|
||||||
|
*
|
||||||
|
* @param string a <tt>String</tt>
|
||||||
|
*/
|
||||||
|
public static Clob createClob(String string) {
|
||||||
|
return new SerializableClob( new ClobImpl( string ) );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Clob</tt>. The returned object will be initially immutable.
|
||||||
|
*
|
||||||
|
* @param reader a character stream
|
||||||
|
* @param length the number of characters in the stream
|
||||||
|
*/
|
||||||
|
public static Clob createClob(Reader reader, int length) {
|
||||||
|
return new SerializableClob( new ClobImpl( reader, length ) );
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close an <tt>Iterator</tt> created by <tt>iterate()</tt> immediately,
|
||||||
|
* instead of waiting until the session is closed or disconnected.
|
||||||
|
*
|
||||||
|
* @param iterator an <tt>Iterator</tt> created by <tt>iterate()</tt>
|
||||||
|
* @throws HibernateException
|
||||||
|
* @see org.hibernate.Query#iterate
|
||||||
|
* @see Query#iterate()
|
||||||
|
*/
|
||||||
|
public static void close(Iterator iterator) throws HibernateException {
|
||||||
|
if ( iterator instanceof HibernateIterator ) {
|
||||||
|
( ( HibernateIterator ) iterator ).close();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
throw new IllegalArgumentException( "not a Hibernate iterator" );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the property is initialized. If the named property does not exist
|
||||||
|
* or is not persistent, this method always returns <tt>true</tt>.
|
||||||
|
*
|
||||||
|
* @param proxy The potential proxy
|
||||||
|
* @param propertyName the name of a persistent attribute of the object
|
||||||
|
* @return true if the named property of the object is not listed as uninitialized
|
||||||
|
* @return false if the object is an uninitialized proxy, or the named property is uninitialized
|
||||||
|
*/
|
||||||
|
public static boolean isPropertyInitialized(Object proxy, String propertyName) {
|
||||||
|
|
||||||
|
Object entity;
|
||||||
|
if ( proxy instanceof HibernateProxy ) {
|
||||||
|
LazyInitializer li = ( ( HibernateProxy ) proxy ).getHibernateLazyInitializer();
|
||||||
|
if ( li.isUninitialized() ) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
entity = li.getImplementation();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
entity = proxy;
|
||||||
|
}
|
||||||
|
|
||||||
|
if ( FieldInterceptionHelper.isInstrumented( entity ) ) {
|
||||||
|
FieldInterceptor interceptor = FieldInterceptionHelper.extractFieldInterceptor( entity );
|
||||||
|
return interceptor == null || interceptor.isInitialized( propertyName );
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
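A short usage sketch of the proxy and LOB helpers defined above; the entity class Item, the identifier value, and the open Session "session" are illustrative assumptions, not part of this commit:

// load() may return an uninitialized proxy; force it before the session closes.
Item item = (Item) session.load( Item.class, new Long( 42 ) );
if ( !Hibernate.isInitialized( item ) ) {
    Hibernate.initialize( item );                   // HibernateException if the session is already closed
}
Class entityClass = Hibernate.getClass( item );     // unwraps the proxy to the underlying entity class
Blob logo = Hibernate.createBlob( new byte[] { 1, 2, 3 } );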
|
|
@ -0,0 +1,34 @@
|
||||||
|
//$Id: HibernateException.java 5683 2005-02-12 03:09:22Z oneovthafew $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import org.hibernate.exception.NestableRuntimeException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Any exception that occurs inside the persistence layer
|
||||||
|
* or JDBC driver. <tt>SQLException</tt>s are always wrapped
|
||||||
|
* by instances of <tt>JDBCException</tt>.
|
||||||
|
*
|
||||||
|
* @see JDBCException
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
|
||||||
|
public class HibernateException extends NestableRuntimeException {
|
||||||
|
|
||||||
|
public HibernateException(Throwable root) {
|
||||||
|
super(root);
|
||||||
|
}
|
||||||
|
|
||||||
|
public HibernateException(String string, Throwable root) {
|
||||||
|
super(string, root);
|
||||||
|
}
|
||||||
|
|
||||||
|
public HibernateException(String s) {
|
||||||
|
super(s);
|
||||||
|
}
|
||||||
|
}
@ -0,0 +1,44 @@
|
||||||
|
//$Id: InstantiationException.java 6781 2005-05-14 17:27:57Z oneovthafew $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Thrown if Hibernate can't instantiate an entity or component
|
||||||
|
* class at runtime.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
|
||||||
|
public class InstantiationException extends HibernateException {
|
||||||
|
|
||||||
|
private final Class clazz;
|
||||||
|
|
||||||
|
public InstantiationException(String s, Class clazz, Throwable root) {
|
||||||
|
super(s, root);
|
||||||
|
this.clazz = clazz;
|
||||||
|
}
|
||||||
|
|
||||||
|
public InstantiationException(String s, Class clazz) {
|
||||||
|
super(s);
|
||||||
|
this.clazz = clazz;
|
||||||
|
}
|
||||||
|
|
||||||
|
public InstantiationException(String s, Class clazz, Exception e) {
|
||||||
|
super(s, e);
|
||||||
|
this.clazz = clazz;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Class getPersistentClass() {
|
||||||
|
return clazz;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getMessage() {
|
||||||
|
return super.getMessage() + clazz.getName();
|
||||||
|
}
|
||||||
|
|
||||||
|
}
@ -0,0 +1,155 @@
|
||||||
|
//$Id: Interceptor.java 7883 2005-08-12 20:03:07Z oneovthafew $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
import java.util.Iterator;
|
||||||
|
|
||||||
|
import org.hibernate.type.Type;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Allows user code to inspect and/or change property values.
|
||||||
|
* <br><br>
|
||||||
|
* Inspection occurs before property values are written and after they are read
|
||||||
|
* from the database.<br>
|
||||||
|
* <br>
|
||||||
|
* There might be a single instance of <tt>Interceptor</tt> for a <tt>SessionFactory</tt>, or a new instance
|
||||||
|
* might be specified for each <tt>Session</tt>. Whichever approach is used, the interceptor must be
|
||||||
|
* serializable if the <tt>Session</tt> is to be serializable. This means that <tt>SessionFactory</tt>-scoped
|
||||||
|
* interceptors should implement <tt>readResolve()</tt>.<br>
|
||||||
|
* <br>
|
||||||
|
* The <tt>Session</tt> may not be invoked from a callback (nor may a callback cause a collection or proxy to
|
||||||
|
* be lazily initialized).<br>
|
||||||
|
* <br>
|
||||||
|
* Instead of implementing this interface directly, it is usually better to extend <tt>EmptyInterceptor</tt>
|
||||||
|
* and override only the callback methods of interest.
|
||||||
|
*
|
||||||
|
* @see SessionFactory#openSession(Interceptor)
|
||||||
|
* @see org.hibernate.cfg.Configuration#setInterceptor(Interceptor)
|
||||||
|
* @see EmptyInterceptor
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public interface Interceptor {
|
||||||
|
/**
|
||||||
|
* Called just before an object is initialized. The interceptor may change the <tt>state</tt>, which will
|
||||||
|
* be propagated to the persistent object. Note that when this method is called, <tt>entity</tt> will be
|
||||||
|
* an empty uninitialized instance of the class.
|
||||||
|
*
|
||||||
|
* @return <tt>true</tt> if the user modified the <tt>state</tt> in any way.
|
||||||
|
*/
|
||||||
|
public boolean onLoad(Object entity, Serializable id, Object[] state, String[] propertyNames, Type[] types) throws CallbackException;
|
||||||
|
/**
|
||||||
|
* Called when an object is detected to be dirty, during a flush. The interceptor may modify the detected
|
||||||
|
* <tt>currentState</tt>, which will be propagated to both the database and the persistent object.
|
||||||
|
* Note that not all flushes end in actual synchronization with the database, in which case the
|
||||||
|
* new <tt>currentState</tt> will be propagated to the object, but not necessarily (immediately) to
|
||||||
|
* the database. It is strongly recommended that the interceptor <b>not</b> modify the <tt>previousState</tt>.
|
||||||
|
*
|
||||||
|
* @return <tt>true</tt> if the user modified the <tt>currentState</tt> in any way.
|
||||||
|
*/
|
||||||
|
public boolean onFlushDirty(Object entity, Serializable id, Object[] currentState, Object[] previousState, String[] propertyNames, Type[] types) throws CallbackException;
|
||||||
|
/**
|
||||||
|
* Called before an object is saved. The interceptor may modify the <tt>state</tt>, which will be used for
|
||||||
|
* the SQL <tt>INSERT</tt> and propagated to the persistent object.
|
||||||
|
*
|
||||||
|
* @return <tt>true</tt> if the user modified the <tt>state</tt> in any way.
|
||||||
|
*/
|
||||||
|
public boolean onSave(Object entity, Serializable id, Object[] state, String[] propertyNames, Type[] types) throws CallbackException;
|
||||||
|
/**
|
||||||
|
* Called before an object is deleted. It is not recommended that the interceptor modify the <tt>state</tt>.
|
||||||
|
*/
|
||||||
|
public void onDelete(Object entity, Serializable id, Object[] state, String[] propertyNames, Type[] types) throws CallbackException;
|
||||||
|
/**
|
||||||
|
* Called before a collection is (re)created.
|
||||||
|
*/
|
||||||
|
public void onCollectionRecreate(Object collection, Serializable key) throws CallbackException;
|
||||||
|
/**
|
||||||
|
* Called before a collection is deleted.
|
||||||
|
*/
|
||||||
|
public void onCollectionRemove(Object collection, Serializable key) throws CallbackException;
|
||||||
|
/**
|
||||||
|
* Called before a collection is updated.
|
||||||
|
*/
|
||||||
|
public void onCollectionUpdate(Object collection, Serializable key) throws CallbackException;
|
||||||
|
/**
|
||||||
|
 * Called before a flush.
|
||||||
|
*/
|
||||||
|
public void preFlush(Iterator entities) throws CallbackException;
|
||||||
|
/**
|
||||||
|
* Called after a flush that actually ends in execution of the SQL statements required to synchronize
|
||||||
|
* in-memory state with the database.
|
||||||
|
*/
|
||||||
|
public void postFlush(Iterator entities) throws CallbackException;
|
||||||
|
/**
|
||||||
|
* Called to distinguish between transient and detached entities. The return value determines the
|
||||||
|
* state of the entity with respect to the current session.
|
||||||
|
* <ul>
|
||||||
|
* <li><tt>Boolean.TRUE</tt> - the entity is transient
|
||||||
|
* <li><tt>Boolean.FALSE</tt> - the entity is detached
|
||||||
|
* <li><tt>null</tt> - Hibernate uses the <tt>unsaved-value</tt> mapping and other heuristics to
|
||||||
|
* determine if the object is unsaved
|
||||||
|
* </ul>
|
||||||
|
* @param entity a transient or detached entity
|
||||||
|
* @return Boolean or <tt>null</tt> to choose default behaviour
|
||||||
|
*/
|
||||||
|
public Boolean isTransient(Object entity);
|
||||||
|
/**
|
||||||
|
* Called from <tt>flush()</tt>. The return value determines whether the entity is updated
|
||||||
|
* <ul>
|
||||||
|
* <li>an array of property indices - the entity is dirty
|
||||||
|
* <li>an empty array - the entity is not dirty
|
||||||
|
* <li><tt>null</tt> - use Hibernate's default dirty-checking algorithm
|
||||||
|
* </ul>
|
||||||
|
* @param entity a persistent entity
|
||||||
|
* @return array of dirty property indices or <tt>null</tt> to choose default behaviour
|
||||||
|
*/
|
||||||
|
public int[] findDirty(Object entity, Serializable id, Object[] currentState, Object[] previousState, String[] propertyNames, Type[] types);
|
||||||
|
/**
|
||||||
|
* Instantiate the entity class. Return <tt>null</tt> to indicate that Hibernate should use
|
||||||
|
* the default constructor of the class. The identifier property of the returned instance
|
||||||
|
* should be initialized with the given identifier.
|
||||||
|
*
|
||||||
|
* @param entityName the name of the entity
|
||||||
|
* @param entityMode The type of entity instance to be returned.
|
||||||
|
* @param id the identifier of the new instance
|
||||||
|
* @return an instance of the class, or <tt>null</tt> to choose default behaviour
|
||||||
|
*/
|
||||||
|
public Object instantiate(String entityName, EntityMode entityMode, Serializable id) throws CallbackException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the entity name for a persistent or transient instance
|
||||||
|
* @param object an entity instance
|
||||||
|
* @return the name of the entity
|
||||||
|
*/
|
||||||
|
public String getEntityName(Object object) throws CallbackException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a fully loaded entity instance that is cached externally
|
||||||
|
* @param entityName the name of the entity
|
||||||
|
* @param id the instance identifier
|
||||||
|
* @return a fully initialized entity
|
||||||
|
* @throws CallbackException
|
||||||
|
*/
|
||||||
|
public Object getEntity(String entityName, Serializable id) throws CallbackException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Called when a Hibernate transaction is begun via the Hibernate <tt>Transaction</tt>
|
||||||
|
* API. Will not be called if transactions are being controlled via some other
|
||||||
|
* mechanism (CMT, for example).
|
||||||
|
*/
|
||||||
|
public void afterTransactionBegin(Transaction tx);
|
||||||
|
/**
|
||||||
|
* Called before a transaction is committed (but not before rollback).
|
||||||
|
*/
|
||||||
|
public void beforeTransactionCompletion(Transaction tx);
|
||||||
|
/**
|
||||||
|
* Called after a transaction is committed or rolled back.
|
||||||
|
*/
|
||||||
|
public void afterTransactionCompletion(Transaction tx);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Called when sql string is being prepared.
|
||||||
|
* @param sql sql to be prepared
|
||||||
|
* @return original or modified sql
|
||||||
|
*/
|
||||||
|
public String onPrepareStatement(String sql);
|
||||||
|
}
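As the Javadoc above recommends, application code usually extends EmptyInterceptor rather than implementing every method of this interface. A minimal sketch; the "lastUpdated" property is an illustrative assumption:

public class AuditInterceptor extends EmptyInterceptor {
    // Stamp a "lastUpdated" property whenever a dirty entity is flushed.
    public boolean onFlushDirty(Object entity, Serializable id, Object[] currentState,
            Object[] previousState, String[] propertyNames, Type[] types) {
        for ( int i = 0; i < propertyNames.length; i++ ) {
            if ( "lastUpdated".equals( propertyNames[i] ) ) {
                currentState[i] = new java.util.Date();
                return true;     // the current state was modified
            }
        }
        return false;
    }
}

The interceptor is then registered globally via Configuration.setInterceptor() or per session via SessionFactory.openSession(Interceptor), as the @see references above note.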
|
|
@ -0,0 +1,42 @@
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Thrown when a mapping is found to be invalid.
|
||||||
|
* Similar to MappingException, but this contains more info about the path and type of mapping (e.g. file, resource or url)
|
||||||
|
*
|
||||||
|
* @author Max Rydahl Andersen
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
public class InvalidMappingException extends MappingException {
|
||||||
|
|
||||||
|
private final String path;
|
||||||
|
private final String type;
|
||||||
|
|
||||||
|
public InvalidMappingException(String customMessage, String type, String path, Throwable cause) {
|
||||||
|
super(customMessage, cause);
|
||||||
|
this.type=type;
|
||||||
|
this.path=path;
|
||||||
|
}
|
||||||
|
|
||||||
|
public InvalidMappingException(String customMessage, String type, String path) {
|
||||||
|
super(customMessage);
|
||||||
|
this.type=type;
|
||||||
|
this.path=path;
|
||||||
|
}
|
||||||
|
|
||||||
|
public InvalidMappingException(String type, String path) {
|
||||||
|
this("Could not parse mapping document from " + type + (path==null?"":" " + path), type, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
public InvalidMappingException(String type, String path, Throwable cause) {
|
||||||
|
this("Could not parse mapping document from " + type + (path==null?"":" " + path), type, path, cause);
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getType() {
|
||||||
|
return type;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getPath() {
|
||||||
|
return path;
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,63 @@
|
||||||
|
//$Id: JDBCException.java 4626 2004-09-27 15:24:38Z oneovthafew $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.sql.SQLException;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wraps an <tt>SQLException</tt>. Indicates that an exception
|
||||||
|
* occurred during a JDBC call.
|
||||||
|
*
|
||||||
|
* @see java.sql.SQLException
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class JDBCException extends HibernateException {
|
||||||
|
|
||||||
|
private SQLException sqle;
|
||||||
|
private String sql;
|
||||||
|
|
||||||
|
public JDBCException(String string, SQLException root) {
|
||||||
|
super(string, root);
|
||||||
|
sqle=root;
|
||||||
|
}
|
||||||
|
|
||||||
|
public JDBCException(String string, SQLException root, String sql) {
|
||||||
|
this(string, root);
|
||||||
|
this.sql = sql;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the SQLState of the underlying <tt>SQLException</tt>.
|
||||||
|
* @see java.sql.SQLException
|
||||||
|
* @return String
|
||||||
|
*/
|
||||||
|
public String getSQLState() {
|
||||||
|
return sqle.getSQLState();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the <tt>errorCode</tt> of the underlying <tt>SQLException</tt>.
|
||||||
|
* @see java.sql.SQLException
|
||||||
|
* @return int the error code
|
||||||
|
*/
|
||||||
|
public int getErrorCode() {
|
||||||
|
return sqle.getErrorCode();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the underlying <tt>SQLException</tt>.
|
||||||
|
* @return SQLException
|
||||||
|
*/
|
||||||
|
public SQLException getSQLException() {
|
||||||
|
return sqle;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the actual SQL statement that caused the exception
|
||||||
|
* (may be null)
|
||||||
|
*/
|
||||||
|
public String getSQL() {
|
||||||
|
return sql;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
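A brief sketch of how the accessors above might be used when reporting a failed statement; the "session", "item" and "log" variables are illustrative assumptions:

try {
    session.save( item );
    session.flush();
}
catch ( JDBCException e ) {
    log.error( "SQL [" + e.getSQL() + "] failed; SQLState=" + e.getSQLState()
            + ", vendor error code=" + e.getErrorCode(), e.getSQLException() );
}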
|
|
@ -0,0 +1,28 @@
|
||||||
|
//$Id: LazyInitializationException.java 4458 2004-08-29 09:59:17Z oneovthafew $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Indicates access to unfetched data outside of a session context.
|
||||||
|
* For example, when an uninitialized proxy or collection is accessed
|
||||||
|
* after the session was closed.
|
||||||
|
*
|
||||||
|
* @see Hibernate#initialize(java.lang.Object)
|
||||||
|
* @see Hibernate#isInitialized(java.lang.Object)
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class LazyInitializationException extends HibernateException {
|
||||||
|
|
||||||
|
public LazyInitializationException(String msg) {
|
||||||
|
super(msg);
|
||||||
|
LogFactory.getLog(LazyInitializationException.class).error(msg, this);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
@ -0,0 +1,106 @@
|
||||||
|
//$Id: LockMode.java 9581 2006-03-09 15:50:15Z epbernard $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Instances represent a lock mode for a row of a relational
|
||||||
|
* database table. It is not intended that users spend much
|
||||||
|
* time worrying about locking since Hibernate usually
|
||||||
|
* obtains exactly the right lock level automatically.
|
||||||
|
* Some "advanced" users may wish to explicitly specify lock
|
||||||
|
* levels.
|
||||||
|
*
|
||||||
|
* @see Session#lock(Object,LockMode)
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public final class LockMode implements Serializable {
|
||||||
|
private final int level;
|
||||||
|
private final String name;
|
||||||
|
private static final Map INSTANCES = new HashMap();
|
||||||
|
|
||||||
|
private LockMode(int level, String name) {
|
||||||
|
this.level=level;
|
||||||
|
this.name=name;
|
||||||
|
}
|
||||||
|
public String toString() {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Check if this lock mode is more restrictive than the given lock mode.
|
||||||
|
*
|
||||||
|
* @param mode LockMode to check
|
||||||
|
* @return true if this lock mode is more restrictive than given lock mode
|
||||||
|
*/
|
||||||
|
public boolean greaterThan(LockMode mode) {
|
||||||
|
return level > mode.level;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Check if this lock mode is less restrictive than the given lock mode.
|
||||||
|
*
|
||||||
|
* @param mode LockMode to check
|
||||||
|
* @return true if this lock mode is less restrictive than given lock mode
|
||||||
|
*/
|
||||||
|
public boolean lessThan(LockMode mode) {
|
||||||
|
return level < mode.level;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* No lock required. If an object is requested with this lock
|
||||||
|
* mode, a <tt>READ</tt> lock will be obtained if it is
|
||||||
|
* necessary to actually read the state from the database,
|
||||||
|
* rather than pull it from a cache.<br>
|
||||||
|
* <br>
|
||||||
|
* This is the "default" lock mode.
|
||||||
|
*/
|
||||||
|
public static final LockMode NONE = new LockMode(0, "NONE");
|
||||||
|
/**
|
||||||
|
* A shared lock. Objects in this lock mode were read from
|
||||||
|
* the database in the current transaction, rather than being
|
||||||
|
* pulled from a cache.
|
||||||
|
*/
|
||||||
|
public static final LockMode READ = new LockMode(5, "READ");
|
||||||
|
/**
|
||||||
|
* An upgrade lock. Objects loaded in this lock mode are
|
||||||
|
* materialized using an SQL <tt>select ... for update</tt>.
|
||||||
|
*/
|
||||||
|
public static final LockMode UPGRADE = new LockMode(10, "UPGRADE");
|
||||||
|
/**
|
||||||
|
* Attempt to obtain an upgrade lock, using an Oracle-style
|
||||||
|
* <tt>select for update nowait</tt>. The semantics of
|
||||||
|
* this lock mode, once obtained, are the same as
|
||||||
|
* <tt>UPGRADE</tt>.
|
||||||
|
*/
|
||||||
|
public static final LockMode UPGRADE_NOWAIT = new LockMode(10, "UPGRADE_NOWAIT");
|
||||||
|
/**
|
||||||
|
* A <tt>WRITE</tt> lock is obtained when an object is updated
|
||||||
|
* or inserted. This lock mode is for internal use only and is
|
||||||
|
* not a valid mode for <tt>load()</tt> or <tt>lock()</tt> (both
|
||||||
|
* of which throw exceptions if WRITE is specified).
|
||||||
|
*/
|
||||||
|
public static final LockMode WRITE = new LockMode(10, "WRITE");
|
||||||
|
|
||||||
|
/**
|
||||||
|
 * Similar to {@link #UPGRADE} except that, for versioned entities,
|
||||||
|
* it results in a forced version increment.
|
||||||
|
*/
|
||||||
|
public static final LockMode FORCE = new LockMode( 15, "FORCE" );
|
||||||
|
|
||||||
|
static {
|
||||||
|
INSTANCES.put( NONE.name, NONE );
|
||||||
|
INSTANCES.put( READ.name, READ );
|
||||||
|
INSTANCES.put( UPGRADE.name, UPGRADE );
|
||||||
|
INSTANCES.put( UPGRADE_NOWAIT.name, UPGRADE_NOWAIT );
|
||||||
|
INSTANCES.put( WRITE.name, WRITE );
|
||||||
|
INSTANCES.put( FORCE.name, FORCE );
|
||||||
|
}
|
||||||
|
|
||||||
|
private Object readResolve() {
|
||||||
|
return parse( name );
|
||||||
|
}
|
||||||
|
|
||||||
|
public static LockMode parse(String name) {
|
||||||
|
return ( LockMode ) INSTANCES.get(name);
|
||||||
|
}
|
||||||
|
}
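A short usage sketch for the lock levels above, assuming an open Session "session" and a mapped entity Item (illustrative only):

// Re-associate a detached instance and escalate to a pessimistic lock
// (SELECT ... FOR UPDATE on databases that support it).
session.lock( item, LockMode.UPGRADE );

// Or request the lock up front, failing fast if the row is already locked.
Item other = (Item) session.get( Item.class, new Long( 7 ), LockMode.UPGRADE_NOWAIT );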
|
|
@ -0,0 +1,31 @@
|
||||||
|
//$Id: MappingException.java 3890 2004-06-03 16:31:32Z steveebersole $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An exception that usually occurs at configuration time, rather
|
||||||
|
* than runtime, as a result of something screwy in the O-R mappings.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
|
||||||
|
public class MappingException extends HibernateException {
|
||||||
|
|
||||||
|
public MappingException(String msg, Throwable root) {
|
||||||
|
super( msg, root );
|
||||||
|
}
|
||||||
|
|
||||||
|
public MappingException(Throwable root) {
|
||||||
|
super(root);
|
||||||
|
}
|
||||||
|
|
||||||
|
public MappingException(String s) {
|
||||||
|
super(s);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
@ -0,0 +1,41 @@
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Thrown when a resource for a mapping could not be found.
|
||||||
|
*
|
||||||
|
* @author Max Rydahl Andersen
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
public class MappingNotFoundException extends MappingException {
|
||||||
|
|
||||||
|
private final String path;
|
||||||
|
private final String type;
|
||||||
|
|
||||||
|
public MappingNotFoundException(String customMessage, String type, String path, Throwable cause) {
|
||||||
|
super(customMessage, cause);
|
||||||
|
this.type=type;
|
||||||
|
this.path=path;
|
||||||
|
}
|
||||||
|
|
||||||
|
public MappingNotFoundException(String customMessage, String type, String path) {
|
||||||
|
super(customMessage);
|
||||||
|
this.type=type;
|
||||||
|
this.path=path;
|
||||||
|
}
|
||||||
|
|
||||||
|
public MappingNotFoundException(String type, String path) {
|
||||||
|
this(type + ": " + path + " not found", type, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
public MappingNotFoundException(String type, String path, Throwable cause) {
|
||||||
|
this(type + ": " + path + " not found", type, path, cause);
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getType() {
|
||||||
|
return type;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getPath() {
|
||||||
|
return path;
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,44 @@
|
||||||
|
//$Id: NonUniqueObjectException.java 5685 2005-02-12 07:19:50Z steveebersole $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
|
||||||
|
import org.hibernate.pretty.MessageHelper;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This exception is thrown when an operation would
|
||||||
|
* break session-scoped identity. This occurs if the
|
||||||
|
* user tries to associate two different instances of
|
||||||
|
* the same Java class with a particular identifier,
|
||||||
|
* in the scope of a single <tt>Session</tt>.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class NonUniqueObjectException extends HibernateException {
|
||||||
|
private final Serializable identifier;
|
||||||
|
private final String entityName;
|
||||||
|
|
||||||
|
public NonUniqueObjectException(String message, Serializable id, String clazz) {
|
||||||
|
super(message);
|
||||||
|
this.entityName = clazz;
|
||||||
|
this.identifier = id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public NonUniqueObjectException(Serializable id, String clazz) {
|
||||||
|
this("a different object with the same identifier value was already associated with the session", id, clazz);
|
||||||
|
}
|
||||||
|
|
||||||
|
public Serializable getIdentifier() {
|
||||||
|
return identifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getMessage() {
|
||||||
|
return super.getMessage() + ": " +
|
||||||
|
MessageHelper.infoString(entityName, identifier);
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getEntityName() {
|
||||||
|
return entityName;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,17 @@
|
||||||
|
//$Id: NonUniqueResultException.java 3890 2004-06-03 16:31:32Z steveebersole $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Thrown when the application calls <tt>Query.uniqueResult()</tt> and
|
||||||
|
* the query returned more than one result. Unlike all other Hibernate
|
||||||
|
* exceptions, this one is recoverable!
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class NonUniqueResultException extends HibernateException {
|
||||||
|
|
||||||
|
public NonUniqueResultException(int resultCount) {
|
||||||
|
super( "query did not return a unique result: " + resultCount );
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,25 @@
|
||||||
|
//$Id: ObjectDeletedException.java 3890 2004-06-03 16:31:32Z steveebersole $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Thrown when the user tries to do something illegal with a deleted
|
||||||
|
* object.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class ObjectDeletedException extends UnresolvableObjectException {
|
||||||
|
|
||||||
|
public ObjectDeletedException(String message, Serializable identifier, String clazz) {
|
||||||
|
super(message, identifier, clazz);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
@ -0,0 +1,24 @@
|
||||||
|
//$Id: ObjectNotFoundException.java 9855 2006-05-02 18:55:45Z steve.ebersole@jboss.com $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Thrown when <tt>Session.load()</tt> fails to select a row with
|
||||||
|
* the given primary key (identifier value). This exception might not
|
||||||
|
* be thrown when <tt>load()</tt> is called, even if there was no
|
||||||
|
* row on the database, because <tt>load()</tt> returns a proxy if
|
||||||
|
* possible. Applications should use <tt>Session.get()</tt> to test if
|
||||||
|
* a row exists in the database.<br>
|
||||||
|
* <br>
|
||||||
|
* Like all Hibernate exceptions, this exception is considered
|
||||||
|
* unrecoverable.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class ObjectNotFoundException extends UnresolvableObjectException {
|
||||||
|
|
||||||
|
public ObjectNotFoundException(Serializable identifier, String clazz) {
|
||||||
|
super(identifier, clazz);
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,21 @@
|
||||||
|
//$Id: PersistentObjectException.java 6877 2005-05-23 15:00:25Z oneovthafew $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Thrown when the user passes a persistent instance to a <tt>Session</tt>
|
||||||
|
* method that expects a transient instance.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class PersistentObjectException extends HibernateException {
|
||||||
|
|
||||||
|
public PersistentObjectException(String s) {
|
||||||
|
super(s);
|
||||||
|
}
|
||||||
|
}
@ -0,0 +1,50 @@
|
||||||
|
//$Id: PropertyAccessException.java 3890 2004-06-03 16:31:32Z steveebersole $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import org.hibernate.util.StringHelper;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A problem occurred accessing a property of an instance of a
|
||||||
|
* persistent class by reflection, or via CGLIB. There are a
|
||||||
|
* number of possible underlying causes, including
|
||||||
|
* <ul>
|
||||||
|
* <li>failure of a security check
|
||||||
|
* <li>an exception occurring inside the getter or setter method
|
||||||
|
* <li>a nullable database column was mapped to a primitive-type property
|
||||||
|
* <li>the Hibernate type was not castable to the property type (or vice-versa)
|
||||||
|
* </ul>
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class PropertyAccessException extends HibernateException {
|
||||||
|
|
||||||
|
private final Class persistentClass;
|
||||||
|
private final String propertyName;
|
||||||
|
private final boolean wasSetter;
|
||||||
|
|
||||||
|
public PropertyAccessException(Throwable root, String s, boolean wasSetter, Class persistentClass, String propertyName) {
|
||||||
|
super(s, root);
|
||||||
|
this.persistentClass = persistentClass;
|
||||||
|
this.wasSetter = wasSetter;
|
||||||
|
this.propertyName = propertyName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Class getPersistentClass() {
|
||||||
|
return persistentClass;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getPropertyName() {
|
||||||
|
return propertyName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getMessage() {
|
||||||
|
return super.getMessage() +
|
||||||
|
( wasSetter ? " setter of " : " getter of ") +
|
||||||
|
StringHelper.qualify( persistentClass.getName(), propertyName );
|
||||||
|
}
|
||||||
|
}
@ -0,0 +1,22 @@
|
||||||
|
//$Id: PropertyNotFoundException.java 3890 2004-06-03 16:31:32Z steveebersole $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Indicates that an expected getter or setter method could not be
|
||||||
|
* found on a class.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class PropertyNotFoundException extends MappingException {
|
||||||
|
|
||||||
|
public PropertyNotFoundException(String s) {
|
||||||
|
super(s);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
@ -0,0 +1,56 @@
|
||||||
|
//$Id: PropertyValueException.java 5685 2005-02-12 07:19:50Z steveebersole $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import org.hibernate.util.StringHelper;
|
||||||
|
|
||||||
|
/**
|
||||||
|
 * Thrown when the (illegal) value of a property cannot be persisted.
|
||||||
|
* There are two main causes:
|
||||||
|
* <ul>
|
||||||
|
* <li>a property declared <tt>not-null="true"</tt> is null
|
||||||
|
* <li>an association references an unsaved transient instance
|
||||||
|
* </ul>
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class PropertyValueException extends HibernateException {
|
||||||
|
|
||||||
|
private final String entityName;
|
||||||
|
private final String propertyName;
|
||||||
|
|
||||||
|
public PropertyValueException(String s, String entityName, String propertyName) {
|
||||||
|
super(s);
|
||||||
|
this.entityName = entityName;
|
||||||
|
this.propertyName = propertyName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getEntityName() {
|
||||||
|
return entityName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getPropertyName() {
|
||||||
|
return propertyName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getMessage() {
|
||||||
|
return super.getMessage() + ": " +
|
||||||
|
StringHelper.qualify(entityName, propertyName);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return a well formed property path.
|
||||||
|
 * Basically, it will return parent.child
|
||||||
|
*
|
||||||
|
* @param parent parent in path
|
||||||
|
* @param child child in path
|
||||||
|
* @return parent-child path
|
||||||
|
*/
|
||||||
|
public static String buildPropertyPath(String parent, String child) {
|
||||||
|
return new StringBuffer(parent).append('.').append(child).toString();
|
||||||
|
}
|
||||||
|
}
@ -0,0 +1,386 @@
|
||||||
|
//$Id: Query.java 10591 2006-10-17 08:57:26Z max.andersen@jboss.com $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.math.BigInteger;
|
||||||
|
import java.util.Calendar;
|
||||||
|
import java.util.Collection;
|
||||||
|
import java.util.Date;
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Locale;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import org.hibernate.transform.ResultTransformer;
|
||||||
|
import org.hibernate.type.Type;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An object-oriented representation of a Hibernate query. A <tt>Query</tt>
|
||||||
|
* instance is obtained by calling <tt>Session.createQuery()</tt>. This
|
||||||
|
* interface exposes some extra functionality beyond that provided by
|
||||||
|
* <tt>Session.iterate()</tt> and <tt>Session.find()</tt>:
|
||||||
|
* <ul>
|
||||||
|
* <li>a particular page of the result set may be selected by calling <tt>
|
||||||
|
* setMaxResults(), setFirstResult()</tt>
|
||||||
|
* <li>named query parameters may be used
|
||||||
|
* <li>the results may be returned as an instance of <tt>ScrollableResults</tt>
|
||||||
|
* </ul>
|
||||||
|
* <br>
|
||||||
|
* Named query parameters are tokens of the form <tt>:name</tt> in the
|
||||||
|
* query string. A value is bound to the <tt>integer</tt> parameter
|
||||||
|
* <tt>:foo</tt> by calling<br>
|
||||||
|
* <br>
|
||||||
|
* <tt>setParameter("foo", foo, Hibernate.INTEGER);</tt><br>
|
||||||
|
* <br>
|
||||||
|
* for example. A name may appear multiple times in the query string.<br>
|
||||||
|
* <br>
|
||||||
|
* JDBC-style <tt>?</tt> parameters are also supported. To bind a
|
||||||
|
* value to a JDBC-style parameter use a set method that accepts an
|
||||||
|
* <tt>int</tt> positional argument (numbered from zero, contrary
|
||||||
|
* to JDBC).<br>
|
||||||
|
* <br>
|
||||||
|
* You may not mix and match JDBC-style parameters and named parameters
|
||||||
|
* in the same query.<br>
|
||||||
|
* <br>
|
||||||
|
* Queries are executed by calling <tt>list()</tt>, <tt>scroll()</tt> or
|
||||||
|
* <tt>iterate()</tt>. A query may be re-executed by subsequent invocations.
|
||||||
|
* Its lifespan is, however, bounded by the lifespan of the <tt>Session</tt>
|
||||||
|
* that created it.<br>
|
||||||
|
* <br>
|
||||||
|
* Implementors are not intended to be threadsafe.
|
||||||
|
*
|
||||||
|
* @see org.hibernate.Session#createQuery(java.lang.String)
|
||||||
|
* @see org.hibernate.ScrollableResults
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public interface Query {
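	/*
	 * Usage sketch (illustrative, not part of this interface): binding a named
	 * parameter, paging the results, and executing the query. An open Session
	 * "session" and a mapped entity "Item" are assumed.
	 *
	 *   Query q = session.createQuery( "from Item i where i.name = :name" );
	 *   q.setParameter( "name", "widget", Hibernate.STRING );
	 *   q.setFirstResult( 0 ).setMaxResults( 20 );
	 *   List page = q.list();
	 */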
|
||||||
|
/**
|
||||||
|
* Get the query string.
|
||||||
|
*
|
||||||
|
* @return the query string
|
||||||
|
*/
|
||||||
|
public String getQueryString();
|
||||||
|
/**
|
||||||
|
* Return the Hibernate types of the query result set.
|
||||||
|
* @return an array of types
|
||||||
|
*/
|
||||||
|
public Type[] getReturnTypes() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Return the HQL select clause aliases (if any)
|
||||||
|
* @return an array of aliases as strings
|
||||||
|
*/
|
||||||
|
public String[] getReturnAliases() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Return the names of all named parameters of the query.
|
||||||
|
* @return the parameter names, in no particular order
|
||||||
|
*/
|
||||||
|
public String[] getNamedParameters() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Return the query results as an <tt>Iterator</tt>. If the query
|
||||||
|
 * contains multiple results per row, the results are returned in
|
||||||
|
* an instance of <tt>Object[]</tt>.<br>
|
||||||
|
* <br>
|
||||||
|
* Entities returned as results are initialized on demand. The first
|
||||||
|
* SQL query returns identifiers only.<br>
|
||||||
|
*
|
||||||
|
* @return the result iterator
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Iterator iterate() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Return the query results as <tt>ScrollableResults</tt>. The
|
||||||
|
* scrollability of the returned results depends upon JDBC driver
|
||||||
|
* support for scrollable <tt>ResultSet</tt>s.<br>
|
||||||
|
*
|
||||||
|
* @see ScrollableResults
|
||||||
|
* @return the result iterator
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public ScrollableResults scroll() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Return the query results as <tt>ScrollableResults</tt>. The
|
||||||
|
* scrollability of the returned results depends upon JDBC driver
|
||||||
|
* support for scrollable <tt>ResultSet</tt>s.<br>
|
||||||
|
*
|
||||||
|
* @see ScrollableResults
|
||||||
|
* @see ScrollMode
|
||||||
|
* @return the result iterator
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Return the query results as a <tt>List</tt>. If the query contains
|
||||||
|
 * multiple results per row, the results are returned in an instance
|
||||||
|
* of <tt>Object[]</tt>.
|
||||||
|
*
|
||||||
|
* @return the result list
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public List list() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to return a single instance that matches
|
||||||
|
* the query, or null if the query returns no results.
|
||||||
|
*
|
||||||
|
* @return the single result or <tt>null</tt>
|
||||||
|
* @throws NonUniqueResultException if there is more than one matching result
|
||||||
|
*/
|
||||||
|
public Object uniqueResult() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the update or delete statement.
|
||||||
|
 * <p/>
|
||||||
|
* The semantics are compliant with the ejb3 Query.executeUpdate()
|
||||||
|
* method.
|
||||||
|
*
|
||||||
|
* @return The number of entities updated or deleted.
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public int executeUpdate() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the maximum number of rows to retrieve. If not set,
|
||||||
|
* there is no limit to the number of rows retrieved.
|
||||||
|
* @param maxResults the maximum number of rows
|
||||||
|
*/
|
||||||
|
public Query setMaxResults(int maxResults);
|
||||||
|
/**
|
||||||
|
* Set the first row to retrieve. If not set, rows will be
|
||||||
|
 * retrieved beginning from row <tt>0</tt>.
|
||||||
|
* @param firstResult a row number, numbered from <tt>0</tt>
|
||||||
|
*/
|
||||||
|
public Query setFirstResult(int firstResult);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Entities retrieved by this query will be loaded in
|
||||||
|
* a read-only mode where Hibernate will never dirty-check
|
||||||
|
* them or make changes persistent.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
public Query setReadOnly(boolean readOnly);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enable caching of this query result set.
|
||||||
|
* @param cacheable Should the query results be cacheable?
|
||||||
|
*/
|
||||||
|
public Query setCacheable(boolean cacheable);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the name of the cache region.
|
||||||
|
* @param cacheRegion the name of a query cache region, or <tt>null</tt>
|
||||||
|
* for the default query cache
|
||||||
|
*/
|
||||||
|
public Query setCacheRegion(String cacheRegion);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set a timeout for the underlying JDBC query.
|
||||||
|
* @param timeout the timeout in seconds
|
||||||
|
*/
|
||||||
|
public Query setTimeout(int timeout);
|
||||||
|
/**
|
||||||
|
* Set a fetch size for the underlying JDBC query.
|
||||||
|
* @param fetchSize the fetch size
|
||||||
|
*/
|
||||||
|
public Query setFetchSize(int fetchSize);
|
||||||
|
|
||||||
|
/**
|
||||||
|
 * Set the lock mode for the objects identified by the
|
||||||
|
* given alias that appears in the <tt>FROM</tt> clause.
|
||||||
|
* @param alias a query alias, or <tt>this</tt> for a collection filter
|
||||||
|
*/
|
||||||
|
public Query setLockMode(String alias, LockMode lockMode);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a comment to the generated SQL.
|
||||||
|
* @param comment a human-readable string
|
||||||
|
*/
|
||||||
|
public Query setComment(String comment);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Override the current session flush mode, just for
|
||||||
|
* this query.
|
||||||
|
* @see org.hibernate.FlushMode
|
||||||
|
*/
|
||||||
|
public Query setFlushMode(FlushMode flushMode);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Override the current session cache mode, just for
|
||||||
|
* this query.
|
||||||
|
* @see org.hibernate.CacheMode
|
||||||
|
*/
|
||||||
|
public Query setCacheMode(CacheMode cacheMode);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind a value to a JDBC-style query parameter.
|
||||||
|
* @param position the position of the parameter in the query
|
||||||
|
* string, numbered from <tt>0</tt>.
|
||||||
|
* @param val the possibly-null parameter value
|
||||||
|
* @param type the Hibernate type
|
||||||
|
*/
|
||||||
|
public Query setParameter(int position, Object val, Type type);
|
||||||
|
/**
|
||||||
|
* Bind a value to a named query parameter.
|
||||||
|
* @param name the name of the parameter
|
||||||
|
* @param val the possibly-null parameter value
|
||||||
|
* @param type the Hibernate type
|
||||||
|
*/
|
||||||
|
public Query setParameter(String name, Object val, Type type);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind a value to a JDBC-style query parameter. The Hibernate type of the parameter is
|
||||||
|
* first detected via the usage/position in the query and if not sufficient secondly
|
||||||
|
* guessed from the class of the given object.
|
||||||
|
* @param position the position of the parameter in the query
|
||||||
|
* string, numbered from <tt>0</tt>.
|
||||||
|
* @param val the non-null parameter value
|
||||||
|
* @throws org.hibernate.HibernateException if no type could be determined
|
||||||
|
*/
|
||||||
|
public Query setParameter(int position, Object val) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Bind a value to a named query parameter. The Hibernate type of the parameter is
|
||||||
|
* first detected via the usage/position in the query and if not sufficient secondly
|
||||||
|
* guessed from the class of the given object.
|
||||||
|
* @param name the name of the parameter
|
||||||
|
* @param val the non-null parameter value
|
||||||
|
* @throws org.hibernate.HibernateException if no type could be determined
|
||||||
|
*/
|
||||||
|
public Query setParameter(String name, Object val) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind values and types to positional parameters.
|
||||||
|
*/
|
||||||
|
public Query setParameters(Object[] values, Type[] types) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind multiple values to a named query parameter. This is useful for binding
|
||||||
|
* a list of values to an expression such as <tt>foo.bar in (:value_list)</tt>.
|
||||||
|
* @param name the name of the parameter
|
||||||
|
* @param vals a collection of values to list
|
||||||
|
* @param type the Hibernate type of the values
|
||||||
|
*/
|
||||||
|
public Query setParameterList(String name, Collection vals, Type type) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind multiple values to a named query parameter. The Hibernate type of the parameter is
|
||||||
|
* first detected via the usage/position in the query and if not sufficient secondly
|
||||||
|
* guessed from the class of the first object in the collection. This is useful for binding a list of values
|
||||||
|
* to an expression such as <tt>foo.bar in (:value_list)</tt>.
|
||||||
|
* @param name the name of the parameter
|
||||||
|
* @param vals a collection of values to list
|
||||||
|
*/
|
||||||
|
public Query setParameterList(String name, Collection vals) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind multiple values to a named query parameter. This is useful for binding
|
||||||
|
* a list of values to an expression such as <tt>foo.bar in (:value_list)</tt>.
|
||||||
|
* @param name the name of the parameter
|
||||||
|
* @param vals a collection of values to list
|
||||||
|
* @param type the Hibernate type of the values
|
||||||
|
*/
|
||||||
|
public Query setParameterList(String name, Object[] vals, Type type) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind multiple values to a named query parameter. The Hibernate type of the parameter is
|
||||||
|
* first detected via the usage/position in the query and if not sufficient secondly
|
||||||
|
* guessed from the class of the first object in the array. This is useful for binding a list of values
|
||||||
|
* to an expression such as <tt>foo.bar in (:value_list)</tt>.
|
||||||
|
* @param name the name of the parameter
|
||||||
|
* @param vals a collection of values to list
|
||||||
|
*/
|
||||||
|
public Query setParameterList(String name, Object[] vals) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind the property values of the given bean to named parameters of the query,
|
||||||
|
* matching property names with parameter names and mapping property types to
|
||||||
|
* Hibernate types using hueristics.
|
||||||
|
* @param bean any JavaBean or POJO
|
||||||
|
*/
|
||||||
|
public Query setProperties(Object bean) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind the values of the given Map for each named parameters of the query,
|
||||||
|
* matching key names with parameter names and mapping value types to
|
||||||
|
* Hibernate types using hueristics.
|
||||||
|
* @param bean a java.util.Map
|
||||||
|
*/
|
||||||
|
public Query setProperties(Map bean) throws HibernateException;
|
||||||
|
|
||||||
|
public Query setString(int position, String val);
|
||||||
|
public Query setCharacter(int position, char val);
|
||||||
|
public Query setBoolean(int position, boolean val);
|
||||||
|
public Query setByte(int position, byte val);
|
||||||
|
public Query setShort(int position, short val);
|
||||||
|
public Query setInteger(int position, int val);
|
||||||
|
public Query setLong(int position, long val);
|
||||||
|
public Query setFloat(int position, float val);
|
||||||
|
public Query setDouble(int position, double val);
|
||||||
|
public Query setBinary(int position, byte[] val);
|
||||||
|
public Query setText(int position, String val);
|
||||||
|
public Query setSerializable(int position, Serializable val);
|
||||||
|
public Query setLocale(int position, Locale locale);
|
||||||
|
public Query setBigDecimal(int position, BigDecimal number);
|
||||||
|
public Query setBigInteger(int position, BigInteger number);
|
||||||
|
|
||||||
|
public Query setDate(int position, Date date);
|
||||||
|
public Query setTime(int position, Date date);
|
||||||
|
public Query setTimestamp(int position, Date date);
|
||||||
|
|
||||||
|
public Query setCalendar(int position, Calendar calendar);
|
||||||
|
public Query setCalendarDate(int position, Calendar calendar);
|
||||||
|
|
||||||
|
public Query setString(String name, String val);
|
||||||
|
public Query setCharacter(String name, char val);
|
||||||
|
public Query setBoolean(String name, boolean val);
|
||||||
|
public Query setByte(String name, byte val);
|
||||||
|
public Query setShort(String name, short val);
|
||||||
|
public Query setInteger(String name, int val);
|
||||||
|
public Query setLong(String name, long val);
|
||||||
|
public Query setFloat(String name, float val);
|
||||||
|
public Query setDouble(String name, double val);
|
||||||
|
public Query setBinary(String name, byte[] val);
|
||||||
|
public Query setText(String name, String val);
|
||||||
|
public Query setSerializable(String name, Serializable val);
|
||||||
|
public Query setLocale(String name, Locale locale);
|
||||||
|
public Query setBigDecimal(String name, BigDecimal number);
|
||||||
|
public Query setBigInteger(String name, BigInteger number);
|
||||||
|
|
||||||
|
public Query setDate(String name, Date date);
|
||||||
|
public Query setTime(String name, Date date);
|
||||||
|
public Query setTimestamp(String name, Date date);
|
||||||
|
|
||||||
|
public Query setCalendar(String name, Calendar calendar);
|
||||||
|
public Query setCalendarDate(String name, Calendar calendar);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind an instance of a mapped persistent class to a JDBC-style query parameter.
|
||||||
|
* @param position the position of the parameter in the query
|
||||||
|
* string, numbered from <tt>0</tt>.
|
||||||
|
* @param val a non-null instance of a persistent class
|
||||||
|
*/
|
||||||
|
public Query setEntity(int position, Object val); // use setParameter for null values
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bind an instance of a mapped persistent class to a named query parameter.
|
||||||
|
* @param name the name of the parameter
|
||||||
|
* @param val a non-null instance of a persistent class
|
||||||
|
*/
|
||||||
|
public Query setEntity(String name, Object val); // use setParameter for null values
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set a strategy for handling the query results. This can be used to change
|
||||||
|
* "shape" of the query result.
|
||||||
|
*
|
||||||
|
* @param transformer The transformer to apply
|
||||||
|
* @return this (for method chaining)
|
||||||
|
*/
|
||||||
|
public Query setResultTransformer(ResultTransformer transformer);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
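/*
 * Usage sketch (illustration only, not part of this commit) of the parameter-binding API
 * above, assuming a hypothetical mapped entity "Item" with "status" and "color" properties
 * and an open Session "sess":
 *
 *     List items = sess.createQuery(
 *             "from Item i where i.status = :status and i.color in (:colors)" )
 *             .setParameter( "status", "ACTIVE", Hibernate.STRING )
 *             .setParameterList( "colors", new String[] { "red", "blue" } )
 *             .setCacheable( true )
 *             .setCacheRegion( "item.queries" )
 *             .list();
 */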
@ -0,0 +1,48 @@
//$Id: QueryException.java 3890 2004-06-03 16:31:32Z steveebersole $
package org.hibernate;

/**
 * A problem occurred translating a Hibernate query to SQL
 * due to invalid query syntax, etc.
 */
public class QueryException extends HibernateException {

    private String queryString;

    public QueryException(String message) {
        super(message);
    }
    public QueryException(String message, Throwable e) {
        super(message, e);
    }

    public QueryException(String message, String queryString) {
        super(message);
        this.queryString = queryString;
    }

    public QueryException(Exception e) {
        super(e);
    }
    public String getQueryString() {
        return queryString;
    }

    public void setQueryString(String queryString) {
        this.queryString = queryString;
    }

    public String getMessage() {
        String msg = super.getMessage();
        if ( queryString!=null ) msg += " [" + queryString + ']';
        return msg;
    }

}
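/*
 * Usage sketch (illustration only, not part of this commit): the offending HQL string can be
 * recovered from the exception for diagnostics, assuming an open Session "sess" and some
 * logger "log":
 *
 *     try {
 *         sess.createQuery( "from Item where" ).list();
 *     }
 *     catch ( QueryException qe ) {
 *         log.error( "bad query: " + qe.getQueryString(), qe );
 *     }
 */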
@ -0,0 +1,26 @@
//$Id: $
package org.hibernate;

/**
 * Parameter invalid or not found in the query
 *
 * @author Emmanuel Bernard
 */
public class QueryParameterException extends QueryException {

    public QueryParameterException(Exception e) {
        super( e );
    }

    public QueryParameterException(String message) {
        super( message );
    }

    public QueryParameterException(String message, Throwable e) {
        super( message, e );
    }

    public QueryParameterException(String message, String queryString) {
        super( message, queryString );
    }
}
@ -0,0 +1,78 @@
//$Id: ReplicationMode.java 5060 2004-12-24 03:11:05Z oneovthafew $
package org.hibernate;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import org.hibernate.type.VersionType;

/**
 * Represents a replication strategy.
 *
 * @see Session#replicate(Object, ReplicationMode)
 * @author Gavin King
 */
public abstract class ReplicationMode implements Serializable {
    private final String name;
    private static final Map INSTANCES = new HashMap();

    public ReplicationMode(String name) {
        this.name=name;
    }
    public String toString() {
        return name;
    }
    public abstract boolean shouldOverwriteCurrentVersion(Object entity, Object currentVersion, Object newVersion, VersionType versionType);
    /**
     * Throw an exception when a row already exists.
     */
    public static final ReplicationMode EXCEPTION = new ReplicationMode("EXCEPTION") {
        public boolean shouldOverwriteCurrentVersion(Object entity, Object currentVersion, Object newVersion, VersionType versionType) {
            throw new AssertionFailure("should not be called");
        }
    };
    /**
     * Ignore replicated entities when a row already exists.
     */
    public static final ReplicationMode IGNORE = new ReplicationMode("IGNORE") {
        public boolean shouldOverwriteCurrentVersion(Object entity, Object currentVersion, Object newVersion, VersionType versionType) {
            return false;
        }
    };
    /**
     * Overwrite existing rows when a row already exists.
     */
    public static final ReplicationMode OVERWRITE = new ReplicationMode("OVERWRITE") {
        public boolean shouldOverwriteCurrentVersion(Object entity, Object currentVersion, Object newVersion, VersionType versionType) {
            return true;
        }
    };
    /**
     * When a row already exists, choose the latest version.
     */
    public static final ReplicationMode LATEST_VERSION = new ReplicationMode("LATEST_VERSION") {
        public boolean shouldOverwriteCurrentVersion(Object entity, Object currentVersion, Object newVersion, VersionType versionType) {
            if (versionType==null) return true; //always overwrite nonversioned data
            return versionType.getComparator().compare(currentVersion, newVersion) <= 0;
        }
    };

    static {
        INSTANCES.put( LATEST_VERSION.name, LATEST_VERSION );
        INSTANCES.put( IGNORE.name, IGNORE );
        INSTANCES.put( OVERWRITE.name, OVERWRITE );
        INSTANCES.put( EXCEPTION.name, EXCEPTION );
    }

    private Object readResolve() {
        return INSTANCES.get(name);
    }

}
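/*
 * Usage sketch (illustration only, not part of this commit): pushing a detached instance read
 * from another database into this session's database, keeping whichever row version is newer.
 * "detachedItem" is a hypothetical detached entity and "sess" an open Session:
 *
 *     sess.replicate( detachedItem, ReplicationMode.LATEST_VERSION );
 */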
@ -0,0 +1,93 @@
//$Id: SQLQuery.java 10845 2006-11-18 04:20:30Z steve.ebersole@jboss.com $
package org.hibernate;

import org.hibernate.type.Type;

/**
 * Allows the user to declare the types and select list injection
 * points of all entities returned by the query. Also allows
 * declaration of the type and column alias of any scalar results
 * of the query.
 *
 * @author Gavin King
 */
public interface SQLQuery extends Query {
    /**
     * Declare a "root" entity, without specifying an alias
     */
    public SQLQuery addEntity(String entityName);
    /**
     * Declare a "root" entity
     */
    public SQLQuery addEntity(String alias, String entityName);
    /**
     * Declare a "root" entity, specifying a lock mode
     */
    public SQLQuery addEntity(String alias, String entityName, LockMode lockMode);
    /**
     * Declare a "root" entity, without specifying an alias
     */
    public SQLQuery addEntity(Class entityClass);
    /**
     * Declare a "root" entity
     */
    public SQLQuery addEntity(String alias, Class entityClass);
    /**
     * Declare a "root" entity, specifying a lock mode
     */
    public SQLQuery addEntity(String alias, Class entityClass, LockMode lockMode);

    /**
     * Declare a "joined" entity
     */
    public SQLQuery addJoin(String alias, String path);
    /**
     * Declare a "joined" entity, specifying a lock mode
     */
    public SQLQuery addJoin(String alias, String path, LockMode lockMode);

    /**
     * Declare a scalar query result
     */
    public SQLQuery addScalar(String columnAlias, Type type);

    /**
     * Declare a scalar query result. Hibernate will attempt to automatically detect the underlying type.
     */
    public SQLQuery addScalar(String columnAlias);

    /**
     * Use a predefined named ResultSetMapping
     */
    public SQLQuery setResultSetMapping(String name);

    /**
     * Adds a query space for auto-flush synchronization.
     *
     * @param querySpace The query space to be auto-flushed for this query.
     * @return this, for method chaining
     */
    public SQLQuery addSynchronizedQuerySpace(String querySpace);

    /**
     * Adds an entity name for auto-flush synchronization.
     *
     * @param entityName The name of the entity upon whose defined
     * query spaces we should additionally synchronize.
     * @return this, for method chaining
     * @throws MappingException Indicates the given entity name could not be
     * resolved.
     */
    public SQLQuery addSynchronizedEntityName(String entityName) throws MappingException;

    /**
     * Adds an entity class for auto-flush synchronization.
     *
     * @param entityClass The class of the entity upon whose defined
     * query spaces we should additionally synchronize.
     * @return this, for method chaining
     * @throws MappingException Indicates the given entity class could not be
     * resolved.
     */
    public SQLQuery addSynchronizedEntityClass(Class entityClass) throws MappingException;
}
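/*
 * Usage sketch (illustration only, not part of this commit) of a native query mixing an
 * entity return with a scalar column, assuming a hypothetical CATS table mapped to an
 * entity "Cat" and an open Session "sess":
 *
 *     List rows = sess.createSQLQuery( "select c.*, c.WEIGHT as weight from CATS c" )
 *             .addEntity( "c", Cat.class )
 *             .addScalar( "weight", Hibernate.DOUBLE )
 *             .list();
 */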
@ -0,0 +1,74 @@
//$Id: ScrollMode.java 4369 2004-08-18 00:28:43Z oneovthafew $
package org.hibernate;

import java.io.Serializable;
import java.sql.ResultSet;
import java.util.HashMap;
import java.util.Map;

/**
 * Specifies the type of JDBC scrollable result set to use
 * underneath a <tt>ScrollableResults</tt>
 *
 * @see Query#scroll(ScrollMode)
 * @see ScrollableResults
 * @author Gavin King
 */
public final class ScrollMode implements Serializable {
    private final int resultSetType;
    private final String name;
    private static final Map INSTANCES = new HashMap();

    private ScrollMode(int level, String name) {
        this.resultSetType=level;
        this.name=name;
    }

    public String toString() {
        return name;
    }

    /**
     * @return the JDBC result set type code
     */
    public int toResultSetType() {
        return resultSetType;
    }

    /**
     * @see java.sql.ResultSet#TYPE_FORWARD_ONLY
     */
    public static final ScrollMode FORWARD_ONLY = new ScrollMode(ResultSet.TYPE_FORWARD_ONLY, "FORWARD_ONLY");
    /**
     * @see java.sql.ResultSet#TYPE_SCROLL_SENSITIVE
     */
    public static final ScrollMode SCROLL_SENSITIVE = new ScrollMode(ResultSet.TYPE_SCROLL_SENSITIVE, "SCROLL_SENSITIVE");
    /**
     * Note that since the Hibernate session acts as a cache, you
     * might need to explicitly evict objects, if you need to see
     * changes made by other transactions.
     * @see java.sql.ResultSet#TYPE_SCROLL_INSENSITIVE
     */
    public static final ScrollMode SCROLL_INSENSITIVE = new ScrollMode(ResultSet.TYPE_SCROLL_INSENSITIVE, "SCROLL_INSENSITIVE");

    public boolean lessThan(ScrollMode other) {
        return this.resultSetType<other.resultSetType;
    }

    static {
        INSTANCES.put( FORWARD_ONLY.name, FORWARD_ONLY );
        INSTANCES.put( SCROLL_INSENSITIVE.name, SCROLL_INSENSITIVE );
        INSTANCES.put( SCROLL_SENSITIVE.name, SCROLL_SENSITIVE );
    }

    private Object readResolve() {
        return INSTANCES.get(name);
    }

}
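/*
 * Usage sketch (illustration only, not part of this commit): requesting a forward-only
 * cursor to stream a large result set, assuming a hypothetical entity "Cat" and an open
 * Session "sess":
 *
 *     ScrollableResults cats = sess.createQuery( "from Cat" )
 *             .scroll( ScrollMode.FORWARD_ONLY );
 */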
@ -0,0 +1,204 @@
|
||||||
|
//$Id: ScrollableResults.java 6411 2005-04-13 07:37:50Z oneovthafew $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.math.BigDecimal;
|
||||||
|
import java.math.BigInteger;
|
||||||
|
import java.sql.Blob;
|
||||||
|
import java.sql.Clob;
|
||||||
|
import java.util.Calendar;
|
||||||
|
import java.util.Date;
|
||||||
|
import java.util.Locale;
|
||||||
|
import java.util.TimeZone;
|
||||||
|
|
||||||
|
import org.hibernate.type.Type;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A result iterator that allows moving around within the results
|
||||||
|
* by arbitrary increments. The <tt>Query</tt> / <tt>ScrollableResults</tt>
|
||||||
|
* pattern is very similar to the JDBC <tt>PreparedStatement</tt>/
|
||||||
|
* <tt>ResultSet</tt> pattern and the semantics of methods of this interface
|
||||||
|
* are similar to the similarly named methods on <tt>ResultSet</tt>.<br>
|
||||||
|
* <br>
|
||||||
|
* Contrary to JDBC, columns of results are numbered from zero.
|
||||||
|
*
|
||||||
|
* @see Query#scroll()
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public interface ScrollableResults {
|
||||||
|
/**
|
||||||
|
* Advance to the next result
|
||||||
|
* @return <tt>true</tt> if there is another result
|
||||||
|
*/
|
||||||
|
public boolean next() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Retreat to the previous result
|
||||||
|
* @return <tt>true</tt> if there is a previous result
|
||||||
|
*/
|
||||||
|
public boolean previous() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Scroll an arbitrary number of locations
|
||||||
|
* @param i a positive (forward) or negative (backward) number of rows
|
||||||
|
* @return <tt>true</tt> if there is a result at the new location
|
||||||
|
*/
|
||||||
|
public boolean scroll(int i) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Go to the last result
|
||||||
|
* @return <tt>true</tt> if there are any results
|
||||||
|
*/
|
||||||
|
public boolean last() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Go to the first result
|
||||||
|
* @return <tt>true</tt> if there are any results
|
||||||
|
*/
|
||||||
|
public boolean first() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Go to a location just before first result (this is the initial location)
|
||||||
|
*/
|
||||||
|
public void beforeFirst() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Go to a location just after the last result
|
||||||
|
*/
|
||||||
|
public void afterLast() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Is this the first result?
|
||||||
|
*
|
||||||
|
* @return <tt>true</tt> if this is the first row of results
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public boolean isFirst() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Is this the last result?
|
||||||
|
*
|
||||||
|
* @return <tt>true</tt> if this is the last row of results
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public boolean isLast() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Release resources immediately.
|
||||||
|
*/
|
||||||
|
public void close() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Get the current row of results
|
||||||
|
* @return an object or array
|
||||||
|
*/
|
||||||
|
public Object[] get() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Get the <tt>i</tt>th object in the current row of results, without
|
||||||
|
* initializing any other results in the row. This method may be used
|
||||||
|
* safely, regardless of the type of the column (ie. even for scalar
|
||||||
|
* results).
|
||||||
|
* @param i the column, numbered from zero
|
||||||
|
* @return an object of any Hibernate type or <tt>null</tt>
|
||||||
|
*/
|
||||||
|
public Object get(int i) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the type of the <tt>i</tt>th column of results
|
||||||
|
* @param i the column, numbered from zero
|
||||||
|
* @return the Hibernate type
|
||||||
|
*/
|
||||||
|
public Type getType(int i);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convenience method to read an <tt>integer</tt>
|
||||||
|
*/
|
||||||
|
public Integer getInteger(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>long</tt>
|
||||||
|
*/
|
||||||
|
public Long getLong(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>float</tt>
|
||||||
|
*/
|
||||||
|
public Float getFloat(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>boolean</tt>
|
||||||
|
*/
|
||||||
|
public Boolean getBoolean(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>double</tt>
|
||||||
|
*/
|
||||||
|
public Double getDouble(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>short</tt>
|
||||||
|
*/
|
||||||
|
public Short getShort(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>byte</tt>
|
||||||
|
*/
|
||||||
|
public Byte getByte(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>character</tt>
|
||||||
|
*/
|
||||||
|
public Character getCharacter(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>binary</tt>
|
||||||
|
*/
|
||||||
|
public byte[] getBinary(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read <tt>text</tt>
|
||||||
|
*/
|
||||||
|
public String getText(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>blob</tt>
|
||||||
|
*/
|
||||||
|
public Blob getBlob(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>clob</tt>
|
||||||
|
*/
|
||||||
|
public Clob getClob(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>string</tt>
|
||||||
|
*/
|
||||||
|
public String getString(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>big_decimal</tt>
|
||||||
|
*/
|
||||||
|
public BigDecimal getBigDecimal(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>big_integer</tt>
|
||||||
|
*/
|
||||||
|
public BigInteger getBigInteger(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>date</tt>, <tt>time</tt> or <tt>timestamp</tt>
|
||||||
|
*/
|
||||||
|
public Date getDate(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>locale</tt>
|
||||||
|
*/
|
||||||
|
public Locale getLocale(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>calendar</tt> or <tt>calendar_date</tt>
|
||||||
|
*/
|
||||||
|
public Calendar getCalendar(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>currency</tt>
|
||||||
|
*/
|
||||||
|
//public Currency getCurrency(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Convenience method to read a <tt>timezone</tt>
|
||||||
|
*/
|
||||||
|
public TimeZone getTimeZone(int col) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Get the current location in the result set. The first
|
||||||
|
* row is number <tt>0</tt>, contrary to JDBC.
|
||||||
|
* @return the row number, numbered from <tt>0</tt>, or <tt>-1</tt> if
|
||||||
|
* there is no current row
|
||||||
|
*/
|
||||||
|
public int getRowNumber() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Set the current location in the result set, numbered from either the
|
||||||
|
* first row (row number <tt>0</tt>), or the last row (row
|
||||||
|
* number <tt>-1</tt>).
|
||||||
|
* @param rowNumber the row number, numbered from the last row, in the
|
||||||
|
* case of a negative row number
|
||||||
|
* @return true if there is a row at that row number
|
||||||
|
*/
|
||||||
|
public boolean setRowNumber(int rowNumber) throws HibernateException;
|
||||||
|
}
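/*
 * Usage sketch (illustration only, not part of this commit): iterating a scrollable result
 * and reading the first column of each row; close() releases the underlying cursor.
 * "Cat" and "sess" are assumed, as above:
 *
 *     ScrollableResults rows = sess.createQuery( "select c.name, c from Cat c" ).scroll();
 *     try {
 *         while ( rows.next() ) {
 *             String name = rows.getString( 0 ); // columns are numbered from zero
 *             // ...
 *         }
 *     }
 *     finally {
 *         rows.close();
 *     }
 */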
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,783 @@
|
||||||
|
//$Id: Session.java 11494 2007-05-09 02:00:16Z steve.ebersole@jboss.com $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
import java.sql.Connection;
|
||||||
|
|
||||||
|
import org.hibernate.stat.SessionStatistics;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The main runtime interface between a Java application and Hibernate. This is the
|
||||||
|
* central API class abstracting the notion of a persistence service.<br>
|
||||||
|
* <br>
|
||||||
|
* The lifecycle of a <tt>Session</tt> is bounded by the beginning and end of a logical
|
||||||
|
* transaction. (Long transactions might span several database transactions.)<br>
|
||||||
|
* <br>
|
||||||
|
* The main function of the <tt>Session</tt> is to offer create, read and delete operations
|
||||||
|
* for instances of mapped entity classes. Instances may exist in one of three states:<br>
|
||||||
|
* <br>
|
||||||
|
* <i>transient:</i> never persistent, not associated with any <tt>Session</tt><br>
|
||||||
|
* <i>persistent:</i> associated with a unique <tt>Session</tt><br>
|
||||||
|
* <i>detached:</i> previously persistent, not associated with any <tt>Session</tt><br>
|
||||||
|
* <br>
|
||||||
|
* Transient instances may be made persistent by calling <tt>save()</tt>,
|
||||||
|
* <tt>persist()</tt> or <tt>saveOrUpdate()</tt>. Persistent instances may be made transient
|
||||||
|
* by calling<tt> delete()</tt>. Any instance returned by a <tt>get()</tt> or
|
||||||
|
* <tt>load()</tt> method is persistent. Detached instances may be made persistent
|
||||||
|
* by calling <tt>update()</tt>, <tt>saveOrUpdate()</tt>, <tt>lock()</tt> or <tt>replicate()</tt>.
|
||||||
|
* The state of a transient or detached instance may also be made persistent as a new
|
||||||
|
* persistent instance by calling <tt>merge()</tt>.<br>
|
||||||
|
* <br>
|
||||||
|
* <tt>save()</tt> and <tt>persist()</tt> result in an SQL <tt>INSERT</tt>, <tt>delete()</tt>
|
||||||
|
* in an SQL <tt>DELETE</tt> and <tt>update()</tt> or <tt>merge()</tt> in an SQL <tt>UPDATE</tt>.
|
||||||
|
* Changes to <i>persistent</i> instances are detected at flush time and also result in an SQL
|
||||||
|
* <tt>UPDATE</tt>. <tt>saveOrUpdate()</tt> and <tt>replicate()</tt> result in either an
|
||||||
|
* <tt>INSERT</tt> or an <tt>UPDATE</tt>.<br>
|
||||||
|
* <br>
|
||||||
|
* It is not intended that implementors be threadsafe. Instead each thread/transaction
|
||||||
|
* should obtain its own instance from a <tt>SessionFactory</tt>.<br>
|
||||||
|
* <br>
|
||||||
|
* A <tt>Session</tt> instance is serializable if its persistent classes are serializable.<br>
|
||||||
|
* <br>
|
||||||
|
* A typical transaction should use the following idiom:
|
||||||
|
* <pre>
|
||||||
|
* Session sess = factory.openSession();
|
||||||
|
* Transaction tx;
|
||||||
|
* try {
|
||||||
|
* tx = sess.beginTransaction();
|
||||||
|
* //do some work
|
||||||
|
* ...
|
||||||
|
* tx.commit();
|
||||||
|
* }
|
||||||
|
* catch (Exception e) {
|
||||||
|
* if (tx!=null) tx.rollback();
|
||||||
|
* throw e;
|
||||||
|
* }
|
||||||
|
* finally {
|
||||||
|
* sess.close();
|
||||||
|
* }
|
||||||
|
* </pre>
|
||||||
|
* <br>
|
||||||
|
* If the <tt>Session</tt> throws an exception, the transaction must be rolled back
|
||||||
|
* and the session discarded. The internal state of the <tt>Session</tt> might not
|
||||||
|
* be consistent with the database after the exception occurs.
|
||||||
|
*
|
||||||
|
* @see SessionFactory
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public interface Session extends Serializable {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Retrieve the entity mode in effect for this session.
|
||||||
|
*
|
||||||
|
* @return The entity mode for this session.
|
||||||
|
*/
|
||||||
|
public EntityMode getEntityMode();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Starts a new Session with the given entity mode in effect. This secondary
|
||||||
|
* Session inherits the connection, transaction, and other context
|
||||||
|
* information from the primary Session. It doesn't need to be flushed
|
||||||
|
* or closed by the developer.
|
||||||
|
*
|
||||||
|
* @param entityMode The entity mode to use for the new session.
|
||||||
|
* @return The new session
|
||||||
|
*/
|
||||||
|
public Session getSession(EntityMode entityMode);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Force this session to flush. Must be called at the end of a
|
||||||
|
* unit of work, before committing the transaction and closing the
|
||||||
|
* session (depending on {@link #setFlushMode flush-mode},
|
||||||
|
* {@link Transaction#commit()} calls this method).
|
||||||
|
* <p/>
|
||||||
|
* <i>Flushing</i> is the process of synchronizing the underlying persistent
|
||||||
|
* store with persistable state held in memory.
|
||||||
|
*
|
||||||
|
* @throws HibernateException Indicates problems flushing the session or
|
||||||
|
* talking to the database.
|
||||||
|
*/
|
||||||
|
public void flush() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the flush mode for this session.
|
||||||
|
* <p/>
|
||||||
|
* The flush mode determines the points at which the session is flushed.
|
||||||
|
* <i>Flushing</i> is the process of synchronizing the underlying persistent
|
||||||
|
* store with persistable state held in memory.
|
||||||
|
* <p/>
|
||||||
|
* For a logically "read only" session, it is reasonable to set the session's
|
||||||
|
* flush mode to {@link FlushMode#MANUAL} at the start of the session (in
|
||||||
|
* order to achieve some extra performance).
|
||||||
|
*
|
||||||
|
* @param flushMode the new flush mode
|
||||||
|
* @see FlushMode
|
||||||
|
*/
|
||||||
|
public void setFlushMode(FlushMode flushMode);
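/*
 * Usage sketch (illustration only, not part of this commit): a logically read-only unit of
 * work can skip automatic flushing entirely, assuming an open Session "sess":
 *
 *     sess.setFlushMode( FlushMode.MANUAL ); // flush only when flush() is called explicitly
 */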
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the current flush mode for this session.
|
||||||
|
*
|
||||||
|
* @return The flush mode
|
||||||
|
*/
|
||||||
|
public FlushMode getFlushMode();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the cache mode.
|
||||||
|
* <p/>
|
||||||
|
* Cache mode determines the manner in which this session can interact with
|
||||||
|
* the second level cache.
|
||||||
|
*
|
||||||
|
* @param cacheMode The new cache mode.
|
||||||
|
*/
|
||||||
|
public void setCacheMode(CacheMode cacheMode);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the current cache mode.
|
||||||
|
*
|
||||||
|
* @return The current cache mode.
|
||||||
|
*/
|
||||||
|
public CacheMode getCacheMode();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the session factory which created this session.
|
||||||
|
*
|
||||||
|
* @return The session factory.
|
||||||
|
* @see SessionFactory
|
||||||
|
|
||||||
|
*/
|
||||||
|
public SessionFactory getSessionFactory();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the JDBC connection of this Session.<br>
|
||||||
|
* <br>
|
||||||
|
* If the session is using aggressive collection release (as in a
|
||||||
|
* CMT environment), it is the application's responsibility to
|
||||||
|
* close the connection returned by this call. Otherwise, the
|
||||||
|
* application should not close the connection.
|
||||||
|
*
|
||||||
|
* @return the JDBC connection in use by the <tt>Session</tt>
|
||||||
|
* @throws HibernateException if the <tt>Session</tt> is disconnected
|
||||||
|
* @deprecated To be replaced with an SPI for performing work against the connection; scheduled for removal in 4.x
|
||||||
|
*/
|
||||||
|
public Connection connection() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* End the session by releasing the JDBC connection and cleaning up. It is
|
||||||
|
* not strictly necessary to close the session but you must at least
|
||||||
|
* {@link #disconnect()} it.
|
||||||
|
*
|
||||||
|
* @return the connection provided by the application or null.
|
||||||
|
* @throws HibernateException Indicates problems cleaning up.
|
||||||
|
*/
|
||||||
|
public Connection close() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cancel the execution of the current query.
|
||||||
|
* <p/>
|
||||||
|
* This is the sole method on session which may be safely called from
|
||||||
|
* another thread.
|
||||||
|
*
|
||||||
|
* @throws HibernateException There was a problem canceling the query
|
||||||
|
*/
|
||||||
|
public void cancelQuery() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the session is still open.
|
||||||
|
*
|
||||||
|
* @return boolean
|
||||||
|
*/
|
||||||
|
public boolean isOpen();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the session is currently connected.
|
||||||
|
*
|
||||||
|
* @return boolean
|
||||||
|
*/
|
||||||
|
public boolean isConnected();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Does this session contain any changes which must be synchronized with
|
||||||
|
* the database? In other words, would any DML operations be executed if
|
||||||
|
* we flushed this session?
|
||||||
|
*
|
||||||
|
* @return True if the session contains pending changes; false otherwise.
|
||||||
|
* @throws HibernateException could not perform dirty checking
|
||||||
|
*/
|
||||||
|
public boolean isDirty() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the identifier value of the given entity as associated with this
|
||||||
|
* session. An exception is thrown if the given entity instance is transient
|
||||||
|
* or detached in relation to this session.
|
||||||
|
*
|
||||||
|
* @param object a persistent instance
|
||||||
|
* @return the identifier
|
||||||
|
* @throws TransientObjectException if the instance is transient or associated with
|
||||||
|
* a different session
|
||||||
|
*/
|
||||||
|
public Serializable getIdentifier(Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if this instance is associated with this <tt>Session</tt>.
|
||||||
|
*
|
||||||
|
* @param object an instance of a persistent class
|
||||||
|
* @return true if the given instance is associated with this <tt>Session</tt>
|
||||||
|
*/
|
||||||
|
public boolean contains(Object object);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove this instance from the session cache. Changes to the instance will
|
||||||
|
* not be synchronized with the database. This operation cascades to associated
|
||||||
|
* instances if the association is mapped with <tt>cascade="evict"</tt>.
|
||||||
|
*
|
||||||
|
* @param object a persistent instance
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void evict(Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the persistent instance of the given entity class with the given identifier,
|
||||||
|
* obtaining the specified lock mode, assuming the instance exists.
|
||||||
|
*
|
||||||
|
* @param theClass a persistent class
|
||||||
|
* @param id a valid identifier of an existing persistent instance of the class
|
||||||
|
* @param lockMode the lock level
|
||||||
|
* @return the persistent instance or proxy
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Object load(Class theClass, Serializable id, LockMode lockMode) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the persistent instance of the given entity class with the given identifier,
|
||||||
|
* obtaining the specified lock mode, assuming the instance exists.
|
||||||
|
*
|
||||||
|
* @param entityName a persistent class
|
||||||
|
* @param id a valid identifier of an existing persistent instance of the class
|
||||||
|
* @param lockMode the lock level
|
||||||
|
* @return the persistent instance or proxy
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Object load(String entityName, Serializable id, LockMode lockMode) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the persistent instance of the given entity class with the given identifier,
|
||||||
|
* assuming that the instance exists. This method might return a proxied instance that
|
||||||
|
* is initialized on-demand, when a non-identifier method is accessed.
|
||||||
|
* <br><br>
|
||||||
|
* You should not use this method to determine if an instance exists (use <tt>get()</tt>
|
||||||
|
* instead). Use this only to retrieve an instance that you assume exists, where non-existence
|
||||||
|
* would be an actual error.
|
||||||
|
*
|
||||||
|
* @param theClass a persistent class
|
||||||
|
* @param id a valid identifier of an existing persistent instance of the class
|
||||||
|
* @return the persistent instance or proxy
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Object load(Class theClass, Serializable id) throws HibernateException;
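/*
 * Usage sketch (illustration only, not part of this commit) contrasting load() and get(),
 * assuming a hypothetical entity "Cat", identifier "catId" and an open Session "sess":
 *
 *     Cat proxy = (Cat) sess.load( Cat.class, catId );    // may return an uninitialized proxy
 *     Cat catOrNull = (Cat) sess.get( Cat.class, catId ); // hits the database; null if no such row
 */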
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the persistent instance of the given entity class with the given identifier,
|
||||||
|
* assuming that the instance exists. This method might return a proxied instance that
|
||||||
|
* is initialized on-demand, when a non-identifier method is accessed.
|
||||||
|
* <br><br>
|
||||||
|
* You should not use this method to determine if an instance exists (use <tt>get()</tt>
|
||||||
|
* instead). Use this only to retrieve an instance that you assume exists, where non-existence
|
||||||
|
* would be an actual error.
|
||||||
|
*
|
||||||
|
* @param entityName a persistent class
|
||||||
|
* @param id a valid identifier of an existing persistent instance of the class
|
||||||
|
* @return the persistent instance or proxy
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Object load(String entityName, Serializable id) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read the persistent state associated with the given identifier into the given transient
|
||||||
|
* instance.
|
||||||
|
*
|
||||||
|
* @param object an "empty" instance of the persistent class
|
||||||
|
* @param id a valid identifier of an existing persistent instance of the class
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void load(Object object, Serializable id) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Persist the state of the given detached instance, reusing the current
|
||||||
|
* identifier value. This operation cascades to associated instances if
|
||||||
|
* the association is mapped with <tt>cascade="replicate"</tt>.
|
||||||
|
*
|
||||||
|
* @param object a detached instance of a persistent class
|
||||||
|
*/
|
||||||
|
public void replicate(Object object, ReplicationMode replicationMode) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Persist the state of the given detached instance, reusing the current
|
||||||
|
* identifier value. This operation cascades to associated instances if
|
||||||
|
* the association is mapped with <tt>cascade="replicate"</tt>.
|
||||||
|
*
|
||||||
|
* @param object a detached instance of a persistent class
|
||||||
|
*/
|
||||||
|
public void replicate(String entityName, Object object, ReplicationMode replicationMode) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Persist the given transient instance, first assigning a generated identifier. (Or
|
||||||
|
* using the current value of the identifier property if the <tt>assigned</tt>
|
||||||
|
* generator is used.) This operation cascades to associated instances if the
|
||||||
|
* association is mapped with <tt>cascade="save-update"</tt>.
|
||||||
|
*
|
||||||
|
* @param object a transient instance of a persistent class
|
||||||
|
* @return the generated identifier
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Serializable save(Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Persist the given transient instance, first assigning a generated identifier. (Or
|
||||||
|
* using the current value of the identifier property if the <tt>assigned</tt>
|
||||||
|
* generator is used.) This operation cascades to associated instances if the
|
||||||
|
* association is mapped with <tt>cascade="save-update"</tt>.
|
||||||
|
*
|
||||||
|
* @param object a transient instance of a persistent class
|
||||||
|
* @return the generated identifier
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Serializable save(String entityName, Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Either {@link #save(Object)} or {@link #update(Object)} the given
|
||||||
|
* instance, depending upon resolution of the unsaved-value checks (see the
|
||||||
|
* manual for discussion of unsaved-value checking).
|
||||||
|
* <p/>
|
||||||
|
* This operation cascades to associated instances if the association is mapped
|
||||||
|
* with <tt>cascade="save-update"</tt>.
|
||||||
|
*
|
||||||
|
* @see Session#save(java.lang.Object)
|
||||||
|
* @see Session#update(Object object)
|
||||||
|
* @param object a transient or detached instance containing new or updated state
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void saveOrUpdate(Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Either {@link #save(String, Object)} or {@link #update(String, Object)}
|
||||||
|
* the given instance, depending upon resolution of the unsaved-value checks
|
||||||
|
* (see the manual for discussion of unsaved-value checking).
|
||||||
|
* <p/>
|
||||||
|
* This operation cascades to associated instances if the association is mapped
|
||||||
|
* with <tt>cascade="save-update"</tt>.
|
||||||
|
*
|
||||||
|
* @see Session#save(String,Object)
|
||||||
|
* @see Session#update(String,Object)
|
||||||
|
* @param object a transient or detached instance containing new or updated state
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void saveOrUpdate(String entityName, Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update the persistent instance with the identifier of the given detached
|
||||||
|
* instance. If there is a persistent instance with the same identifier,
|
||||||
|
* an exception is thrown. This operation cascades to associated instances
|
||||||
|
* if the association is mapped with <tt>cascade="save-update"</tt>.
|
||||||
|
*
|
||||||
|
* @param object a detached instance containing updated state
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void update(Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update the persistent instance with the identifier of the given detached
|
||||||
|
* instance. If there is a persistent instance with the same identifier,
|
||||||
|
* an exception is thrown. This operation cascades to associated instances
|
||||||
|
* if the association is mapped with <tt>cascade="save-update"</tt>.
|
||||||
|
*
|
||||||
|
* @param object a detached instance containing updated state
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void update(String entityName, Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Copy the state of the given object onto the persistent object with the same
|
||||||
|
* identifier. If there is no persistent instance currently associated with
|
||||||
|
* the session, it will be loaded. Return the persistent instance. If the
|
||||||
|
* given instance is unsaved, save a copy of it and return the copy as a newly persistent
|
||||||
|
* instance. The given instance does not become associated with the session.
|
||||||
|
* This operation cascades to associated instances if the association is mapped
|
||||||
|
* with <tt>cascade="merge"</tt>.<br>
|
||||||
|
* <br>
|
||||||
|
* The semantics of this method are defined by JSR-220.
|
||||||
|
*
|
||||||
|
* @param object a detached instance with state to be copied
|
||||||
|
* @return an updated persistent instance
|
||||||
|
*/
|
||||||
|
public Object merge(Object object) throws HibernateException;
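/*
 * Usage sketch (illustration only, not part of this commit): merge() does not attach its
 * argument; the returned instance is the managed copy. "detachedCat" and "sess" are assumed:
 *
 *     Cat managed = (Cat) sess.merge( detachedCat );
 */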
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Copy the state of the given object onto the persistent object with the same
|
||||||
|
* identifier. If there is no persistent instance currently associated with
|
||||||
|
* the session, it will be loaded. Return the persistent instance. If the
|
||||||
|
* given instance is unsaved, save a copy of it and return the copy as a newly persistent
|
||||||
|
* instance. The given instance does not become associated with the session.
|
||||||
|
* This operation cascades to associated instances if the association is mapped
|
||||||
|
* with <tt>cascade="merge"</tt>.<br>
|
||||||
|
* <br>
|
||||||
|
* The semantics of this method are defined by JSR-220.
|
||||||
|
*
|
||||||
|
* @param object a detached instance with state to be copied
|
||||||
|
* @return an updated persistent instance
|
||||||
|
*/
|
||||||
|
public Object merge(String entityName, Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make a transient instance persistent. This operation cascades to associated
|
||||||
|
* instances if the association is mapped with <tt>cascade="persist"</tt>.<br>
|
||||||
|
* <br>
|
||||||
|
* The semantics of this method are defined by JSR-220.
|
||||||
|
*
|
||||||
|
* @param object a transient instance to be made persistent
|
||||||
|
*/
|
||||||
|
public void persist(Object object) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Make a transient instance persistent. This operation cascades to associated
|
||||||
|
* instances if the association is mapped with <tt>cascade="persist"</tt>.<br>
|
||||||
|
* <br>
|
||||||
|
* The semantics of this method are defined by JSR-220.
|
||||||
|
*
|
||||||
|
* @param object a transient instance to be made persistent
|
||||||
|
*/
|
||||||
|
public void persist(String entityName, Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a persistent instance from the datastore. The argument may be
|
||||||
|
* an instance associated with the receiving <tt>Session</tt> or a transient
|
||||||
|
* instance with an identifier associated with existing persistent state.
|
||||||
|
* This operation cascades to associated instances if the association is mapped
|
||||||
|
* with <tt>cascade="delete"</tt>.
|
||||||
|
*
|
||||||
|
* @param object the instance to be removed
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void delete(Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a persistent instance from the datastore. The <b>object</b> argument may be
|
||||||
|
* an instance associated with the receiving <tt>Session</tt> or a transient
|
||||||
|
* instance with an identifier associated with existing persistent state.
|
||||||
|
* This operation cascades to associated instances if the association is mapped
|
||||||
|
* with <tt>cascade="delete"</tt>.
|
||||||
|
*
|
||||||
|
* @param entityName The entity name for the instance to be removed.
|
||||||
|
* @param object the instance to be removed
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void delete(String entityName, Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Obtain the specified lock level upon the given object. This may be used to
|
||||||
|
* perform a version check (<tt>LockMode.READ</tt>), to upgrade to a pessimistic
|
||||||
|
* lock (<tt>LockMode.UPGRADE</tt>), or to simply reassociate a transient instance
|
||||||
|
* with a session (<tt>LockMode.NONE</tt>). This operation cascades to associated
|
||||||
|
* instances if the association is mapped with <tt>cascade="lock"</tt>.
|
||||||
|
*
|
||||||
|
* @param object a persistent or transient instance
|
||||||
|
* @param lockMode the lock level
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void lock(Object object, LockMode lockMode) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Obtain the specified lock level upon the given object. This may be used to
|
||||||
|
* perform a version check (<tt>LockMode.READ</tt>), to upgrade to a pessimistic
|
||||||
|
* lock (<tt>LockMode.UPGRADE</tt>), or to simply reassociate a transient instance
|
||||||
|
* with a session (<tt>LockMode.NONE</tt>). This operation cascades to associated
|
||||||
|
* instances if the association is mapped with <tt>cascade="lock"</tt>.
|
||||||
|
*
|
||||||
|
* @param object a persistent or transient instance
|
||||||
|
* @param lockMode the lock level
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void lock(String entityName, Object object, LockMode lockMode) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Re-read the state of the given instance from the underlying database. It is
|
||||||
|
* inadvisable to use this to implement long-running sessions that span many
|
||||||
|
* business tasks. This method is, however, useful in certain special circumstances.
|
||||||
|
* For example
|
||||||
|
* <ul>
|
||||||
|
* <li>where a database trigger alters the object state upon insert or update
|
||||||
|
* <li>after executing direct SQL (eg. a mass update) in the same session
|
||||||
|
* <li>after inserting a <tt>Blob</tt> or <tt>Clob</tt>
|
||||||
|
* </ul>
|
||||||
|
*
|
||||||
|
* @param object a persistent or detached instance
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void refresh(Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Re-read the state of the given instance from the underlying database, with
|
||||||
|
* the given <tt>LockMode</tt>. It is inadvisable to use this to implement
|
||||||
|
* long-running sessions that span many business tasks. This method is, however,
|
||||||
|
* useful in certain special circumstances.
|
||||||
|
*
|
||||||
|
* @param object a persistent or detached instance
|
||||||
|
* @param lockMode the lock mode to use
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void refresh(Object object, LockMode lockMode) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Determine the current lock mode of the given object.
|
||||||
|
*
|
||||||
|
* @param object a persistent instance
|
||||||
|
* @return the current lock mode
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public LockMode getCurrentLockMode(Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Begin a unit of work and return the associated <tt>Transaction</tt> object.
|
||||||
|
* If a new underlying transaction is required, begin the transaction. Otherwise
|
||||||
|
* continue the new work in the context of the existing underlying transaction.
|
||||||
|
* The class of the returned <tt>Transaction</tt> object is determined by the
|
||||||
|
* property <tt>hibernate.transaction_factory</tt>.
|
||||||
|
*
|
||||||
|
* @return a Transaction instance
|
||||||
|
* @throws HibernateException
|
||||||
|
* @see Transaction
|
||||||
|
*/
|
||||||
|
public Transaction beginTransaction() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the <tt>Transaction</tt> instance associated with this session.
|
||||||
|
* The class of the returned <tt>Transaction</tt> object is determined by the
|
||||||
|
* property <tt>hibernate.transaction_factory</tt>.
|
||||||
|
*
|
||||||
|
* @return a Transaction instance
|
||||||
|
* @throws HibernateException
|
||||||
|
* @see Transaction
|
||||||
|
*/
|
||||||
|
public Transaction getTransaction();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Criteria</tt> instance, for the given entity class,
|
||||||
|
* or a superclass of an entity class.
|
||||||
|
*
|
||||||
|
* @param persistentClass a class, which is persistent, or has persistent subclasses
|
||||||
|
* @return Criteria
|
||||||
|
*/
|
||||||
|
public Criteria createCriteria(Class persistentClass);
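/*
 * Usage sketch (illustration only, not part of this commit), assuming the hypothetical "Cat"
 * entity, a persistent "mother" reference, and org.hibernate.criterion.Restrictions:
 *
 *     List kittens = sess.createCriteria( Cat.class )
 *             .add( Restrictions.eq( "mother", mother ) )
 *             .list();
 */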
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Criteria</tt> instance, for the given entity class,
|
||||||
|
* or a superclass of an entity class, with the given alias.
|
||||||
|
*
|
||||||
|
* @param persistentClass a class, which is persistent, or has persistent subclasses
|
||||||
|
* @return Criteria
|
||||||
|
*/
|
||||||
|
public Criteria createCriteria(Class persistentClass, String alias);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Criteria</tt> instance, for the given entity name.
|
||||||
|
*
|
||||||
|
* @param entityName
|
||||||
|
* @return Criteria
|
||||||
|
*/
|
||||||
|
public Criteria createCriteria(String entityName);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Criteria</tt> instance, for the given entity name,
|
||||||
|
* with the given alias.
|
||||||
|
*
|
||||||
|
* @param entityName
|
||||||
|
* @return Criteria
|
||||||
|
*/
|
||||||
|
public Criteria createCriteria(String entityName, String alias);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new instance of <tt>Query</tt> for the given HQL query string.
|
||||||
|
*
|
||||||
|
* @param queryString a HQL query
|
||||||
|
* @return Query
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Query createQuery(String queryString) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new instance of <tt>SQLQuery</tt> for the given SQL query string.
|
||||||
|
*
|
||||||
|
* @param queryString a SQL query
|
||||||
|
* @return SQLQuery
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public SQLQuery createSQLQuery(String queryString) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new instance of <tt>Query</tt> for the given collection and filter string.
|
||||||
|
*
|
||||||
|
* @param collection a persistent collection
|
||||||
|
* @param queryString a Hibernate query
|
||||||
|
* @return Query
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Query createFilter(Object collection, String queryString) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Obtain an instance of <tt>Query</tt> for a named query string defined in the
|
||||||
|
* mapping file.
|
||||||
|
*
|
||||||
|
* @param queryName the name of a query defined externally
|
||||||
|
* @return Query
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Query getNamedQuery(String queryName) throws HibernateException;
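// Illustrative sketch, assuming a named query "findCatsByColor" has been declared in a mapping
// file (the query name and its parameter are hypothetical):
//
//     List cats = session.getNamedQuery( "findCatsByColor" )
//             .setParameter( "color", color )
//             .list();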
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Completely clear the session. Evict all loaded instances and cancel all pending
|
||||||
|
* saves, updates and deletions. Do not close open iterators or instances of
|
||||||
|
* <tt>ScrollableResults</tt>.
|
||||||
|
*/
|
||||||
|
public void clear();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the persistent instance of the given entity class with the given identifier,
|
||||||
|
* or null if there is no such persistent instance. (If the instance is already associated
|
||||||
|
* with the session, return that instance. This method never returns an uninitialized instance.)
|
||||||
|
* Obtain the specified lock mode if the instance exists.
|
||||||
|
*
|
||||||
|
* @param clazz a persistent class
|
||||||
|
* @param id an identifier
|
||||||
|
* @return a persistent instance or null
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Object get(Class clazz, Serializable id) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the persistent instance of the given entity class with the given identifier,
|
||||||
|
* or null if there is no such persistent instance. (If the instance is already associated
|
||||||
|
* with the session, return that instance. This method never returns an uninitialized instance.)
|
||||||
|
* Obtain the specified lock mode if the instance exists.
|
||||||
|
*
|
||||||
|
* @param clazz a persistent class
|
||||||
|
* @param id an identifier
|
||||||
|
* @param lockMode the lock mode
|
||||||
|
* @return a persistent instance or null
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Object get(Class clazz, Serializable id, LockMode lockMode) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the persistent instance of the given named entity with the given identifier,
|
||||||
|
* or null if there is no such persistent instance. (If the instance is already associated
|
||||||
|
* with the session, return that instance. This method never returns an uninitialized instance.)
|
||||||
|
*
|
||||||
|
* @param entityName the entity name
|
||||||
|
* @param id an identifier
|
||||||
|
* @return a persistent instance or null
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Object get(String entityName, Serializable id) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the persistent instance of the given entity class with the given identifier,
|
||||||
|
* or null if there is no such persistent instance. (If the instance is already associated
|
||||||
|
* with the session, return that instance. This method never returns an uninitialized instance.)
|
||||||
|
* Obtain the specified lock mode if the instance exists.
|
||||||
|
*
|
||||||
|
* @param entityName the entity name
|
||||||
|
* @param id an identifier
|
||||||
|
* @param lockMode the lock mode
|
||||||
|
* @return a persistent instance or null
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public Object get(String entityName, Serializable id, LockMode lockMode) throws HibernateException;
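// Illustrative sketch of get() with a pessimistic lock (the "Cat" class and "catId" variable are
// assumptions, not part of this file):
//
//     Cat cat = (Cat) session.get( Cat.class, catId, LockMode.UPGRADE );
//     if ( cat == null ) {
//         // no such row; get() returns null rather than throwing
//     }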
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the entity name for a persistent entity
|
||||||
|
*
|
||||||
|
* @param object a persistent entity
|
||||||
|
* @return the entity name
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public String getEntityName(Object object) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enable the named filter for this current session.
|
||||||
|
*
|
||||||
|
* @param filterName The name of the filter to be enabled.
|
||||||
|
* @return The Filter instance representing the enabled filter.
|
||||||
|
*/
|
||||||
|
public Filter enableFilter(String filterName);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Retrieve a currently enabled filter by name.
|
||||||
|
*
|
||||||
|
* @param filterName The name of the filter to be retrieved.
|
||||||
|
* @return The Filter instance representing the enabled fiter.
|
||||||
|
*/
|
||||||
|
public Filter getEnabledFilter(String filterName);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Disable the named filter for the current session.
|
||||||
|
*
|
||||||
|
* @param filterName The name of the filter to be disabled.
|
||||||
|
*/
|
||||||
|
public void disableFilter(String filterName);
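// Illustrative filter sketch (the filter name "effectiveDate", its parameter and the "Employee"
// entity are hypothetical and would need to be declared in the mappings):
//
//     session.enableFilter( "effectiveDate" )
//             .setParameter( "asOfDate", new Date() );
//     List results = session.createQuery( "from Employee" ).list();
//     session.disableFilter( "effectiveDate" );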
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the statistics for this session.
|
||||||
|
*/
|
||||||
|
public SessionStatistics getStatistics();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set an unmodified persistent object to read only mode, or a read only
|
||||||
|
* object to modifiable mode. In read only mode, no snapshot is maintained
|
||||||
|
* and the instance is never dirty checked.
|
||||||
|
*
|
||||||
|
* @see Query#setReadOnly(boolean)
|
||||||
|
*/
|
||||||
|
public void setReadOnly(Object entity, boolean readOnly);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Disconnect the <tt>Session</tt> from the current JDBC connection. If
|
||||||
|
* the connection was obtained by Hibernate close it and return it to
|
||||||
|
* the connection pool; otherwise, return it to the application.
|
||||||
|
* <p/>
|
||||||
|
* This is used by applications which supply JDBC connections to Hibernate
|
||||||
|
* and which require long-sessions (or long-conversations)
|
||||||
|
* <p/>
|
||||||
|
* Note that disconnect() called on a session where the connection was
|
||||||
|
* retrieved by Hibernate through its configured
|
||||||
|
* {@link org.hibernate.connection.ConnectionProvider} has no effect,
|
||||||
|
* provided {@link ConnectionReleaseMode#ON_CLOSE} is not in effect.
|
||||||
|
*
|
||||||
|
* @return the application-supplied connection or <tt>null</tt>
|
||||||
|
* @see #reconnect(Connection)
|
||||||
|
* @see #reconnect()
|
||||||
|
*/
|
||||||
|
Connection disconnect() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Obtain a new JDBC connection. This is used by applications which
|
||||||
|
* require long transactions and do not supply connections to the
|
||||||
|
* session.
|
||||||
|
*
|
||||||
|
* @see #disconnect()
|
||||||
|
* @deprecated Manual reconnection is only needed in the case of
|
||||||
|
* application-supplied connections, in which case the
|
||||||
|
* {@link #reconnect(java.sql.Connection)} form should be used.
|
||||||
|
*/
|
||||||
|
void reconnect() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reconnect to the given JDBC connection. This is used by applications
|
||||||
|
* which require long transactions and use application-supplied connections.
|
||||||
|
*
|
||||||
|
* @param connection a JDBC connection
|
||||||
|
* @see #disconnect()
|
||||||
|
*/
|
||||||
|
void reconnect(Connection connection) throws HibernateException;
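// Illustrative long-conversation sketch with an application-supplied connection (the
// "acquireConnection()" helper is hypothetical):
//
//     Connection userConnection = acquireConnection();
//     Session session = sessionFactory.openSession( userConnection );
//     // ... first request of the conversation ...
//     userConnection = session.disconnect();   // hand the connection back between requests
//     // ... later ...
//     session.reconnect( userConnection );
//     // ... final request, then session.close() ...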
|
||||||
|
}
|
|
@ -0,0 +1,22 @@
//$Id: SessionException.java 9024 2006-01-11 22:38:24Z steveebersole $
package org.hibernate;

/**
 * Thrown when the user calls a method of a {@link Session} that is in an
 * inappropriate state for the given call (for example, the session
 * is closed or disconnected).
 *
 * @author Gavin King
 */
public class SessionException extends HibernateException {

	/**
	 * Constructs a new SessionException with the given message.
	 *
	 * @param message The message indicating the specific problem.
	 */
	public SessionException(String message) {
		super( message );
	}

}
|
|
@ -0,0 +1,224 @@
|
||||||
|
//$Id: SessionFactory.java 8754 2005-12-05 23:36:59Z steveebersole $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
import java.sql.Connection;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Set;
|
||||||
|
|
||||||
|
import javax.naming.Referenceable;
|
||||||
|
|
||||||
|
import org.hibernate.metadata.ClassMetadata;
|
||||||
|
import org.hibernate.metadata.CollectionMetadata;
|
||||||
|
import org.hibernate.stat.Statistics;
|
||||||
|
import org.hibernate.engine.FilterDefinition;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates <tt>Session</tt>s. Usually an application has a single <tt>SessionFactory</tt>.
|
||||||
|
* Threads servicing client requests obtain <tt>Session</tt>s from the factory.<br>
|
||||||
|
* <br>
|
||||||
|
* Implementors must be threadsafe.<br>
|
||||||
|
* <br>
|
||||||
|
* <tt>SessionFactory</tt>s are immutable. The behaviour of a <tt>SessionFactory</tt> is
|
||||||
|
* controlled by properties supplied at configuration time. These properties are defined
|
||||||
|
* on <tt>Environment</tt>.
|
||||||
|
*
|
||||||
|
* @see Session
|
||||||
|
* @see org.hibernate.cfg.Environment
|
||||||
|
* @see org.hibernate.cfg.Configuration
|
||||||
|
* @see org.hibernate.connection.ConnectionProvider
|
||||||
|
* @see org.hibernate.transaction.TransactionFactory
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public interface SessionFactory extends Referenceable, Serializable {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Open a <tt>Session</tt> on the given connection.
|
||||||
|
* <p>
|
||||||
|
* Note that the second-level cache will be disabled if you
|
||||||
|
* supply a JDBC connection. Hibernate will not be able to track
|
||||||
|
* any statements you might have executed in the same transaction.
|
||||||
|
* Consider implementing your own <tt>ConnectionProvider</tt>.
|
||||||
|
*
|
||||||
|
* @param connection a connection provided by the application.
|
||||||
|
* @return Session
|
||||||
|
*/
|
||||||
|
public org.hibernate.classic.Session openSession(Connection connection);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create database connection and open a <tt>Session</tt> on it, specifying an
|
||||||
|
* interceptor.
|
||||||
|
*
|
||||||
|
* @param interceptor a session-scoped interceptor
|
||||||
|
* @return Session
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public org.hibernate.classic.Session openSession(Interceptor interceptor) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Open a <tt>Session</tt> on the given connection, specifying an interceptor.
|
||||||
|
* <p>
|
||||||
|
* Note that the second-level cache will be disabled if you
|
||||||
|
* supply a JDBC connection. Hibernate will not be able to track
|
||||||
|
* any statements you might have executed in the same transaction.
|
||||||
|
* Consider implementing your own <tt>ConnectionProvider</tt>.
|
||||||
|
*
|
||||||
|
* @param connection a connection provided by the application.
|
||||||
|
* @param interceptor a session-scoped interceptor
|
||||||
|
* @return Session
|
||||||
|
*/
|
||||||
|
public org.hibernate.classic.Session openSession(Connection connection, Interceptor interceptor);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create database connection and open a <tt>Session</tt> on it.
|
||||||
|
*
|
||||||
|
* @return Session
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public org.hibernate.classic.Session openSession() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Obtains the current session. The definition of what exactly "current"
|
||||||
|
* means is controlled by the {@link org.hibernate.context.CurrentSessionContext} impl configured
|
||||||
|
* for use.
|
||||||
|
* <p/>
|
||||||
|
* Note that for backwards compatibility, if a {@link org.hibernate.context.CurrentSessionContext}
|
||||||
|
* is not configured but a JTA {@link org.hibernate.transaction.TransactionManagerLookup}
|
||||||
|
* is configured this will default to the {@link org.hibernate.context.JTASessionContext}
|
||||||
|
* impl.
|
||||||
|
*
|
||||||
|
* @return The current session.
|
||||||
|
* @throws HibernateException Indicates an issue locating a suitable current session.
|
||||||
|
*/
|
||||||
|
public org.hibernate.classic.Session getCurrentSession() throws HibernateException;
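// Illustrative contextual-session sketch (assumes a CurrentSessionContext, e.g. "thread" or JTA,
// has been configured; "sessionFactory" and "newCat" are assumptions, not part of this file):
//
//     Session session = sessionFactory.getCurrentSession();
//     session.beginTransaction();
//     session.save( newCat );
//     session.getTransaction().commit();   // a "thread" context typically closes the session on commit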
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the <tt>ClassMetadata</tt> associated with the given entity class
|
||||||
|
*
|
||||||
|
* @see org.hibernate.metadata.ClassMetadata
|
||||||
|
*/
|
||||||
|
public ClassMetadata getClassMetadata(Class persistentClass) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the <tt>ClassMetadata</tt> associated with the given entity name
|
||||||
|
*
|
||||||
|
* @see org.hibernate.metadata.ClassMetadata
|
||||||
|
* @since 3.0
|
||||||
|
*/
|
||||||
|
public ClassMetadata getClassMetadata(String entityName) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the <tt>CollectionMetadata</tt> associated with the named collection role
|
||||||
|
*
|
||||||
|
* @see org.hibernate.metadata.CollectionMetadata
|
||||||
|
*/
|
||||||
|
public CollectionMetadata getCollectionMetadata(String roleName) throws HibernateException;
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all <tt>ClassMetadata</tt> as a <tt>Map</tt> from entity name <tt>String</tt>
|
||||||
|
* to metadata object
|
||||||
|
*
|
||||||
|
* @see org.hibernate.metadata.ClassMetadata
|
||||||
|
* @return a map from <tt>String</tt> entity name to <tt>ClassMetadata</tt>
|
||||||
|
* @since 3.0 changed key from <tt>Class</tt> to <tt>String</tt>
|
||||||
|
*/
|
||||||
|
public Map getAllClassMetadata() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all <tt>CollectionMetadata</tt> as a <tt>Map</tt> from role name
|
||||||
|
* to metadata object
|
||||||
|
*
|
||||||
|
* @see org.hibernate.metadata.CollectionMetadata
|
||||||
|
* @return a map from <tt>String</tt> to <tt>CollectionMetadata</tt>
|
||||||
|
*/
|
||||||
|
public Map getAllCollectionMetadata() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the statistics for this session factory
|
||||||
|
*/
|
||||||
|
public Statistics getStatistics();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Destroy this <tt>SessionFactory</tt> and release all resources (caches,
|
||||||
|
* connection pools, etc). It is the responsibility of the application
|
||||||
|
* to ensure that there are no open <tt>Session</tt>s before calling
|
||||||
|
* <tt>close()</tt>.
|
||||||
|
*/
|
||||||
|
public void close() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Was this <tt>SessionFactory</tt> already closed?
|
||||||
|
*/
|
||||||
|
public boolean isClosed();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Evict all entries from the second-level cache. This method occurs outside
|
||||||
|
* of any transaction; it performs an immediate "hard" remove, so does not respect
|
||||||
|
* any transaction isolation semantics of the usage strategy. Use with care.
|
||||||
|
*/
|
||||||
|
public void evict(Class persistentClass) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Evict an entry from the second-level cache. This method occurs outside
|
||||||
|
* of any transaction; it performs an immediate "hard" remove, so does not respect
|
||||||
|
* any transaction isolation semantics of the usage strategy. Use with care.
|
||||||
|
*/
|
||||||
|
public void evict(Class persistentClass, Serializable id) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Evict all entries from the second-level cache. This method occurs outside
|
||||||
|
* of any transaction; it performs an immediate "hard" remove, so does not respect
|
||||||
|
* any transaction isolation semantics of the usage strategy. Use with care.
|
||||||
|
*/
|
||||||
|
public void evictEntity(String entityName) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Evict an entry from the second-level cache. This method occurs outside
|
||||||
|
* of any transaction; it performs an immediate "hard" remove, so does not respect
|
||||||
|
* any transaction isolation semantics of the usage strategy. Use with care.
|
||||||
|
*/
|
||||||
|
public void evictEntity(String entityName, Serializable id) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Evict all entries from the second-level cache. This method occurs outside
|
||||||
|
* of any transaction; it performs an immediate "hard" remove, so does not respect
|
||||||
|
* any transaction isolation semantics of the usage strategy. Use with care.
|
||||||
|
*/
|
||||||
|
public void evictCollection(String roleName) throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Evict an entry from the second-level cache. This method occurs outside
|
||||||
|
* of any transaction; it performs an immediate "hard" remove, so does not respect
|
||||||
|
* any transaction isolation semantics of the usage strategy. Use with care.
|
||||||
|
*/
|
||||||
|
public void evictCollection(String roleName, Serializable id) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Evict any query result sets cached in the default query cache region.
|
||||||
|
*/
|
||||||
|
public void evictQueries() throws HibernateException;
|
||||||
|
/**
|
||||||
|
* Evict any query result sets cached in the named query cache region.
|
||||||
|
*/
|
||||||
|
public void evictQueries(String cacheRegion) throws HibernateException;
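// Illustrative second-level cache eviction sketch (the "Cat" entity, its "kittens" role and the
// "catId" variable are hypothetical examples):
//
//     sessionFactory.evict( Cat.class, catId );                      // one entity entry
//     sessionFactory.evictEntity( "org.example.Cat" );               // every entry for an entity
//     sessionFactory.evictCollection( "org.example.Cat.kittens" );   // a collection role
//     sessionFactory.evictQueries();                                 // default query cache region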
|
||||||
|
/**
|
||||||
|
* Get a new stateless session.
|
||||||
|
*/
|
||||||
|
public StatelessSession openStatelessSession();
|
||||||
|
/**
|
||||||
|
* Get a new stateless session for the given JDBC connection.
|
||||||
|
*/
|
||||||
|
public StatelessSession openStatelessSession(Connection connection);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Obtain a set of the names of all filters defined on this SessionFactory.
|
||||||
|
*
|
||||||
|
* @return The set of filter names.
|
||||||
|
*/
|
||||||
|
public Set getDefinedFilterNames();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Obtain the definition of a filter by name.
|
||||||
|
*
|
||||||
|
* @param filterName The name of the filter for which to obtain the definition.
|
||||||
|
* @return The filter definition.
|
||||||
|
* @throws HibernateException If no filter defined with the given name.
|
||||||
|
*/
|
||||||
|
public FilterDefinition getFilterDefinition(String filterName) throws HibernateException;
|
||||||
|
}
|
|
@ -0,0 +1,45 @@
|
||||||
|
//$Id: StaleObjectStateException.java 5685 2005-02-12 07:19:50Z steveebersole $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
|
||||||
|
import org.hibernate.pretty.MessageHelper;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A <tt>StaleStateException</tt> that carries information
|
||||||
|
* about a particular entity instance that was the source
|
||||||
|
* of the failure.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class StaleObjectStateException extends StaleStateException {
|
||||||
|
private final String entityName;
|
||||||
|
private final Serializable identifier;
|
||||||
|
|
||||||
|
public StaleObjectStateException(String persistentClass, Serializable identifier) {
|
||||||
|
super("Row was updated or deleted by another transaction (or unsaved-value mapping was incorrect)");
|
||||||
|
this.entityName = persistentClass;
|
||||||
|
this.identifier = identifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getEntityName() {
|
||||||
|
return entityName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Serializable getIdentifier() {
|
||||||
|
return identifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getMessage() {
|
||||||
|
return super.getMessage() + ": " +
|
||||||
|
MessageHelper.infoString(entityName, identifier);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,21 @@
//$Id: StaleStateException.java 5685 2005-02-12 07:19:50Z steveebersole $
package org.hibernate;

/**
 * Thrown when a version number or timestamp check failed, indicating that the
 * <tt>Session</tt> contained stale data (when using long transactions
 * with versioning). Also occurs if we try to delete or update a row that does
 * not exist.<br>
 * <br>
 * Note that this exception often indicates that the user failed to specify the
 * correct <tt>unsaved-value</tt> strategy for a class!
 *
 * @see StaleObjectStateException
 * @author Gavin King
 */
public class StaleStateException extends HibernateException {

	public StaleStateException(String s) {
		super(s);
	}
}
|
|
@ -0,0 +1,217 @@
|
||||||
|
//$Id: StatelessSession.java 9705 2006-03-28 19:59:31Z steve.ebersole@jboss.com $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
import java.sql.Connection;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A command-oriented API for performing bulk operations
|
||||||
|
* against a database.<br>
|
||||||
|
* <br>
|
||||||
|
* A stateless session does not implement a first-level cache nor
|
||||||
|
* interact with any second-level cache, nor does it implement
|
||||||
|
* transactional write-behind or automatic dirty checking, nor do
|
||||||
|
* operations cascade to associated instances. Collections are
|
||||||
|
* ignored by a stateless session. Operations performed via a
|
||||||
|
* stateless session bypass Hibernate's event model and
|
||||||
|
* interceptors. Stateless sessions are vulnerable to data
|
||||||
|
* aliasing effects, due to the lack of a first-level cache.<br>
|
||||||
|
* <br>
|
||||||
|
* For certain kinds of transactions, a stateless session may
|
||||||
|
* perform slightly faster than a stateful session.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public interface StatelessSession extends Serializable {
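// Illustrative bulk-insert sketch through a stateless session (the "sessionFactory", "lines"
// collection and "LogLine" entity are assumptions, not part of this file):
//
//     StatelessSession ss = sessionFactory.openStatelessSession();
//     Transaction tx = ss.beginTransaction();
//     for ( Iterator it = lines.iterator(); it.hasNext(); ) {
//         ss.insert( (LogLine) it.next() );   // no first-level cache, no cascades, no dirty checking
//     }
//     tx.commit();
//     ss.close();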
|
||||||
|
/**
|
||||||
|
* Close the stateless session and release the JDBC connection.
|
||||||
|
*/
|
||||||
|
public void close();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Insert a row.
|
||||||
|
*
|
||||||
|
* @param entity a new transient instance
|
||||||
|
*/
|
||||||
|
public Serializable insert(Object entity);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Insert a row.
|
||||||
|
*
|
||||||
|
* @param entityName The entityName for the entity to be inserted
|
||||||
|
* @param entity a new transient instance
|
||||||
|
* @return the identifier of the instance
|
||||||
|
*/
|
||||||
|
public Serializable insert(String entityName, Object entity);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update a row.
|
||||||
|
*
|
||||||
|
* @param entity a detached entity instance
|
||||||
|
*/
|
||||||
|
public void update(Object entity);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update a row.
|
||||||
|
*
|
||||||
|
* @param entityName The entityName for the entity to be updated
|
||||||
|
* @param entity a detached entity instance
|
||||||
|
*/
|
||||||
|
public void update(String entityName, Object entity);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a row.
|
||||||
|
*
|
||||||
|
* @param entity a detached entity instance
|
||||||
|
*/
|
||||||
|
public void delete(Object entity);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a row.
|
||||||
|
*
|
||||||
|
* @param entityName The entityName for the entity to be deleted
|
||||||
|
* @param entity a detached entity instance
|
||||||
|
*/
|
||||||
|
public void delete(String entityName, Object entity);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Retrieve a row.
|
||||||
|
*
|
||||||
|
* @return a detached entity instance
|
||||||
|
*/
|
||||||
|
public Object get(String entityName, Serializable id);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Retrieve a row.
|
||||||
|
*
|
||||||
|
* @return a detached entity instance
|
||||||
|
*/
|
||||||
|
public Object get(Class entityClass, Serializable id);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Retrieve a row, obtaining the specified lock mode.
|
||||||
|
*
|
||||||
|
* @return a detached entity instance
|
||||||
|
*/
|
||||||
|
public Object get(String entityName, Serializable id, LockMode lockMode);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Retrieve a row, obtaining the specified lock mode.
|
||||||
|
*
|
||||||
|
* @return a detached entity instance
|
||||||
|
*/
|
||||||
|
public Object get(Class entityClass, Serializable id, LockMode lockMode);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refresh the entity instance state from the database.
|
||||||
|
*
|
||||||
|
* @param entity The entity to be refreshed.
|
||||||
|
*/
|
||||||
|
public void refresh(Object entity);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refresh the entity instance state from the database.
|
||||||
|
*
|
||||||
|
* @param entityName The entityName for the entity to be refreshed.
|
||||||
|
* @param entity The entity to be refreshed.
|
||||||
|
*/
|
||||||
|
public void refresh(String entityName, Object entity);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refresh the entity instance state from the database.
|
||||||
|
*
|
||||||
|
* @param entity The entity to be refreshed.
|
||||||
|
* @param lockMode The LockMode to be applied.
|
||||||
|
*/
|
||||||
|
public void refresh(Object entity, LockMode lockMode);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refresh the entity instance state from the database.
|
||||||
|
*
|
||||||
|
* @param entityName The entityName for the entity to be refreshed.
|
||||||
|
* @param entity The entity to be refreshed.
|
||||||
|
* @param lockMode The LockMode to be applied.
|
||||||
|
*/
|
||||||
|
public void refresh(String entityName, Object entity, LockMode lockMode);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new instance of <tt>Query</tt> for the given HQL query string.
|
||||||
|
* Entities returned by the query are detached.
|
||||||
|
*/
|
||||||
|
public Query createQuery(String queryString);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Obtain an instance of <tt>Query</tt> for a named query string defined in
|
||||||
|
* the mapping file. Entities returned by the query are detached.
|
||||||
|
*/
|
||||||
|
public Query getNamedQuery(String queryName);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Criteria</tt> instance, for the given entity class,
|
||||||
|
* or a superclass of an entity class. Entities returned by the query are
|
||||||
|
* detached.
|
||||||
|
*
|
||||||
|
* @param persistentClass a class, which is persistent, or has persistent subclasses
|
||||||
|
* @return Criteria
|
||||||
|
*/
|
||||||
|
public Criteria createCriteria(Class persistentClass);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Criteria</tt> instance, for the given entity class,
|
||||||
|
* or a superclass of an entity class, with the given alias.
|
||||||
|
* Entities returned by the query are detached.
|
||||||
|
*
|
||||||
|
* @param persistentClass a class, which is persistent, or has persistent subclasses
|
||||||
|
* @return Criteria
|
||||||
|
*/
|
||||||
|
public Criteria createCriteria(Class persistentClass, String alias);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Criteria</tt> instance, for the given entity name.
|
||||||
|
* Entities returned by the query are detached.
|
||||||
|
*
|
||||||
|
* @param entityName
|
||||||
|
* @return Criteria
|
||||||
|
*/
|
||||||
|
public Criteria createCriteria(String entityName);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new <tt>Criteria</tt> instance, for the given entity name,
|
||||||
|
* with the given alias. Entities returned by the query are detached.
|
||||||
|
*
|
||||||
|
* @param entityName
|
||||||
|
* @return Criteria
|
||||||
|
*/
|
||||||
|
public Criteria createCriteria(String entityName, String alias);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new instance of <tt>SQLQuery</tt> for the given SQL query string.
|
||||||
|
* Entities returned by the query are detached.
|
||||||
|
*
|
||||||
|
* @param queryString a SQL query
|
||||||
|
* @return SQLQuery
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public SQLQuery createSQLQuery(String queryString) throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Begin a Hibernate transaction.
|
||||||
|
*/
|
||||||
|
public Transaction beginTransaction();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the current Hibernate transaction.
|
||||||
|
*/
|
||||||
|
public Transaction getTransaction();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the current JDBC connection associated with this
|
||||||
|
* instance.<br>
|
||||||
|
* <br>
|
||||||
|
* If the session is using aggressive connection release (as in a
|
||||||
|
* CMT environment), it is the application's responsibility to
|
||||||
|
* close the connection returned by this call. Otherwise, the
|
||||||
|
* application should not close the connection.
|
||||||
|
*/
|
||||||
|
public Connection connection();
|
||||||
|
}
|
|
@ -0,0 +1,105 @@
|
||||||
|
//$Id: Transaction.java 9595 2006-03-10 18:14:21Z steve.ebersole@jboss.com $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import javax.transaction.Synchronization;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Allows the application to define units of work, while
|
||||||
|
* maintaining abstraction from the underlying transaction
|
||||||
|
* implementation (eg. JTA, JDBC).<br>
|
||||||
|
* <br>
|
||||||
|
* A transaction is associated with a <tt>Session</tt> and is
|
||||||
|
* usually instantiated by a call to <tt>Session.beginTransaction()</tt>.
|
||||||
|
* A single session might span multiple transactions since
|
||||||
|
* the notion of a session (a conversation between the application
|
||||||
|
* and the datastore) is of coarser granularity than the notion of
|
||||||
|
* a transaction. However, it is intended that there be at most one
|
||||||
|
* uncommitted <tt>Transaction</tt> associated with a particular
|
||||||
|
* <tt>Session</tt> at any time.<br>
|
||||||
|
* <br>
|
||||||
|
* Implementors are not intended to be threadsafe.
|
||||||
|
*
|
||||||
|
* @see Session#beginTransaction()
|
||||||
|
* @see org.hibernate.transaction.TransactionFactory
|
||||||
|
* @author Anton van Straaten
|
||||||
|
*/
|
||||||
|
public interface Transaction {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Begin a new transaction.
|
||||||
|
*/
|
||||||
|
public void begin() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Flush the associated <tt>Session</tt> and end the unit of work (unless
|
||||||
|
* we are in {@link FlushMode#NEVER}).
|
||||||
|
* <p/>
|
||||||
|
* This method will commit the underlying transaction if and only
|
||||||
|
* if the underlying transaction was initiated by this object.
|
||||||
|
*
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void commit() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Force the underlying transaction to roll back.
|
||||||
|
*
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void rollback() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Was this transaction rolled back or set to rollback only?
|
||||||
|
* <p/>
|
||||||
|
* This only accounts for actions initiated from this local transaction.
|
||||||
|
* If, for example, the underlying transaction is forced to rollback via
|
||||||
|
* some other means, this method still reports false because the rollback
|
||||||
|
* was not initiated from here.
|
||||||
|
*
|
||||||
|
* @return boolean True if the transaction was rolled back via this
|
||||||
|
* local transaction; false otherwise.
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public boolean wasRolledBack() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if this transaction was successfully committed.
|
||||||
|
* <p/>
|
||||||
|
* This method could return <tt>false</tt> even after successful invocation
|
||||||
|
* of {@link #commit}. As an example, JTA based strategies no-op on
|
||||||
|
* {@link #commit} calls if they did not start the transaction; in that case,
|
||||||
|
* they also report {@link #wasCommitted} as false.
|
||||||
|
*
|
||||||
|
* @return boolean True if the transaction was (unequivocally) committed
|
||||||
|
* via this local transaction; false otherwise.
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public boolean wasCommitted() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Is this transaction still active?
|
||||||
|
* <p/>
|
||||||
|
* Again, this only returns information in relation to the
|
||||||
|
* local transaction, not the actual underlying transaction.
|
||||||
|
*
|
||||||
|
* @return boolean True if this local transaction is still active.
|
||||||
|
*/
|
||||||
|
public boolean isActive() throws HibernateException;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a user synchronization callback for this transaction.
|
||||||
|
*
|
||||||
|
* @param synchronization The Synchronization callback to register.
|
||||||
|
* @throws HibernateException
|
||||||
|
*/
|
||||||
|
public void registerSynchronization(Synchronization synchronization)
|
||||||
|
throws HibernateException;
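// Illustrative Synchronization-callback sketch (an anonymous javax.transaction.Synchronization;
// the println is only a placeholder for application logic):
//
//     tx.registerSynchronization( new Synchronization() {
//         public void beforeCompletion() {}
//         public void afterCompletion(int status) {
//             System.out.println( "transaction completed with status " + status );
//         }
//     } );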
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the transaction timeout for any transaction started by
|
||||||
|
* a subsequent call to <tt>begin()</tt> on this instance.
|
||||||
|
*
|
||||||
|
* @param seconds The number of seconds before a timeout.
|
||||||
|
*/
|
||||||
|
public void setTimeout(int seconds);
|
||||||
|
}
|
|
@ -0,0 +1,22 @@
//$Id: TransactionException.java 10312 2006-08-23 12:43:54Z steve.ebersole@jboss.com $
package org.hibernate;

/**
 * Indicates that a transaction could not be begun, committed
 * or rolled back.
 *
 * @see Transaction
 * @author Anton van Straaten
 */
public class TransactionException extends HibernateException {

	public TransactionException(String message, Throwable root) {
		super( message, root );
	}

	public TransactionException(String message) {
		super( message );
	}

}
|
|
@ -0,0 +1,23 @@
//$Id: TransientObjectException.java 6877 2005-05-23 15:00:25Z oneovthafew $
package org.hibernate;

/**
 * Thrown when the user passes a transient instance to a <tt>Session</tt>
 * method that expects a persistent instance.
 *
 * @author Gavin King
 */
public class TransientObjectException extends HibernateException {

	public TransientObjectException(String s) {
		super( s );
	}

}
|
|
@ -0,0 +1,21 @@
//$Id: $
package org.hibernate;

/**
 * Used when a user provided type does not match the expected one
 *
 * @author Emmanuel Bernard
 */
public class TypeMismatchException extends HibernateException {

	public TypeMismatchException(Throwable root) {
		super( root );
	}

	public TypeMismatchException(String s) {
		super( s );
	}

	public TypeMismatchException(String string, Throwable root) {
		super( string, root );
	}
}
|
|
@ -0,0 +1,52 @@
|
||||||
|
//$Id: UnresolvableObjectException.java 5685 2005-02-12 07:19:50Z steveebersole $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
|
||||||
|
import org.hibernate.pretty.MessageHelper;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Thrown when Hibernate could not resolve an object by id, especially when
|
||||||
|
* loading an association.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class UnresolvableObjectException extends HibernateException {
|
||||||
|
|
||||||
|
private final Serializable identifier;
|
||||||
|
private final String entityName;
|
||||||
|
|
||||||
|
public UnresolvableObjectException(Serializable identifier, String clazz) {
|
||||||
|
this("No row with the given identifier exists", identifier, clazz);
|
||||||
|
}
|
||||||
|
UnresolvableObjectException(String message, Serializable identifier, String clazz) {
|
||||||
|
super(message);
|
||||||
|
this.identifier = identifier;
|
||||||
|
this.entityName = clazz;
|
||||||
|
}
|
||||||
|
public Serializable getIdentifier() {
|
||||||
|
return identifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getMessage() {
|
||||||
|
return super.getMessage() + ": " +
|
||||||
|
MessageHelper.infoString(entityName, identifier);
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getEntityName() {
|
||||||
|
return entityName;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static void throwIfNull(Object o, Serializable id, String clazz)
|
||||||
|
throws UnresolvableObjectException {
|
||||||
|
if (o==null) throw new UnresolvableObjectException(id, clazz);
|
||||||
|
}
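// Illustrative use of the helper above (the "session" and "catId" variables and the entity name
// are assumptions, not part of this file):
//
//     Object cat = session.get( "org.example.Cat", catId );
//     UnresolvableObjectException.throwIfNull( cat, catId, "org.example.Cat" );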
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,47 @@
|
||||||
|
//$Id: WrongClassException.java 5685 2005-02-12 07:19:50Z steveebersole $
|
||||||
|
package org.hibernate;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Thrown when <tt>Session.load()</tt> selects a row with
|
||||||
|
* the given primary key (identifier value) but the row's
|
||||||
|
* discriminator value specifies a subclass that is not
|
||||||
|
* assignable to the class requested by the user.
|
||||||
|
*
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public class WrongClassException extends HibernateException {
|
||||||
|
|
||||||
|
private final Serializable identifier;
|
||||||
|
private final String entityName;
|
||||||
|
|
||||||
|
public WrongClassException(String msg, Serializable identifier, String clazz) {
|
||||||
|
super(msg);
|
||||||
|
this.identifier = identifier;
|
||||||
|
this.entityName = clazz;
|
||||||
|
}
|
||||||
|
public Serializable getIdentifier() {
|
||||||
|
return identifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getMessage() {
|
||||||
|
return "Object with id: " +
|
||||||
|
identifier +
|
||||||
|
" was not of the specified subclass: " +
|
||||||
|
entityName +
|
||||||
|
" (" + super.getMessage() + ")" ;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getEntityName() {
|
||||||
|
return entityName;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,146 @@
|
||||||
|
// $Id: BulkOperationCleanupAction.java 9897 2006-05-05 20:50:27Z max.andersen@jboss.com $
|
||||||
|
package org.hibernate.action;
|
||||||
|
|
||||||
|
import org.hibernate.HibernateException;
|
||||||
|
import org.hibernate.metadata.ClassMetadata;
|
||||||
|
import org.hibernate.persister.entity.EntityPersister;
|
||||||
|
import org.hibernate.persister.entity.Queryable;
|
||||||
|
import org.hibernate.engine.SessionFactoryImplementor;
|
||||||
|
import org.hibernate.engine.SessionImplementor;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Set;
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.HashSet;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An action that cleans up after a bulk operation by evicting the affected entity and
* collection regions from the second-level cache.
|
||||||
|
*
|
||||||
|
* @author Steve Ebersole
|
||||||
|
*/
|
||||||
|
public class BulkOperationCleanupAction implements Executable, Serializable {
|
||||||
|
|
||||||
|
private final SessionImplementor session;
|
||||||
|
|
||||||
|
private final Set affectedEntityNames = new HashSet();
|
||||||
|
private final Set affectedCollectionRoles = new HashSet();
|
||||||
|
private final Serializable[] spaces;
|
||||||
|
|
||||||
|
public BulkOperationCleanupAction(SessionImplementor session, Queryable[] affectedQueryables) {
|
||||||
|
this.session = session;
|
||||||
|
// TODO : probably better to calculate these and pass them in, as it'll be more performant
|
||||||
|
ArrayList tmpSpaces = new ArrayList();
|
||||||
|
for ( int i = 0; i < affectedQueryables.length; i++ ) {
|
||||||
|
if ( affectedQueryables[i].hasCache() ) {
|
||||||
|
affectedEntityNames.add( affectedQueryables[i].getEntityName() );
|
||||||
|
}
|
||||||
|
Set roles = session.getFactory().getCollectionRolesByEntityParticipant( affectedQueryables[i].getEntityName() );
|
||||||
|
if ( roles != null ) {
|
||||||
|
affectedCollectionRoles.addAll( roles );
|
||||||
|
}
|
||||||
|
for ( int y = 0; y < affectedQueryables[i].getQuerySpaces().length; y++ ) {
|
||||||
|
tmpSpaces.add( affectedQueryables[i].getQuerySpaces()[y] );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.spaces = new Serializable[ tmpSpaces.size() ];
|
||||||
|
for ( int i = 0; i < tmpSpaces.size(); i++ ) {
|
||||||
|
this.spaces[i] = ( Serializable ) tmpSpaces.get( i );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Create an action that will evict collection and entity regions based on queryspaces (table names).
|
||||||
|
* TODO: cache the autodetected information and pass it in instead.
|
||||||
|
**/
|
||||||
|
public BulkOperationCleanupAction(SessionImplementor session, Set querySpaces) {
|
||||||
|
this.session = session;
|
||||||
|
|
||||||
|
Set tmpSpaces = new HashSet(querySpaces);
|
||||||
|
SessionFactoryImplementor factory = session.getFactory();
|
||||||
|
Iterator iterator = factory.getAllClassMetadata().entrySet().iterator();
|
||||||
|
while ( iterator.hasNext() ) {
|
||||||
|
Map.Entry entry = (Map.Entry) iterator.next();
|
||||||
|
String entityName = (String) entry.getKey();
|
||||||
|
EntityPersister persister = factory.getEntityPersister( entityName );
|
||||||
|
Serializable[] entitySpaces = persister.getQuerySpaces();
|
||||||
|
|
||||||
|
if (affectedEntity( querySpaces, entitySpaces )) {
|
||||||
|
if ( persister.hasCache() ) {
|
||||||
|
affectedEntityNames.add( persister.getEntityName() );
|
||||||
|
}
|
||||||
|
Set roles = session.getFactory().getCollectionRolesByEntityParticipant( persister.getEntityName() );
|
||||||
|
if ( roles != null ) {
|
||||||
|
affectedCollectionRoles.addAll( roles );
|
||||||
|
}
|
||||||
|
for ( int y = 0; y < entitySpaces.length; y++ ) {
|
||||||
|
tmpSpaces.add( entitySpaces[y] );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
this.spaces = (Serializable[]) tmpSpaces.toArray( new Serializable[tmpSpaces.size()] );
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/** Returns true if there are no query spaces, or if one of the entity spaces matches. */
|
||||||
|
private boolean affectedEntity(Set querySpaces, Serializable[] entitySpaces) {
|
||||||
|
if(querySpaces==null || querySpaces.isEmpty()) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
for ( int i = 0; i < entitySpaces.length; i++ ) {
|
||||||
|
if ( querySpaces.contains( entitySpaces[i] ) ) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void init() {
|
||||||
|
evictEntityRegions();
|
||||||
|
evictCollectionRegions();
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean hasAfterTransactionCompletion() {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void afterTransactionCompletion(boolean success) throws HibernateException {
|
||||||
|
evictEntityRegions();
|
||||||
|
evictCollectionRegions();
|
||||||
|
}
|
||||||
|
|
||||||
|
public Serializable[] getPropertySpaces() {
|
||||||
|
return spaces;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void beforeExecutions() throws HibernateException {
|
||||||
|
// nothing to do
|
||||||
|
}
|
||||||
|
|
||||||
|
public void execute() throws HibernateException {
|
||||||
|
// nothing to do
|
||||||
|
}
|
||||||
|
|
||||||
|
private void evictEntityRegions() {
|
||||||
|
if ( affectedEntityNames != null ) {
|
||||||
|
Iterator itr = affectedEntityNames.iterator();
|
||||||
|
while ( itr.hasNext() ) {
|
||||||
|
final String entityName = ( String ) itr.next();
|
||||||
|
session.getFactory().evictEntity( entityName );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void evictCollectionRegions() {
|
||||||
|
if ( affectedCollectionRoles != null ) {
|
||||||
|
Iterator itr = affectedCollectionRoles.iterator();
|
||||||
|
while ( itr.hasNext() ) {
|
||||||
|
final String roleName = ( String ) itr.next();
|
||||||
|
session.getFactory().evictCollection( roleName );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,150 @@
|
||||||
|
//$Id: CollectionAction.java 11398 2007-04-10 14:54:07Z steve.ebersole@jboss.com $
|
||||||
|
package org.hibernate.action;
|
||||||
|
|
||||||
|
import org.hibernate.cache.access.SoftLock;
|
||||||
|
import org.hibernate.cache.CacheException;
|
||||||
|
import org.hibernate.cache.CacheKey;
|
||||||
|
import org.hibernate.collection.PersistentCollection;
|
||||||
|
import org.hibernate.engine.SessionImplementor;
|
||||||
|
import org.hibernate.persister.collection.CollectionPersister;
|
||||||
|
import org.hibernate.pretty.MessageHelper;
|
||||||
|
import org.hibernate.util.StringHelper;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.io.ObjectInputStream;
|
||||||
|
import java.io.Serializable;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Any action relating to insert/update/delete of a collection
|
||||||
|
* @author Gavin King
|
||||||
|
*/
|
||||||
|
public abstract class CollectionAction implements Executable, Serializable, Comparable {
|
||||||
|
|
||||||
|
private transient CollectionPersister persister;
|
||||||
|
private final Serializable key;
|
||||||
|
private Serializable finalKey;
|
||||||
|
private final SessionImplementor session;
|
||||||
|
private SoftLock lock;
|
||||||
|
private final String collectionRole;
|
||||||
|
private final PersistentCollection collection;
|
||||||
|
|
||||||
|
public CollectionAction(
|
||||||
|
final CollectionPersister persister,
|
||||||
|
final PersistentCollection collection,
|
||||||
|
final Serializable key,
|
||||||
|
final SessionImplementor session) throws CacheException {
|
||||||
|
this.persister = persister;
|
||||||
|
this.session = session;
|
||||||
|
this.key = key;
|
||||||
|
this.collectionRole = persister.getRole();
|
||||||
|
this.collection = collection;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected PersistentCollection getCollection() {
|
||||||
|
return collection;
|
||||||
|
}
|
||||||
|
|
||||||
|
private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
|
||||||
|
ois.defaultReadObject();
|
||||||
|
persister = session.getFactory().getCollectionPersister( collectionRole );
|
||||||
|
}
|
||||||
|
|
||||||
|
public void afterTransactionCompletion(boolean success) throws CacheException {
|
||||||
|
if ( persister.hasCache() ) {
|
||||||
|
final CacheKey ck = new CacheKey(
|
||||||
|
key,
|
||||||
|
persister.getKeyType(),
|
||||||
|
persister.getRole(),
|
||||||
|
session.getEntityMode(),
|
||||||
|
session.getFactory()
|
||||||
|
);
|
||||||
|
persister.getCacheAccessStrategy().unlockItem( ck, lock );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean hasAfterTransactionCompletion() {
|
||||||
|
return persister.hasCache();
|
||||||
|
}
|
||||||
|
|
||||||
|
public Serializable[] getPropertySpaces() {
|
||||||
|
return persister.getCollectionSpaces();
|
||||||
|
}
|
||||||
|
|
||||||
|
protected final CollectionPersister getPersister() {
|
||||||
|
return persister;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected final Serializable getKey() {
|
||||||
|
finalKey = key;
|
||||||
|
if ( key instanceof DelayedPostInsertIdentifier ) {
|
||||||
|
// need to look it up from the persistence-context
|
||||||
|
finalKey = session.getPersistenceContext().getEntry( collection.getOwner() ).getId();
|
||||||
|
if ( finalKey == key ) {
|
||||||
|
// we may be screwed here since the collection action is about to execute
|
||||||
|
// and we do not know the final owner key value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return finalKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected final SessionImplementor getSession() {
|
||||||
|
return session;
|
||||||
|
}
|
||||||
|
|
||||||
|
public final void beforeExecutions() throws CacheException {
|
||||||
|
// we need to obtain the lock before any actions are
|
||||||
|
// executed, since this may be an inverse="true"
|
||||||
|
// bidirectional association and it is one of the
|
||||||
|
// earlier entity actions which actually updates
|
||||||
|
// the database (this action is responsible for
|
||||||
|
// second-level cache invalidation only)
|
||||||
|
if ( persister.hasCache() ) {
|
||||||
|
final CacheKey ck = new CacheKey(
|
||||||
|
key,
|
||||||
|
persister.getKeyType(),
|
||||||
|
persister.getRole(),
|
||||||
|
session.getEntityMode(),
|
||||||
|
session.getFactory()
|
||||||
|
);
|
||||||
|
lock = persister.getCacheAccessStrategy().lockItem( ck, null );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
protected final void evict() throws CacheException {
|
||||||
|
if ( persister.hasCache() ) {
|
||||||
|
CacheKey ck = new CacheKey(
|
||||||
|
key,
|
||||||
|
persister.getKeyType(),
|
||||||
|
persister.getRole(),
|
||||||
|
session.getEntityMode(),
|
||||||
|
session.getFactory()
|
||||||
|
);
|
||||||
|
persister.getCacheAccessStrategy().remove( ck );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public String toString() {
|
||||||
|
return StringHelper.unqualify( getClass().getName() ) +
|
||||||
|
MessageHelper.infoString( collectionRole, key );
|
||||||
|
}
|
||||||
|
|
||||||
|
public int compareTo(Object other) {
|
||||||
|
CollectionAction action = ( CollectionAction ) other;
|
||||||
|
//sort first by role name
|
||||||
|
int roleComparison = collectionRole.compareTo( action.collectionRole );
|
||||||
|
if ( roleComparison != 0 ) {
|
||||||
|
return roleComparison;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
//then by fk
|
||||||
|
return persister.getKeyType()
|
||||||
|
.compare( key, action.key, session.getEntityMode() );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,47 @@
|
||||||
|
//$Id: CollectionRecreateAction.java 7147 2005-06-15 13:20:13Z oneovthafew $
|
||||||
|
package org.hibernate.action;
|
||||||
|
|
||||||
|
import org.hibernate.HibernateException;
|
||||||
|
import org.hibernate.cache.CacheException;
|
||||||
|
import org.hibernate.collection.PersistentCollection;
|
||||||
|
import org.hibernate.engine.SessionImplementor;
|
||||||
|
import org.hibernate.persister.collection.CollectionPersister;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
|
||||||
|
public final class CollectionRecreateAction extends CollectionAction {
|
||||||
|
|
||||||
|
public CollectionRecreateAction(
|
||||||
|
final PersistentCollection collection,
|
||||||
|
final CollectionPersister persister,
|
||||||
|
final Serializable id,
|
||||||
|
final SessionImplementor session)
|
||||||
|
throws CacheException {
|
||||||
|
super( persister, collection, id, session );
|
||||||
|
}
|
||||||
|
|
||||||
|
public void execute() throws HibernateException {
|
||||||
|
final PersistentCollection collection = getCollection();
|
||||||
|
|
||||||
|
getPersister().recreate( collection, getKey(), getSession() );
|
||||||
|
|
||||||
|
getSession().getPersistenceContext()
|
||||||
|
.getCollectionEntry(collection)
|
||||||
|
.afterAction(collection);
|
||||||
|
|
||||||
|
evict();
|
||||||
|
|
||||||
|
if ( getSession().getFactory().getStatistics().isStatisticsEnabled() ) {
|
||||||
|
getSession().getFactory().getStatisticsImplementor()
|
||||||
|
.recreateCollection( getPersister().getRole() );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,53 @@
|
||||||
|
//$Id: CollectionRemoveAction.java 7147 2005-06-15 13:20:13Z oneovthafew $
|
||||||
|
package org.hibernate.action;
|
||||||
|
|
||||||
|
import org.hibernate.HibernateException;
|
||||||
|
import org.hibernate.cache.CacheException;
|
||||||
|
import org.hibernate.collection.PersistentCollection;
|
||||||
|
import org.hibernate.engine.SessionImplementor;
|
||||||
|
import org.hibernate.persister.collection.CollectionPersister;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
|
||||||
|
public final class CollectionRemoveAction extends CollectionAction {
|
||||||
|
|
||||||
|
private boolean emptySnapshot;
|
||||||
|
|
||||||
|
public CollectionRemoveAction(
|
||||||
|
final PersistentCollection collection,
|
||||||
|
final CollectionPersister persister,
|
||||||
|
final Serializable id,
|
||||||
|
final boolean emptySnapshot,
|
||||||
|
final SessionImplementor session)
|
||||||
|
throws CacheException {
|
||||||
|
super( persister, collection, id, session );
|
||||||
|
this.emptySnapshot = emptySnapshot;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void execute() throws HibernateException {
|
||||||
|
if ( !emptySnapshot ) getPersister().remove( getKey(), getSession() );
|
||||||
|
|
||||||
|
final PersistentCollection collection = getCollection();
|
||||||
|
if (collection!=null) {
|
||||||
|
getSession().getPersistenceContext()
|
||||||
|
.getCollectionEntry(collection)
|
||||||
|
.afterAction(collection);
|
||||||
|
}
|
||||||
|
|
||||||
|
evict();
|
||||||
|
|
||||||
|
if ( getSession().getFactory().getStatistics().isStatisticsEnabled() ) {
|
||||||
|
getSession().getFactory().getStatisticsImplementor()
|
||||||
|
.removeCollection( getPersister().getRole() );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,78 @@
|
||||||
|
//$Id: CollectionUpdateAction.java 7631 2005-07-24 21:26:21Z oneovthafew $
|
||||||
|
package org.hibernate.action;
|
||||||
|
|
||||||
|
import org.hibernate.AssertionFailure;
|
||||||
|
import org.hibernate.HibernateException;
|
||||||
|
import org.hibernate.cache.CacheException;
|
||||||
|
import org.hibernate.collection.PersistentCollection;
|
||||||
|
import org.hibernate.engine.SessionImplementor;
|
||||||
|
import org.hibernate.persister.collection.CollectionPersister;
|
||||||
|
import org.hibernate.pretty.MessageHelper;
|
||||||
|
|
||||||
|
import java.io.Serializable;
|
||||||
|
|
||||||
|
public final class CollectionUpdateAction extends CollectionAction {
|
||||||
|
|
||||||
|
private final boolean emptySnapshot;
|
||||||
|
|
||||||
|
public CollectionUpdateAction(
|
||||||
|
final PersistentCollection collection,
|
||||||
|
final CollectionPersister persister,
|
||||||
|
final Serializable id,
|
||||||
|
final boolean emptySnapshot,
|
||||||
|
final SessionImplementor session)
|
||||||
|
throws CacheException {
|
||||||
|
super( persister, collection, id, session );
|
||||||
|
this.emptySnapshot = emptySnapshot;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void execute() throws HibernateException {
|
||||||
|
final Serializable id = getKey();
|
||||||
|
final SessionImplementor session = getSession();
|
||||||
|
final CollectionPersister persister = getPersister();
|
||||||
|
final PersistentCollection collection = getCollection();
|
||||||
|
boolean affectedByFilters = persister.isAffectedByEnabledFilters(session);
|
||||||
|
|
||||||
|
if ( !collection.wasInitialized() ) {
|
||||||
|
if ( !collection.hasQueuedOperations() ) throw new AssertionFailure( "no queued adds" );
|
||||||
|
//do nothing - we only need to notify the cache...
|
||||||
|
}
|
||||||
|
else if ( !affectedByFilters && collection.empty() ) {
|
||||||
|
if ( !emptySnapshot ) persister.remove( id, session );
|
||||||
|
}
|
||||||
|
else if ( collection.needsRecreate(persister) ) {
|
||||||
|
if (affectedByFilters) {
|
||||||
|
throw new HibernateException(
|
||||||
|
"cannot recreate collection while filter is enabled: " +
|
||||||
|
MessageHelper.collectionInfoString( persister, id, persister.getFactory() )
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if ( !emptySnapshot ) persister.remove( id, session );
|
||||||
|
persister.recreate( collection, id, session );
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
persister.deleteRows( collection, id, session );
|
||||||
|
persister.updateRows( collection, id, session );
|
||||||
|
persister.insertRows( collection, id, session );
|
||||||
|
}
|
||||||
|
|
||||||
|
getSession().getPersistenceContext()
|
||||||
|
.getCollectionEntry(collection)
|
||||||
|
.afterAction(collection);
|
||||||
|
|
||||||
|
evict();
|
||||||
|
|
||||||
|
if ( getSession().getFactory().getStatistics().isStatisticsEnabled() ) {
|
||||||
|
getSession().getFactory().getStatisticsImplementor().
|
||||||
|
updateCollection( getPersister().getRole() );
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@@ -0,0 +1,49 @@
package org.hibernate.action;

import java.io.Serializable;

/**
 * Acts as a stand-in for an entity identifier which is supposed to be
 * generated on insert (like an IDENTITY column) where the insert needed to
 * be delayed because we were outside a transaction when the persist
 * occurred (save currently still performs the insert).
 * <p/>
 * The stand-in is only used within the {@link org.hibernate.engine.PersistenceContext}
 * in order to distinguish one instance from another; it is never injected into
 * the entity instance or returned to the client...
 *
 * @author Steve Ebersole
 */
public class DelayedPostInsertIdentifier implements Serializable {
    private static long SEQUENCE = 0;
    private final long sequence;

    public DelayedPostInsertIdentifier() {
        synchronized( DelayedPostInsertIdentifier.class ) {
            if ( SEQUENCE == Long.MAX_VALUE ) {
                SEQUENCE = 0;
            }
            this.sequence = SEQUENCE++;
        }
    }

    public boolean equals(Object o) {
        if ( this == o ) {
            return true;
        }
        if ( o == null || getClass() != o.getClass() ) {
            return false;
        }
        final DelayedPostInsertIdentifier that = ( DelayedPostInsertIdentifier ) o;
        return sequence == that.sequence;
    }

    public int hashCode() {
        return ( int ) ( sequence ^ ( sequence >>> 32 ) );
    }

    public String toString() {
        return "<delayed:" + sequence + ">";
    }
}
@@ -0,0 +1,136 @@
//$Id: EntityAction.java 11402 2007-04-11 14:24:35Z steve.ebersole@jboss.com $
package org.hibernate.action;

import org.hibernate.AssertionFailure;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.pretty.MessageHelper;
import org.hibernate.util.StringHelper;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;

/**
 * Base class for actions relating to insert/update/delete of an entity
 * instance.
 *
 * @author Gavin King
 */
public abstract class EntityAction implements Executable, Serializable, Comparable {

    private final String entityName;
    private final Serializable id;
    private final Object instance;
    private final SessionImplementor session;

    private transient EntityPersister persister;

    /**
     * Instantiate an action.
     *
     * @param session The session from which this action is coming.
     * @param id The id of the entity
     * @param instance The entity instance
     * @param persister The entity persister
     */
    protected EntityAction(SessionImplementor session, Serializable id, Object instance, EntityPersister persister) {
        this.entityName = persister.getEntityName();
        this.id = id;
        this.instance = instance;
        this.session = session;
        this.persister = persister;
    }

    protected abstract boolean hasPostCommitEventListeners();

    /**
     * entity name accessor
     *
     * @return The entity name
     */
    public String getEntityName() {
        return entityName;
    }

    /**
     * entity id accessor
     *
     * @return The entity id
     */
    public final Serializable getId() {
        if ( id instanceof DelayedPostInsertIdentifier ) {
            return session.getPersistenceContext().getEntry( instance ).getId();
        }
        return id;
    }

    /**
     * entity instance accessor
     *
     * @return The entity instance
     */
    public final Object getInstance() {
        return instance;
    }

    /**
     * originating session accessor
     *
     * @return The session from which this action originated.
     */
    public final SessionImplementor getSession() {
        return session;
    }

    /**
     * entity persister accessor
     *
     * @return The entity persister
     */
    public final EntityPersister getPersister() {
        return persister;
    }

    public final Serializable[] getPropertySpaces() {
        return persister.getPropertySpaces();
    }

    public void beforeExecutions() {
        throw new AssertionFailure( "beforeExecutions() called for non-collection action" );
    }

    public boolean hasAfterTransactionCompletion() {
        return persister.hasCache() || hasPostCommitEventListeners();
    }

    public String toString() {
        return StringHelper.unqualify( getClass().getName() ) + MessageHelper.infoString( entityName, id );
    }

    public int compareTo(Object other) {
        EntityAction action = ( EntityAction ) other;
        //sort first by entity name
        int roleComparison = entityName.compareTo( action.entityName );
        if ( roleComparison != 0 ) {
            return roleComparison;
        }
        else {
            //then by id
            return persister.getIdentifierType().compare( id, action.id, session.getEntityMode() );
        }
    }

    /**
     * Serialization...
     *
     * @param ois The object stream
     * @throws IOException Problem performing the default stream reading
     * @throws ClassNotFoundException Problem performing the default stream reading
     */
    private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
        ois.defaultReadObject();
        persister = session.getFactory().getEntityPersister( entityName );
    }
}
@@ -0,0 +1,174 @@
//$Id: EntityDeleteAction.java 11398 2007-04-10 14:54:07Z steve.ebersole@jboss.com $
package org.hibernate.action;

import java.io.Serializable;

import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.cache.CacheKey;
import org.hibernate.cache.access.SoftLock;
import org.hibernate.engine.EntityEntry;
import org.hibernate.engine.EntityKey;
import org.hibernate.engine.PersistenceContext;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.event.PostDeleteEvent;
import org.hibernate.event.PostDeleteEventListener;
import org.hibernate.event.PreDeleteEvent;
import org.hibernate.event.PreDeleteEventListener;
import org.hibernate.event.EventSource;
import org.hibernate.persister.entity.EntityPersister;

public final class EntityDeleteAction extends EntityAction {

    private final Object version;
    private SoftLock lock;
    private final boolean isCascadeDeleteEnabled;
    private final Object[] state;

    public EntityDeleteAction(
            final Serializable id,
            final Object[] state,
            final Object version,
            final Object instance,
            final EntityPersister persister,
            final boolean isCascadeDeleteEnabled,
            final SessionImplementor session) {
        super( session, id, instance, persister );
        this.version = version;
        this.isCascadeDeleteEnabled = isCascadeDeleteEnabled;
        this.state = state;
    }

    public void execute() throws HibernateException {
        Serializable id = getId();
        EntityPersister persister = getPersister();
        SessionImplementor session = getSession();
        Object instance = getInstance();

        boolean veto = preDelete();

        Object version = this.version;
        if ( persister.isVersionPropertyGenerated() ) {
            // we need to grab the version value from the entity, otherwise
            // we have issues with generated-version entities that may have
            // multiple actions queued during the same flush
            version = persister.getVersion( instance, session.getEntityMode() );
        }

        final CacheKey ck;
        if ( persister.hasCache() ) {
            ck = new CacheKey(
                    id,
                    persister.getIdentifierType(),
                    persister.getRootEntityName(),
                    session.getEntityMode(),
                    session.getFactory()
            );
            lock = persister.getCacheAccessStrategy().lockItem( ck, version );
        }
        else {
            ck = null;
        }

        if ( !isCascadeDeleteEnabled && !veto ) {
            persister.delete( id, version, instance, session );
        }

        //postDelete:
        // After actually deleting a row, record the fact that the instance no longer
        // exists on the database (needed for identity-column key generation), and
        // remove it from the session cache
        final PersistenceContext persistenceContext = session.getPersistenceContext();
        EntityEntry entry = persistenceContext.removeEntry( instance );
        if ( entry == null ) {
            throw new AssertionFailure( "possible nonthreadsafe access to session" );
        }
        entry.postDelete();

        EntityKey key = new EntityKey( entry.getId(), entry.getPersister(), session.getEntityMode() );
        persistenceContext.removeEntity(key);
        persistenceContext.removeProxy(key);

        if ( persister.hasCache() ) persister.getCacheAccessStrategy().remove( ck );

        postDelete();

        if ( getSession().getFactory().getStatistics().isStatisticsEnabled() && !veto ) {
            getSession().getFactory().getStatisticsImplementor()
                    .deleteEntity( getPersister().getEntityName() );
        }
    }

    private boolean preDelete() {
        PreDeleteEventListener[] preListeners = getSession().getListeners()
                .getPreDeleteEventListeners();
        boolean veto = false;
        if (preListeners.length>0) {
            PreDeleteEvent preEvent = new PreDeleteEvent( getInstance(), getId(), state, getPersister() );
            for ( int i = 0; i < preListeners.length; i++ ) {
                veto = preListeners[i].onPreDelete(preEvent) || veto;
            }
        }
        return veto;
    }

    private void postDelete() {
        PostDeleteEventListener[] postListeners = getSession().getListeners()
                .getPostDeleteEventListeners();
        if (postListeners.length>0) {
            PostDeleteEvent postEvent = new PostDeleteEvent(
                    getInstance(),
                    getId(),
                    state,
                    getPersister(),
                    (EventSource) getSession()
            );
            for ( int i = 0; i < postListeners.length; i++ ) {
                postListeners[i].onPostDelete(postEvent);
            }
        }
    }

    private void postCommitDelete() {
        PostDeleteEventListener[] postListeners = getSession().getListeners()
                .getPostCommitDeleteEventListeners();
        if (postListeners.length>0) {
            PostDeleteEvent postEvent = new PostDeleteEvent(
                    getInstance(),
                    getId(),
                    state,
                    getPersister(),
                    (EventSource) getSession()
            );
            for ( int i = 0; i < postListeners.length; i++ ) {
                postListeners[i].onPostDelete(postEvent);
            }
        }
    }

    public void afterTransactionCompletion(boolean success) throws HibernateException {
        if ( getPersister().hasCache() ) {
            final CacheKey ck = new CacheKey(
                    getId(),
                    getPersister().getIdentifierType(),
                    getPersister().getRootEntityName(),
                    getSession().getEntityMode(),
                    getSession().getFactory()
            );
            getPersister().getCacheAccessStrategy().unlockItem( ck, lock );
        }
        postCommitDelete();
    }

    protected boolean hasPostCommitEventListeners() {
        return getSession().getListeners().getPostCommitDeleteEventListeners().length>0;
    }

}
@@ -0,0 +1,159 @@
//$Id: EntityIdentityInsertAction.java 10680 2006-11-01 22:53:30Z epbernard $
package org.hibernate.action;

import java.io.Serializable;

import org.hibernate.HibernateException;
import org.hibernate.AssertionFailure;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.engine.EntityKey;
import org.hibernate.event.PostInsertEvent;
import org.hibernate.event.PostInsertEventListener;
import org.hibernate.event.PreInsertEvent;
import org.hibernate.event.PreInsertEventListener;
import org.hibernate.event.EventSource;
import org.hibernate.persister.entity.EntityPersister;

public final class EntityIdentityInsertAction extends EntityAction {
    private final Object[] state;
    private final boolean isDelayed;
    private final EntityKey delayedEntityKey;
    //private CacheEntry cacheEntry;
    private Serializable generatedId;

    public EntityIdentityInsertAction(
            Object[] state,
            Object instance,
            EntityPersister persister,
            SessionImplementor session,
            boolean isDelayed) throws HibernateException {
        super( session, null, instance, persister );
        this.state = state;
        this.isDelayed = isDelayed;
        delayedEntityKey = isDelayed ? generateDelayedEntityKey() : null;
    }

    public void execute() throws HibernateException {

        final EntityPersister persister = getPersister();
        final SessionImplementor session = getSession();
        final Object instance = getInstance();

        boolean veto = preInsert();

        // Don't need to lock the cache here, since if someone
        // else inserted the same pk first, the insert would fail

        if ( !veto ) {
            generatedId = persister.insert( state, instance, session );
            if ( persister.hasInsertGeneratedProperties() ) {
                persister.processInsertGeneratedProperties( generatedId, instance, state, session );
            }
            //need to do that here rather than in the save event listener to let
            //the post insert events to have a id-filled entity when IDENTITY is used (EJB3)
            persister.setIdentifier( instance, generatedId, session.getEntityMode() );
        }

        //TODO: this bit actually has to be called after all cascades!
        //      but since identity insert is called *synchronously*,
        //      instead of asynchronously as other actions, it isn't
        /*if ( persister.hasCache() && !persister.isCacheInvalidationRequired() ) {
            cacheEntry = new CacheEntry(object, persister, session);
            persister.getCache().insert(generatedId, cacheEntry);
        }*/

        postInsert();

        if ( session.getFactory().getStatistics().isStatisticsEnabled() && !veto ) {
            session.getFactory().getStatisticsImplementor()
                    .insertEntity( getPersister().getEntityName() );
        }

    }

    private void postInsert() {
        if ( isDelayed ) {
            getSession().getPersistenceContext().replaceDelayedEntityIdentityInsertKeys( delayedEntityKey, generatedId );
        }
        PostInsertEventListener[] postListeners = getSession().getListeners()
                .getPostInsertEventListeners();
        if (postListeners.length>0) {
            PostInsertEvent postEvent = new PostInsertEvent(
                    getInstance(),
                    generatedId,
                    state,
                    getPersister(),
                    (EventSource) getSession()
            );
            for ( int i = 0; i < postListeners.length; i++ ) {
                postListeners[i].onPostInsert(postEvent);
            }
        }
    }

    private void postCommitInsert() {
        PostInsertEventListener[] postListeners = getSession().getListeners()
                .getPostCommitInsertEventListeners();
        if (postListeners.length>0) {
            PostInsertEvent postEvent = new PostInsertEvent(
                    getInstance(),
                    generatedId,
                    state,
                    getPersister(),
                    (EventSource) getSession()
            );
            for ( int i = 0; i < postListeners.length; i++ ) {
                postListeners[i].onPostInsert(postEvent);
            }
        }
    }

    private boolean preInsert() {
        PreInsertEventListener[] preListeners = getSession().getListeners()
                .getPreInsertEventListeners();
        boolean veto = false;
        if (preListeners.length>0) {
            PreInsertEvent preEvent = new PreInsertEvent( getInstance(), null, state, getPersister(), getSession() );
            for ( int i = 0; i < preListeners.length; i++ ) {
                veto = preListeners[i].onPreInsert(preEvent) || veto;
            }
        }
        return veto;
    }

    //Make 100% certain that this is called before any subsequent ScheduledUpdate.afterTransactionCompletion()!!
    public void afterTransactionCompletion(boolean success) throws HibernateException {
        //TODO: reenable if we also fix the above todo
        /*EntityPersister persister = getEntityPersister();
        if ( success && persister.hasCache() && !persister.isCacheInvalidationRequired() ) {
            persister.getCache().afterInsert( getGeneratedId(), cacheEntry );
        }*/
        postCommitInsert();
    }

    public boolean hasAfterTransactionCompletion() {
        //TODO: simply remove this override
        //      if we fix the above todos
        return hasPostCommitEventListeners();
    }

    protected boolean hasPostCommitEventListeners() {
        return getSession().getListeners().getPostCommitInsertEventListeners().length>0;
    }

    public final Serializable getGeneratedId() {
        return generatedId;
    }

    public EntityKey getDelayedEntityKey() {
        return delayedEntityKey;
    }

    private synchronized EntityKey generateDelayedEntityKey() {
        if ( !isDelayed ) {
            throw new AssertionFailure( "cannot request delayed entity-key for non-delayed post-insert-id generation" );
        }
        return new EntityKey( new DelayedPostInsertIdentifier(), getPersister(), getSession().getEntityMode() );
    }
}
@@ -0,0 +1,199 @@
//$Id: EntityInsertAction.java 11402 2007-04-11 14:24:35Z steve.ebersole@jboss.com $
package org.hibernate.action;

import java.io.Serializable;

import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.cache.CacheKey;
import org.hibernate.cache.entry.CacheEntry;
import org.hibernate.engine.EntityEntry;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.engine.Versioning;
import org.hibernate.event.PostInsertEvent;
import org.hibernate.event.PostInsertEventListener;
import org.hibernate.event.PreInsertEvent;
import org.hibernate.event.PreInsertEventListener;
import org.hibernate.event.EventSource;
import org.hibernate.persister.entity.EntityPersister;

public final class EntityInsertAction extends EntityAction {

    private Object[] state;
    private Object version;
    private Object cacheEntry;

    public EntityInsertAction(
            Serializable id,
            Object[] state,
            Object instance,
            Object version,
            EntityPersister persister,
            SessionImplementor session) throws HibernateException {
        super( session, id, instance, persister );
        this.state = state;
        this.version = version;
    }

    public Object[] getState() {
        return state;
    }

    public void execute() throws HibernateException {
        EntityPersister persister = getPersister();
        SessionImplementor session = getSession();
        Object instance = getInstance();
        Serializable id = getId();

        boolean veto = preInsert();

        // Don't need to lock the cache here, since if someone
        // else inserted the same pk first, the insert would fail

        if ( !veto ) {

            persister.insert( id, state, instance, session );

            EntityEntry entry = session.getPersistenceContext().getEntry( instance );
            if ( entry == null ) {
                throw new AssertionFailure( "possible nonthreadsafe access to session" );
            }

            entry.postInsert();

            if ( persister.hasInsertGeneratedProperties() ) {
                persister.processInsertGeneratedProperties( id, instance, state, session );
                if ( persister.isVersionPropertyGenerated() ) {
                    version = Versioning.getVersion(state, persister);
                }
                entry.postUpdate(instance, state, version);
            }

        }

        final SessionFactoryImplementor factory = getSession().getFactory();

        if ( isCachePutEnabled( persister, session ) ) {

            CacheEntry ce = new CacheEntry(
                    state,
                    persister,
                    persister.hasUninitializedLazyProperties( instance, session.getEntityMode() ),
                    version,
                    session,
                    instance
            );

            cacheEntry = persister.getCacheEntryStructure().structure(ce);
            final CacheKey ck = new CacheKey(
                    id,
                    persister.getIdentifierType(),
                    persister.getRootEntityName(),
                    session.getEntityMode(),
                    session.getFactory()
            );
//            boolean put = persister.getCache().insert(ck, cacheEntry);
            boolean put = persister.getCacheAccessStrategy().insert( ck, cacheEntry, version );

            if ( put && factory.getStatistics().isStatisticsEnabled() ) {
                factory.getStatisticsImplementor().secondLevelCachePut( getPersister().getCacheAccessStrategy().getRegion().getName() );
            }

        }

        postInsert();

        if ( factory.getStatistics().isStatisticsEnabled() && !veto ) {
            factory.getStatisticsImplementor()
                    .insertEntity( getPersister().getEntityName() );
        }

    }

    private void postInsert() {
        PostInsertEventListener[] postListeners = getSession().getListeners()
                .getPostInsertEventListeners();
        if ( postListeners.length > 0 ) {
            PostInsertEvent postEvent = new PostInsertEvent(
                    getInstance(),
                    getId(),
                    state,
                    getPersister(),
                    (EventSource) getSession()
            );
            for ( int i = 0; i < postListeners.length; i++ ) {
                postListeners[i].onPostInsert(postEvent);
            }
        }
    }

    private void postCommitInsert() {
        PostInsertEventListener[] postListeners = getSession().getListeners()
                .getPostCommitInsertEventListeners();
        if ( postListeners.length > 0 ) {
            PostInsertEvent postEvent = new PostInsertEvent(
                    getInstance(),
                    getId(),
                    state,
                    getPersister(),
                    (EventSource) getSession()
            );
            for ( int i = 0; i < postListeners.length; i++ ) {
                postListeners[i].onPostInsert(postEvent);
            }
        }
    }

    private boolean preInsert() {
        PreInsertEventListener[] preListeners = getSession().getListeners()
                .getPreInsertEventListeners();
        boolean veto = false;
        if (preListeners.length>0) {
            PreInsertEvent preEvent = new PreInsertEvent( getInstance(), getId(), state, getPersister(), getSession() );
            for ( int i = 0; i < preListeners.length; i++ ) {
                veto = preListeners[i].onPreInsert(preEvent) || veto;
            }
        }
        return veto;
    }

    //Make 100% certain that this is called before any subsequent ScheduledUpdate.afterTransactionCompletion()!!
    public void afterTransactionCompletion(boolean success) throws HibernateException {
        EntityPersister persister = getPersister();
        if ( success && isCachePutEnabled( persister, getSession() ) ) {
            final CacheKey ck = new CacheKey(
                    getId(),
                    persister.getIdentifierType(),
                    persister.getRootEntityName(),
                    getSession().getEntityMode(),
                    getSession().getFactory()
            );
            boolean put = persister.getCacheAccessStrategy().afterInsert( ck, cacheEntry, version );

            if ( put && getSession().getFactory().getStatistics().isStatisticsEnabled() ) {
                getSession().getFactory().getStatisticsImplementor()
                        .secondLevelCachePut( getPersister().getCacheAccessStrategy().getRegion().getName() );
            }
        }
        postCommitInsert();
    }

    protected boolean hasPostCommitEventListeners() {
        return getSession().getListeners().getPostCommitInsertEventListeners().length>0;
    }

    private boolean isCachePutEnabled(EntityPersister persister, SessionImplementor session) {
        return persister.hasCache() &&
                !persister.isCacheInvalidationRequired() &&
                session.getCacheMode().isPutEnabled();
    }

}
@@ -0,0 +1,257 @@
//$Id: EntityUpdateAction.java 11398 2007-04-10 14:54:07Z steve.ebersole@jboss.com $
package org.hibernate.action;

import java.io.Serializable;

import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import org.hibernate.cache.CacheException;
import org.hibernate.cache.CacheKey;
import org.hibernate.cache.access.SoftLock;
import org.hibernate.cache.entry.CacheEntry;
import org.hibernate.engine.EntityEntry;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.engine.Status;
import org.hibernate.engine.Versioning;
import org.hibernate.event.PostUpdateEvent;
import org.hibernate.event.PostUpdateEventListener;
import org.hibernate.event.PreUpdateEvent;
import org.hibernate.event.PreUpdateEventListener;
import org.hibernate.event.EventSource;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.type.TypeFactory;

public final class EntityUpdateAction extends EntityAction {

    private final Object[] state;
    private final Object[] previousState;
    private final Object previousVersion;
    private Object nextVersion;
    private final int[] dirtyFields;
    private final boolean hasDirtyCollection;
    private final Object rowId;
    private Object cacheEntry;
    private SoftLock lock;

    public EntityUpdateAction(
            final Serializable id,
            final Object[] state,
            final int[] dirtyProperties,
            final boolean hasDirtyCollection,
            final Object[] previousState,
            final Object previousVersion,
            final Object nextVersion,
            final Object instance,
            final Object rowId,
            final EntityPersister persister,
            final SessionImplementor session) throws HibernateException {
        super( session, id, instance, persister );
        this.state = state;
        this.previousState = previousState;
        this.previousVersion = previousVersion;
        this.nextVersion = nextVersion;
        this.dirtyFields = dirtyProperties;
        this.hasDirtyCollection = hasDirtyCollection;
        this.rowId = rowId;
    }

    public void execute() throws HibernateException {
        Serializable id = getId();
        EntityPersister persister = getPersister();
        SessionImplementor session = getSession();
        Object instance = getInstance();

        boolean veto = preUpdate();

        final SessionFactoryImplementor factory = getSession().getFactory();
        Object previousVersion = this.previousVersion;
        if ( persister.isVersionPropertyGenerated() ) {
            // we need to grab the version value from the entity, otherwise
            // we have issues with generated-version entities that may have
            // multiple actions queued during the same flush
            previousVersion = persister.getVersion( instance, session.getEntityMode() );
        }

        final CacheKey ck;
        if ( persister.hasCache() ) {
            ck = new CacheKey(
                    id,
                    persister.getIdentifierType(),
                    persister.getRootEntityName(),
                    session.getEntityMode(),
                    session.getFactory()
            );
            lock = persister.getCacheAccessStrategy().lockItem( ck, previousVersion );
        }
        else {
            ck = null;
        }

        if ( !veto ) {
            persister.update(
                    id,
                    state,
                    dirtyFields,
                    hasDirtyCollection,
                    previousState,
                    previousVersion,
                    instance,
                    rowId,
                    session
            );
        }

        EntityEntry entry = getSession().getPersistenceContext().getEntry( instance );
        if ( entry == null ) {
            throw new AssertionFailure( "possible nonthreadsafe access to session" );
        }

        if ( entry.getStatus()==Status.MANAGED || persister.isVersionPropertyGenerated() ) {
            // get the updated snapshot of the entity state by cloning current state;
            // it is safe to copy in place, since by this time no-one else (should have)
            // has a reference to the array
            TypeFactory.deepCopy(
                    state,
                    persister.getPropertyTypes(),
                    persister.getPropertyCheckability(),
                    state,
                    session
            );
            if ( persister.hasUpdateGeneratedProperties() ) {
                // this entity defines property generation, so process those generated
                // values...
                persister.processUpdateGeneratedProperties( id, instance, state, session );
                if ( persister.isVersionPropertyGenerated() ) {
                    nextVersion = Versioning.getVersion( state, persister );
                }
            }
            // have the entity entry perform post-update processing, passing it the
            // update state and the new version (if one).
            entry.postUpdate( instance, state, nextVersion );
        }

        if ( persister.hasCache() ) {
            if ( persister.isCacheInvalidationRequired() || entry.getStatus()!=Status.MANAGED ) {
                persister.getCacheAccessStrategy().remove( ck );
            }
            else {
                //TODO: inefficient if that cache is just going to ignore the updated state!
                CacheEntry ce = new CacheEntry(
                        state,
                        persister,
                        persister.hasUninitializedLazyProperties( instance, session.getEntityMode() ),
                        nextVersion,
                        getSession(),
                        instance
                );
                cacheEntry = persister.getCacheEntryStructure().structure( ce );
                boolean put = persister.getCacheAccessStrategy().update( ck, cacheEntry, nextVersion, previousVersion );
                if ( put && factory.getStatistics().isStatisticsEnabled() ) {
                    factory.getStatisticsImplementor().secondLevelCachePut( getPersister().getCacheAccessStrategy().getRegion().getName() );
                }
            }
        }

        postUpdate();

        if ( factory.getStatistics().isStatisticsEnabled() && !veto ) {
            factory.getStatisticsImplementor()
                    .updateEntity( getPersister().getEntityName() );
        }
    }

    private void postUpdate() {
        PostUpdateEventListener[] postListeners = getSession().getListeners()
                .getPostUpdateEventListeners();
        if (postListeners.length>0) {
            PostUpdateEvent postEvent = new PostUpdateEvent(
                    getInstance(),
                    getId(),
                    state,
                    previousState,
                    getPersister(),
                    (EventSource) getSession()
            );
            for ( int i = 0; i < postListeners.length; i++ ) {
                postListeners[i].onPostUpdate(postEvent);
            }
        }
    }

    private void postCommitUpdate() {
        PostUpdateEventListener[] postListeners = getSession().getListeners()
                .getPostCommitUpdateEventListeners();
        if (postListeners.length>0) {
            PostUpdateEvent postEvent = new PostUpdateEvent(
                    getInstance(),
                    getId(),
                    state,
                    previousState,
                    getPersister(),
                    (EventSource) getSession()
            );
            for ( int i = 0; i < postListeners.length; i++ ) {
                postListeners[i].onPostUpdate(postEvent);
            }
        }
    }

    private boolean preUpdate() {
        PreUpdateEventListener[] preListeners = getSession().getListeners()
                .getPreUpdateEventListeners();
        boolean veto = false;
        if (preListeners.length>0) {
            PreUpdateEvent preEvent = new PreUpdateEvent(
                    getInstance(),
                    getId(),
                    state,
                    previousState,
                    getPersister(),
                    getSession()
            );
            for ( int i = 0; i < preListeners.length; i++ ) {
                veto = preListeners[i].onPreUpdate(preEvent) || veto;
            }
        }
        return veto;
    }

    public void afterTransactionCompletion(boolean success) throws CacheException {
        EntityPersister persister = getPersister();
        if ( persister.hasCache() ) {

            final CacheKey ck = new CacheKey(
                    getId(),
                    persister.getIdentifierType(),
                    persister.getRootEntityName(),
                    getSession().getEntityMode(),
                    getSession().getFactory()
            );

            if ( success && cacheEntry!=null /*!persister.isCacheInvalidationRequired()*/ ) {
                boolean put = persister.getCacheAccessStrategy().afterUpdate( ck, cacheEntry, nextVersion, previousVersion, lock );

                if ( put && getSession().getFactory().getStatistics().isStatisticsEnabled() ) {
                    getSession().getFactory().getStatisticsImplementor().secondLevelCachePut( getPersister().getCacheAccessStrategy().getRegion().getName() );
                }
            }
            else {
                persister.getCacheAccessStrategy().unlockItem( ck, lock );
            }
        }
        postCommitUpdate();
    }

    protected boolean hasPostCommitEventListeners() {
        return getSession().getListeners().getPostCommitUpdateEventListeners().length>0;
    }

}
@@ -0,0 +1,39 @@
//$Id: Executable.java 6607 2005-04-29 15:26:11Z oneovthafew $
package org.hibernate.action;

import org.hibernate.HibernateException;

import java.io.Serializable;

/**
 * An operation which may be scheduled for later execution.
 * Usually, the operation is a database insert/update/delete,
 * together with required second-level cache management.
 *
 * @author Gavin King
 */
public interface Executable {
    /**
     * Called before executing any actions
     */
    public void beforeExecutions() throws HibernateException;
    /**
     * Execute this action
     */
    public void execute() throws HibernateException;
    /**
     * Do we need to retain this instance until after the
     * transaction completes?
     * @return false if this class defines a no-op
     * <tt>hasAfterTransactionCompletion()</tt>
     */
    public boolean hasAfterTransactionCompletion();
    /**
     * Called after the transaction completes
     */
    public void afterTransactionCompletion(boolean success) throws HibernateException;
    /**
     * What spaces (tables) are affected by this action?
     */
    public Serializable[] getPropertySpaces();
}
@@ -0,0 +1,10 @@
<html>
<head>
</head>
<body>
<p>
    This package defines "actions" that are scheduled for
    asynchronous execution by the event listeners.
</p>
</body>
</html>
@@ -0,0 +1,45 @@
//$Id: $
package org.hibernate.bytecode;

import org.hibernate.bytecode.util.ClassFilter;
import org.hibernate.bytecode.util.FieldFilter;

import java.security.ProtectionDomain;

/**
 * @author Emmanuel Bernard
 * @author Steve Ebersole
 */
public abstract class AbstractClassTransformerImpl implements ClassTransformer {

    protected final ClassFilter classFilter;
    protected final FieldFilter fieldFilter;

    protected AbstractClassTransformerImpl(ClassFilter classFilter, FieldFilter fieldFilter) {
        this.classFilter = classFilter;
        this.fieldFilter = fieldFilter;
    }

    public byte[] transform(
            ClassLoader loader,
            String className,
            Class classBeingRedefined,
            ProtectionDomain protectionDomain,
            byte[] classfileBuffer) {
        // to be safe...
        className = className.replace( '/', '.' );
        if ( classFilter.shouldInstrumentClass( className ) ) {
            return doTransform( loader, className, classBeingRedefined, protectionDomain, classfileBuffer );
        }
        else {
            return classfileBuffer;
        }
    }

    protected abstract byte[] doTransform(
            ClassLoader loader,
            String className,
            Class classBeingRedefined,
            ProtectionDomain protectionDomain,
            byte[] classfileBuffer);
}
@@ -0,0 +1,10 @@
package org.hibernate.bytecode;

/**
 * A proxy factory for "basic proxy" generation
 *
 * @author Steve Ebersole
 */
public interface BasicProxyFactory {
    public Object getProxy();
}
@@ -0,0 +1,49 @@
package org.hibernate.bytecode;

import org.hibernate.bytecode.util.ClassFilter;
import org.hibernate.bytecode.util.FieldFilter;

/**
 * Contract for providers of bytecode services to Hibernate.
 * <p/>
 * Bytecode requirements break down into basically 3 areas<ol>
 * <li>proxy generation (both for runtime-lazy-loading and basic proxy generation)
 * {@link #getProxyFactoryFactory()}
 * <li>bean reflection optimization {@link #getReflectionOptimizer}
 * <li>field-access instrumentation {@link #getTransformer}
 * </ol>
 *
 * @author Steve Ebersole
 */
public interface BytecodeProvider {
    /**
     * Retrieve the specific factory for this provider capable of
     * generating run-time proxies for lazy-loading purposes.
     *
     * @return The provider-specific factory.
     */
    public ProxyFactoryFactory getProxyFactoryFactory();

    /**
     * Retrieve the ReflectionOptimizer delegate for this provider
     * capable of generating reflection optimization components.
     *
     * @param clazz The class to be reflected upon.
     * @param getterNames Names of all property getters to be accessed via reflection.
     * @param setterNames Names of all property setters to be accessed via reflection.
     * @param types The types of all properties to be accessed.
     * @return The reflection optimization delegate.
     */
    public ReflectionOptimizer getReflectionOptimizer(Class clazz, String[] getterNames, String[] setterNames, Class[] types);

    /**
     * Generate a ClassTransformer capable of performing bytecode manipulation.
     *
     * @param classFilter filter used to limit which classes are to be instrumented
     * via this ClassTransformer.
     * @param fieldFilter filter used to limit which fields are to be instrumented
     * via this ClassTransformer.
     * @return The appropriate ClassTransformer.
     */
    public ClassTransformer getTransformer(ClassFilter classFilter, FieldFilter fieldFilter);
}
@@ -0,0 +1,34 @@
//$Id: $
package org.hibernate.bytecode;

import java.security.ProtectionDomain;

/**
 * A persistence provider provides an instance of this interface
 * to the PersistenceUnitInfo.addTransformer method.
 * The supplied transformer instance will get called to transform
 * entity class files when they are loaded and redefined. The transformation
 * occurs before the class is defined by the JVM.
 *
 * @author <a href="mailto:bill@jboss.org">Bill Burke</a>
 * @author Emmanuel Bernard
 */
public interface ClassTransformer
{
    /**
     * Invoked when a class is being loaded or redefined to add hooks for persistence bytecode manipulation
     *
     * @param loader the defining class loader of the class being transformed. It may be null if using the bootstrap loader
     * @param classname The name of the class being transformed
     * @param classBeingRedefined If an already loaded class is being redefined, then pass this as a parameter
     * @param protectionDomain ProtectionDomain of the class being (re)-defined
     * @param classfileBuffer The input byte buffer in class file format
     * @return A well-formed class file that can be loaded
     */
    public byte[] transform(ClassLoader loader,
                            String classname,
                            Class classBeingRedefined,
                            ProtectionDomain protectionDomain,
                            byte[] classfileBuffer);
}
@@ -0,0 +1,54 @@
package org.hibernate.bytecode;

import org.hibernate.bytecode.util.ByteCodeHelper;

import java.io.InputStream;

/**
 * A specialized classloader which performs bytecode enhancement on class
 * definitions as they are loaded into the classloader scope.
 *
 * @author Emmanuel Bernard
 * @author Steve Ebersole
 */
public class InstrumentedClassLoader extends ClassLoader {

    private ClassTransformer classTransformer;

    public InstrumentedClassLoader(ClassLoader parent, ClassTransformer classTransformer) {
        super( parent );
        this.classTransformer = classTransformer;
    }

    public Class loadClass(String name) throws ClassNotFoundException {
        if ( name.startsWith( "java." ) || classTransformer == null ) {
            return getParent().loadClass( name );
        }

        Class c = findLoadedClass( name );
        if ( c != null ) {
            return c;
        }

        InputStream is = this.getResourceAsStream( name.replace( '.', '/' ) + ".class" );
        if ( is == null ) {
            throw new ClassNotFoundException( name + " not found" );
        }

        try {
            byte[] originalBytecode = ByteCodeHelper.readByteCode( is );
            byte[] transformedBytecode = classTransformer.transform( getParent(), name, null, null, originalBytecode );
            if ( originalBytecode == transformedBytecode ) {
                // no transformations took place, so handle it as we would a
                // non-instrumented class
                return getParent().loadClass( name );
            }
            else {
                return defineClass( name, transformedBytecode, 0, transformedBytecode.length );
            }
        }
        catch( Throwable t ) {
            throw new ClassNotFoundException( name + " not found", t );
        }
    }
}
@@ -0,0 +1,37 @@
package org.hibernate.bytecode;

import org.hibernate.proxy.ProxyFactory;

/**
 * An interface for factories of {@link ProxyFactory proxy factory} instances.
 * <p/>
 * Currently used to abstract from the tuplizer whether we are using CGLIB or
 * Javassist for lazy proxy generation.
 *
 * @author Steve Ebersole
 */
public interface ProxyFactoryFactory {
    /**
     * Build a proxy factory specifically for handling runtime
     * lazy loading.
     *
     * @return The lazy-load proxy factory.
     */
    public ProxyFactory buildProxyFactory();

    /**
     * Build a proxy factory for basic proxy concerns. The return
     * should be capable of properly handling newInstance() calls.
     * <p/>
     * Should build basic proxies essentially equivalent to JDK proxies in
     * terms of capabilities, but should be able to deal with abstract super
     * classes in addition to proxy interfaces.
     * <p/>
     * Must pass in either superClass or interfaces (or both).
     *
     * @param superClass The abstract super class (or null if none).
     * @param interfaces Interfaces to be proxied (or null if none).
     * @return The proxy class
     */
    public BasicProxyFactory buildBasicProxyFactory(Class superClass, Class[] interfaces);
}
@@ -0,0 +1,35 @@
package org.hibernate.bytecode;

/**
 * Represents reflection optimization for a particular class.
 *
 * @author Steve Ebersole
 */
public interface ReflectionOptimizer {

    public InstantiationOptimizer getInstantiationOptimizer();
    public AccessOptimizer getAccessOptimizer();

    /**
     * Represents optimized entity instantiation.
     */
    public static interface InstantiationOptimizer {
        /**
         * Perform instantiation of an instance of the underlying class.
         *
         * @return The new instance.
         */
        public Object newInstance();
    }

    /**
     * Represents optimized entity property access.
     *
     * @author Steve Ebersole
     */
    public interface AccessOptimizer {
        public String[] getPropertyNames();
        public Object[] getPropertyValues(Object object);
        public void setPropertyValues(Object object, Object[] values);
    }
}
@@ -0,0 +1,102 @@
package org.hibernate.bytecode.cglib;

import org.hibernate.bytecode.ReflectionOptimizer;
import org.hibernate.PropertyAccessException;
import net.sf.cglib.beans.BulkBean;
import net.sf.cglib.beans.BulkBeanException;
import net.sf.cglib.reflect.FastClass;

import java.io.Serializable;
import java.io.ObjectOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;

/**
 * The {@link ReflectionOptimizer.AccessOptimizer} implementation for CGLIB
 * which simply acts as an adapter to the {@link BulkBean} class.
 *
 * @author Steve Ebersole
 */
public class AccessOptimizerAdapter implements ReflectionOptimizer.AccessOptimizer, Serializable {

    public static final String PROPERTY_GET_EXCEPTION =
            "exception getting property value with CGLIB (set hibernate.bytecode.use_reflection_optimizer=false for more info)";

    public static final String PROPERTY_SET_EXCEPTION =
            "exception setting property value with CGLIB (set hibernate.bytecode.use_reflection_optimizer=false for more info)";

    private Class mappedClass;
    private BulkBean bulkBean;

    public AccessOptimizerAdapter(BulkBean bulkBean, Class mappedClass) {
        this.bulkBean = bulkBean;
        this.mappedClass = mappedClass;
    }

    public String[] getPropertyNames() {
        return bulkBean.getGetters();
    }

    public Object[] getPropertyValues(Object object) {
        try {
            return bulkBean.getPropertyValues( object );
        }
        catch ( Throwable t ) {
            throw new PropertyAccessException(
                    t,
                    PROPERTY_GET_EXCEPTION,
                    false,
                    mappedClass,
                    getterName( t, bulkBean )
            );
        }
    }

    public void setPropertyValues(Object object, Object[] values) {
        try {
            bulkBean.setPropertyValues( object, values );
        }
        catch ( Throwable t ) {
            throw new PropertyAccessException(
                    t,
                    PROPERTY_SET_EXCEPTION,
                    true,
                    mappedClass,
                    setterName( t, bulkBean )
            );
        }
    }

    private static String setterName(Throwable t, BulkBean optimizer) {
        if ( t instanceof BulkBeanException ) {
            return optimizer.getSetters()[( ( BulkBeanException ) t ).getIndex()];
        }
        else {
            return "?";
        }
    }

    private static String getterName(Throwable t, BulkBean optimizer) {
        if ( t instanceof BulkBeanException ) {
            return optimizer.getGetters()[( ( BulkBeanException ) t ).getIndex()];
        }
        else {
            return "?";
        }
    }

    private void writeObject(ObjectOutputStream out) throws IOException {
        out.writeObject( mappedClass );
        out.writeObject( bulkBean.getGetters() );
        out.writeObject( bulkBean.getSetters() );
        out.writeObject( bulkBean.getPropertyTypes() );
    }

    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        Class beanClass = ( Class ) in.readObject();
        String[] getters = ( String[] ) in.readObject();
        String[] setters = ( String[] ) in.readObject();
        Class[] types = ( Class[] ) in.readObject();
        bulkBean = BulkBean.create( beanClass, getters, setters, types );
    }
}
@ -0,0 +1,92 @@
package org.hibernate.bytecode.cglib;

import java.lang.reflect.Modifier;

import net.sf.cglib.beans.BulkBean;
import net.sf.cglib.beans.BulkBeanException;
import net.sf.cglib.reflect.FastClass;
import net.sf.cglib.transform.ClassFilter;
import net.sf.cglib.transform.ClassTransformer;
import net.sf.cglib.transform.ClassTransformerFactory;
import net.sf.cglib.transform.TransformingClassLoader;
import net.sf.cglib.transform.impl.InterceptFieldFilter;
import net.sf.cglib.transform.impl.InterceptFieldTransformer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.bytecode.BytecodeProvider;
import org.hibernate.bytecode.ProxyFactoryFactory;
import org.hibernate.bytecode.ReflectionOptimizer;
import org.hibernate.bytecode.util.FieldFilter;
import org.hibernate.util.StringHelper;
import org.objectweb.asm.Type;

/**
 * Bytecode provider implementation for CGLIB.
 *
 * @author Steve Ebersole
 */
public class BytecodeProviderImpl implements BytecodeProvider {

	private static final Log log = LogFactory.getLog( BytecodeProviderImpl.class );

	public ProxyFactoryFactory getProxyFactoryFactory() {
		return new ProxyFactoryFactoryImpl();
	}

	public ReflectionOptimizer getReflectionOptimizer(
			Class clazz,
			String[] getterNames,
			String[] setterNames,
			Class[] types) {
		FastClass fastClass;
		BulkBean bulkBean;
		try {
			fastClass = FastClass.create( clazz );
			bulkBean = BulkBean.create( clazz, getterNames, setterNames, types );
			if ( !clazz.isInterface() && !Modifier.isAbstract( clazz.getModifiers() ) ) {
				if ( fastClass == null ) {
					bulkBean = null;
				}
				else {
					//test out the optimizer:
					Object instance = fastClass.newInstance();
					bulkBean.setPropertyValues( instance, bulkBean.getPropertyValues( instance ) );
				}
			}
		}
		catch( Throwable t ) {
			fastClass = null;
			bulkBean = null;
			String message = "reflection optimizer disabled for: " +
			                 clazz.getName() +
			                 " [" +
			                 StringHelper.unqualify( t.getClass().getName() ) +
			                 ": " +
			                 t.getMessage();

			if ( t instanceof BulkBeanException ) {
				int index = ( ( BulkBeanException ) t ).getIndex();
				if ( index >= 0 ) {
					message += " (property " + setterNames[index] + ")";
				}
			}

			log.debug( message );
		}

		if ( fastClass != null && bulkBean != null ) {
			return new ReflectionOptimizerImpl(
					new InstantiationOptimizerAdapter( fastClass ),
					new AccessOptimizerAdapter( bulkBean, clazz )
			);
		}
		else {
			return null;
		}
	}

	public org.hibernate.bytecode.ClassTransformer getTransformer(org.hibernate.bytecode.util.ClassFilter classFilter, FieldFilter fieldFilter) {
		return new CglibClassTransformer( classFilter, fieldFilter );
	}

}
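For illustration, a minimal sketch of how this provider might be exercised directly. The Person bean below is hypothetical, and the getInstantiationOptimizer()/getAccessOptimizer() accessors are assumed from the org.hibernate.bytecode.ReflectionOptimizer contract in hibernate-core, which is not part of this diff:

	package org.hibernate.bytecode.cglib.example;

	import org.hibernate.bytecode.ReflectionOptimizer;
	import org.hibernate.bytecode.cglib.BytecodeProviderImpl;

	public class BytecodeProviderExample {

		// Hypothetical JavaBean used only to drive the optimizer.
		public static class Person {
			private String name;
			public String getName() { return name; }
			public void setName(String name) { this.name = name; }
		}

		public static void main(String[] args) {
			BytecodeProviderImpl provider = new BytecodeProviderImpl();
			ReflectionOptimizer optimizer = provider.getReflectionOptimizer(
					Person.class,
					new String[] { "getName" },
					new String[] { "setName" },
					new Class[] { String.class }
			);
			// getReflectionOptimizer() returns null when the optimizer could not be built,
			// in which case the caller is expected to fall back to plain reflection.
			if ( optimizer != null ) {
				Object person = optimizer.getInstantiationOptimizer().newInstance();
				optimizer.getAccessOptimizer().setPropertyValues( person, new Object[] { "Gavin" } );
				System.out.println( optimizer.getAccessOptimizer().getPropertyValues( person )[0] ); // prints "Gavin"
			}
		}
	}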
@ -0,0 +1,120 @@
//$Id: $
package org.hibernate.bytecode.cglib;

import java.security.ProtectionDomain;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ByteArrayOutputStream;

import net.sf.cglib.transform.ClassTransformer;
import net.sf.cglib.transform.TransformingClassGenerator;
import net.sf.cglib.transform.ClassReaderGenerator;
import net.sf.cglib.transform.impl.InterceptFieldEnabled;
import net.sf.cglib.transform.impl.InterceptFieldFilter;
import net.sf.cglib.transform.impl.InterceptFieldTransformer;
import net.sf.cglib.core.ClassNameReader;
import net.sf.cglib.core.DebuggingClassWriter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.bytecode.AbstractClassTransformerImpl;
import org.hibernate.bytecode.util.FieldFilter;
import org.hibernate.bytecode.util.ClassFilter;
import org.hibernate.HibernateException;
import org.objectweb.asm.Attribute;
import org.objectweb.asm.Type;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.attrs.Attributes;

/**
 * Enhances classes so that they implement {@link InterceptFieldEnabled}.
 * This interface is then used by Hibernate for some optimizations.
 *
 * @author Emmanuel Bernard
 */
public class CglibClassTransformer extends AbstractClassTransformerImpl {

	private static Log log = LogFactory.getLog( CglibClassTransformer.class.getName() );

	public CglibClassTransformer(ClassFilter classFilter, FieldFilter fieldFilter) {
		super( classFilter, fieldFilter );
	}

	protected byte[] doTransform(
			ClassLoader loader,
			String className,
			Class classBeingRedefined,
			ProtectionDomain protectionDomain,
			byte[] classfileBuffer) {
		ClassReader reader;
		try {
			reader = new ClassReader( new ByteArrayInputStream( classfileBuffer ) );
		}
		catch (IOException e) {
			log.error( "Unable to read class", e );
			throw new HibernateException( "Unable to read class: " + e.getMessage() );
		}

		String[] names = ClassNameReader.getClassInfo( reader );
		ClassWriter w = new DebuggingClassWriter( true );
		ClassTransformer t = getClassTransformer( names );
		if ( t != null ) {
			if ( log.isDebugEnabled() ) {
				log.debug( "Enhancing " + className );
			}
			ByteArrayOutputStream out;
			byte[] result;
			try {
				reader = new ClassReader( new ByteArrayInputStream( classfileBuffer ) );
				new TransformingClassGenerator(
						new ClassReaderGenerator( reader, attributes(), skipDebug() ), t
				).generateClass( w );
				out = new ByteArrayOutputStream();
				out.write( w.toByteArray() );
				result = out.toByteArray();
				out.close();
			}
			catch (Exception e) {
				log.error( "Unable to transform class", e );
				throw new HibernateException( "Unable to transform class: " + e.getMessage() );
			}
			return result;
		}
		return classfileBuffer;
	}


	private Attribute[] attributes() {
		return Attributes.getDefaultAttributes();
	}

	private boolean skipDebug() {
		return false;
	}

	private ClassTransformer getClassTransformer(final String[] classInfo) {
		if ( isAlreadyInstrumented( classInfo ) ) {
			return null;
		}
		return new InterceptFieldTransformer(
				new InterceptFieldFilter() {
					public boolean acceptRead(Type owner, String name) {
						return fieldFilter.shouldTransformFieldAccess( classInfo[0], owner.getClassName(), name );
					}

					public boolean acceptWrite(Type owner, String name) {
						return fieldFilter.shouldTransformFieldAccess( classInfo[0], owner.getClassName(), name );
					}
				}
		);
	}

	private boolean isAlreadyInstrumented(String[] classInfo) {
		for ( int i = 1; i < classInfo.length; i++ ) {
			if ( InterceptFieldEnabled.class.getName().equals( classInfo[i] ) ) {
				return true;
			}
		}
		return false;
	}
}
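A sketch of driving the transformer by hand follows. The transform(...) entry point is assumed to be inherited from AbstractClassTransformerImpl (defined in hibernate-core, not shown in this diff), and the exact method names on ClassFilter and FieldFilter are likewise assumed from those interfaces; the Widget class exists only to provide some bytes to enhance:

	package org.hibernate.bytecode.cglib.example;

	import java.io.ByteArrayOutputStream;
	import java.io.InputStream;

	import org.hibernate.bytecode.cglib.CglibClassTransformer;
	import org.hibernate.bytecode.util.ClassFilter;
	import org.hibernate.bytecode.util.FieldFilter;

	public class TransformExample {

		// Hypothetical class whose bytecode we feed to the transformer.
		public static class Widget {
			private String name;
			public String getName() { return name; }
			public void setName(String name) { this.name = name; }
		}

		public static void main(String[] args) throws Exception {
			ClassFilter classFilter = new ClassFilter() {
				public boolean shouldInstrumentClass(String className) {
					return true; // instrument everything for this demo
				}
			};
			FieldFilter fieldFilter = new FieldFilter() {
				public boolean shouldInstrumentField(String className, String fieldName) {
					return true;
				}
				public boolean shouldTransformFieldAccess(String transformingClassName, String fieldOwnerClassName, String fieldName) {
					return true;
				}
			};
			CglibClassTransformer transformer = new CglibClassTransformer( classFilter, fieldFilter );

			// Read the raw class file bytes of Widget from the classpath.
			String resource = Widget.class.getName().replace( '.', '/' ) + ".class";
			InputStream in = TransformExample.class.getClassLoader().getResourceAsStream( resource );
			ByteArrayOutputStream buffer = new ByteArrayOutputStream();
			int b;
			while ( ( b = in.read() ) != -1 ) {
				buffer.write( b );
			}
			byte[] original = buffer.toByteArray();

			byte[] enhanced = transformer.transform(
					TransformExample.class.getClassLoader(),
					Widget.class.getName(),
					null,
					null,
					original
			);
			System.out.println( "original: " + original.length + " bytes, enhanced: " + enhanced.length + " bytes" );
		}
	}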
@ -0,0 +1,46 @@
package org.hibernate.bytecode.cglib;

import org.hibernate.bytecode.ReflectionOptimizer;
import net.sf.cglib.reflect.FastClass;
import org.hibernate.InstantiationException;

import java.io.Serializable;
import java.io.ObjectOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;

/**
 * The {@link ReflectionOptimizer.InstantiationOptimizer} implementation for CGLIB,
 * which simply acts as an adapter to the {@link FastClass} class.
 *
 * @author Steve Ebersole
 */
public class InstantiationOptimizerAdapter implements ReflectionOptimizer.InstantiationOptimizer, Serializable {
	private FastClass fastClass;

	public InstantiationOptimizerAdapter(FastClass fastClass) {
		this.fastClass = fastClass;
	}

	public Object newInstance() {
		try {
			return fastClass.newInstance();
		}
		catch ( Throwable t ) {
			throw new InstantiationException(
					"Could not instantiate entity with CGLIB optimizer: ",
					fastClass.getJavaClass(),
					t
			);
		}
	}

	private void writeObject(ObjectOutputStream out) throws IOException {
		out.writeObject( fastClass.getJavaClass() );
	}

	private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
		Class beanClass = ( Class ) in.readObject();
		fastClass = FastClass.create( beanClass );
	}
}
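A short usage sketch of the adapter on its own, using only the API shown above; the Order class is hypothetical and only needs a public no-arg constructor:

	package org.hibernate.bytecode.cglib.example;

	import net.sf.cglib.reflect.FastClass;
	import org.hibernate.bytecode.cglib.InstantiationOptimizerAdapter;

	public class InstantiationExample {

		// Hypothetical entity-like class with the required public no-arg constructor.
		public static class Order {
			public Order() {
			}
		}

		public static void main(String[] args) {
			InstantiationOptimizerAdapter optimizer =
					new InstantiationOptimizerAdapter( FastClass.create( Order.class ) );
			// newInstance() goes through the generated FastClass rather than Constructor.newInstance().
			Object order = optimizer.newInstance();
			System.out.println( order.getClass().getName() );
		}
	}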
@ -0,0 +1,141 @@
package org.hibernate.bytecode.cglib;

import org.hibernate.bytecode.ProxyFactoryFactory;
import org.hibernate.bytecode.BasicProxyFactory;
import org.hibernate.proxy.ProxyFactory;
import org.hibernate.proxy.pojo.cglib.CGLIBProxyFactory;
import org.hibernate.AssertionFailure;
import org.hibernate.HibernateException;
import net.sf.cglib.proxy.Enhancer;
import net.sf.cglib.proxy.CallbackFilter;
import net.sf.cglib.proxy.MethodInterceptor;
import net.sf.cglib.proxy.MethodProxy;
import net.sf.cglib.proxy.NoOp;
import net.sf.cglib.proxy.Callback;
import net.sf.cglib.proxy.Factory;

import java.lang.reflect.Method;
import java.util.HashMap;

/**
 * A factory for CGLIB-based {@link ProxyFactory} instances.
 *
 * @author Steve Ebersole
 */
public class ProxyFactoryFactoryImpl implements ProxyFactoryFactory {

	/**
	 * Builds a CGLIB-based proxy factory.
	 *
	 * @return a new CGLIB-based proxy factory.
	 */
	public ProxyFactory buildProxyFactory() {
		return new CGLIBProxyFactory();
	}

	public BasicProxyFactory buildBasicProxyFactory(Class superClass, Class[] interfaces) {
		return new BasicProxyFactoryImpl( superClass, interfaces );
	}

	public static class BasicProxyFactoryImpl implements BasicProxyFactory {
		private final Class proxyClass;
		private final Factory factory;

		public BasicProxyFactoryImpl(Class superClass, Class[] interfaces) {
			if ( superClass == null && ( interfaces == null || interfaces.length < 1 ) ) {
				throw new AssertionFailure( "attempting to build proxy without any superclass or interfaces" );
			}

			Enhancer en = new Enhancer();
			en.setUseCache( false );
			en.setInterceptDuringConstruction( false );
			en.setUseFactory( true );
			en.setCallbackTypes( CALLBACK_TYPES );
			en.setCallbackFilter( FINALIZE_FILTER );
			if ( superClass != null ) {
				en.setSuperclass( superClass );
			}
			if ( interfaces != null && interfaces.length > 0 ) {
				en.setInterfaces( interfaces );
			}
			proxyClass = en.createClass();
			try {
				factory = ( Factory ) proxyClass.newInstance();
			}
			catch ( Throwable t ) {
				throw new HibernateException( "Unable to build CGLIB Factory instance" );
			}
		}

		public Object getProxy() {
			try {
				return factory.newInstance(
						new Callback[] { new PassThroughInterceptor( proxyClass.getName() ), NoOp.INSTANCE }
				);
			}
			catch ( Throwable t ) {
				throw new HibernateException( "Unable to instantiate proxy instance" );
			}
		}
	}

	private static final CallbackFilter FINALIZE_FILTER = new CallbackFilter() {
		public int accept(Method method) {
			if ( method.getParameterTypes().length == 0 && method.getName().equals( "finalize" ) ) {
				return 1;
			}
			else {
				return 0;
			}
		}
	};

	private static final Class[] CALLBACK_TYPES = new Class[] { MethodInterceptor.class, NoOp.class };

	private static class PassThroughInterceptor implements MethodInterceptor {
		private HashMap data = new HashMap();
		private final String proxiedClassName;

		public PassThroughInterceptor(String proxiedClassName) {
			this.proxiedClassName = proxiedClassName;
		}

		public Object intercept(
				Object obj,
				Method method,
				Object[] args,
				MethodProxy proxy) throws Throwable {
			String name = method.getName();
			if ( "toString".equals( name ) ) {
				return proxiedClassName + "@" + System.identityHashCode( obj );
			}
			else if ( "equals".equals( name ) ) {
				return args[0] instanceof Factory && ( ( Factory ) args[0] ).getCallback( 0 ) == this
						? Boolean.TRUE
						: Boolean.FALSE;
			}
			else if ( "hashCode".equals( name ) ) {
				return new Integer( System.identityHashCode( obj ) );
			}
			boolean hasGetterSignature = method.getParameterTypes().length == 0 && method.getReturnType() != null;
			boolean hasSetterSignature = method.getParameterTypes().length == 1
					&& ( method.getReturnType() == null || method.getReturnType() == void.class );
			if ( name.startsWith( "get" ) && hasGetterSignature ) {
				String propName = name.substring( 3 );
				return data.get( propName );
			}
			else if ( name.startsWith( "is" ) && hasGetterSignature ) {
				String propName = name.substring( 2 );
				return data.get( propName );
			}
			else if ( name.startsWith( "set" ) && hasSetterSignature ) {
				String propName = name.substring( 3 );
				data.put( propName, args[0] );
				return null;
			}
			else {
				// todo : what else to do here?
				return null;
			}
		}
	}
}
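To close the loop, a sketch of what BasicProxyFactoryImpl and PassThroughInterceptor do together: bean-style calls on the proxy are simply stored in, and read back from, the interceptor's internal map. The NamedThing interface below is hypothetical:

	package org.hibernate.bytecode.cglib.example;

	import org.hibernate.bytecode.BasicProxyFactory;
	import org.hibernate.bytecode.cglib.ProxyFactoryFactoryImpl;

	public class BasicProxyExample {

		// Hypothetical component interface with bean-style accessors.
		public interface NamedThing {
			String getName();
			void setName(String name);
		}

		public static void main(String[] args) {
			BasicProxyFactory factory = new ProxyFactoryFactoryImpl()
					.buildBasicProxyFactory( null, new Class[] { NamedThing.class } );

			NamedThing proxy = ( NamedThing ) factory.getProxy();
			proxy.setName( "widget" );                  // stored under "Name" in PassThroughInterceptor's map
			System.out.println( proxy.getName() );      // prints "widget"
			System.out.println( proxy );                // proxied class name + "@" + identity hash code
		}
	}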