HHH-5823 - Poor multithread performance in UpdateTimestampsCache class

Steve Ebersole 2011-01-21 12:39:25 -06:00
parent 4d6a68c2d0
commit ccd23dbd3d
1 changed file with 71 additions and 47 deletions

@@ -1,10 +1,10 @@
 /*
  * Hibernate, Relational Persistence for Idiomatic Java
  *
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2011, Red Hat Inc. or third-party contributors as
  * indicated by the @author tags or express copyright attribution
  * statements applied by the authors.  All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
  *
  * This copyrighted material is made available to anyone wishing to use, modify,
  * copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,14 +20,13 @@
  * Free Software Foundation, Inc.
  * 51 Franklin Street, Fifth Floor
  * Boston, MA  02110-1301  USA
- *
  */
 package org.hibernate.cache;
 
 import java.io.Serializable;
-import java.util.Iterator;
 import java.util.Properties;
 import java.util.Set;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -49,6 +48,7 @@ public class UpdateTimestampsCache {
 	public static final String REGION_NAME = UpdateTimestampsCache.class.getName();
 	private static final Logger log = LoggerFactory.getLogger( UpdateTimestampsCache.class );
+	private ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();
 
 	private final TimestampsRegion region;
 
 	public UpdateTimestampsCache(Settings settings, Properties props) throws HibernateException {
@@ -58,40 +58,60 @@ public class UpdateTimestampsCache {
 		this.region = settings.getRegionFactory().buildTimestampsRegion( regionName, props );
 	}
 
-	public synchronized void preinvalidate(Serializable[] spaces) throws CacheException {
-		//TODO: to handle concurrent writes correctly, this should return a Lock to the client
+	@SuppressWarnings({"UnnecessaryBoxing"})
+	public void preinvalidate(Serializable[] spaces) throws CacheException {
+		// TODO: to handle concurrent writes correctly, this should return a Lock to the client
+		readWriteLock.writeLock().lock();
+
+		try {
 		Long ts = new Long( region.nextTimestamp() + region.getTimeout() );
-		for ( int i=0; i<spaces.length; i++ ) {
+		for ( Serializable space : spaces ) {
 			if ( log.isDebugEnabled() ) {
-				log.debug( "Pre-invalidating space [" + spaces[i] + "]" );
+				log.debug( "Pre-invalidating space [" + space + "]" );
 			}
 			//put() has nowait semantics, is this really appropriate?
 			//note that it needs to be async replication, never local or sync
-			region.put( spaces[i], ts );
+			region.put( space, ts );
 		}
 		//TODO: return new Lock(ts);
 		}
+		finally {
+			readWriteLock.writeLock().unlock();
+		}
+	}
 
-	public synchronized void invalidate(Serializable[] spaces) throws CacheException {
+	@SuppressWarnings({"UnnecessaryBoxing"})
+	public void invalidate(Serializable[] spaces) throws CacheException {
 		//TODO: to handle concurrent writes correctly, the client should pass in a Lock
+		readWriteLock.writeLock().lock();
+
+		try {
 		Long ts = new Long( region.nextTimestamp() );
 		//TODO: if lock.getTimestamp().equals(ts)
-		for ( int i=0; i<spaces.length; i++ ) {
-			if ( log.isDebugEnabled() ) {
-				log.debug( "Invalidating space [" + spaces[i] + "], timestamp: " + ts);
+		for (Serializable space : spaces) {
+			if (log.isDebugEnabled()) {
+				log.debug( "Invalidating space [" + space + "], timestamp: " + ts );
 			}
 			//put() has nowait semantics, is this really appropriate?
 			//note that it needs to be async replication, never local or sync
-			region.put( spaces[i], ts );
+			region.put( space, ts );
 		}
 		}
+		finally {
+			readWriteLock.writeLock().unlock();
+		}
 	}
 
-	public synchronized boolean isUpToDate(Set spaces, Long timestamp) throws HibernateException {
-		Iterator iter = spaces.iterator();
-		while ( iter.hasNext() ) {
-			Serializable space = (Serializable) iter.next();
-			Long lastUpdate = (Long) region.get(space);
-			if ( lastUpdate==null ) {
+	@SuppressWarnings({"unchecked", "UnnecessaryUnboxing"})
+	public boolean isUpToDate(Set spaces, Long timestamp) throws HibernateException {
+		readWriteLock.readLock().lock();
+
+		try {
+		for ( Serializable space : (Set<Serializable>) spaces ) {
+			Long lastUpdate = (Long) region.get( space );
+			if ( lastUpdate == null ) {
 				//the last update timestamp was lost from the cache
 				//(or there were no updates since startup!)
 				//updateTimestamps.put( space, new Long( updateTimestamps.nextTimestamp() ) );
@@ -99,7 +119,7 @@ public class UpdateTimestampsCache {
 			}
 			else {
 				if ( log.isDebugEnabled() ) {
-					log.debug("[" + space + "] last update timestamp: " + lastUpdate + ", result set timestamp: " + timestamp );
+					log.debug( "[" + space + "] last update timestamp: " + lastUpdate + ", result set timestamp: " + timestamp );
 				}
 				if ( lastUpdate.longValue() >= timestamp.longValue() ) {
 					return false;
@@ -108,6 +128,10 @@ public class UpdateTimestampsCache {
 			}
 		}
 		return true;
 		}
+		finally {
+			readWriteLock.readLock().unlock();
+		}
+	}
 
 	public void clear() throws CacheException {
 		region.evictAll();
@@ -127,7 +151,7 @@ public class UpdateTimestampsCache {
 	}
 
 	public String toString() {
-		return "UpdateTimestampeCache";
+		return "UpdateTimestampCache";
 	}
 
 }
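
The essence of the change: the three cache methods were previously synchronized, so every query-cache timestamp check serialized on the same monitor; the commit switches to a ReentrantReadWriteLock, letting the read-only isUpToDate() path run concurrently across threads while preinvalidate() and invalidate() still take an exclusive write lock. Below is a minimal, self-contained sketch of that locking pattern only, not Hibernate's actual API: the class name TimestampCacheSketch and the plain Map standing in for TimestampsRegion are hypothetical, and timestamp generation is simplified to a caller-supplied value.

import java.io.Serializable;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;

// Hypothetical stand-in for UpdateTimestampsCache: a plain Map replaces the
// TimestampsRegion, and timestamps are passed in rather than generated.
public class TimestampCacheSketch {
	private final ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();
	private final Map<Serializable, Long> region = new ConcurrentHashMap<Serializable, Long>();

	// Invalidation mutates shared state, so it takes the exclusive write lock,
	// mirroring the patched preinvalidate()/invalidate().
	public void invalidate(Serializable[] spaces, long timestamp) {
		readWriteLock.writeLock().lock();
		try {
			for ( Serializable space : spaces ) {
				region.put( space, timestamp );
			}
		}
		finally {
			readWriteLock.writeLock().unlock();
		}
	}

	// The up-to-date check only reads, so it takes the shared read lock; many
	// threads can run it at once instead of queueing on a synchronized method.
	public boolean isUpToDate(Set<Serializable> spaces, long resultSetTimestamp) {
		readWriteLock.readLock().lock();
		try {
			for ( Serializable space : spaces ) {
				Long lastUpdate = region.get( space );
				// A space updated at or after the result set's timestamp makes it stale;
				// an unknown space is treated as up to date, as in the original code.
				if ( lastUpdate != null && lastUpdate >= resultSetTimestamp ) {
					return false;
				}
			}
			return true;
		}
		finally {
			readWriteLock.readLock().unlock();
		}
	}
}

A read-write lock suits this workload because timestamp checks vastly outnumber invalidations: every cached query result triggers an isUpToDate() lookup, while invalidation only happens when the underlying tables change, which is the multithreaded contention pattern the HHH-5823 title points at.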