HHH-7631 - Improve performance of UpdateTimestampsCache

Steve Ebersole 2012-09-21 07:02:16 -05:00
parent a8a3f9b112
commit f85e9247e9
1 changed file with 58 additions and 63 deletions
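In short, the diff below removes the ReentrantReadWriteLock that every preinvalidate/invalidate/isUpToDate call previously acquired, leaving concurrency to the underlying TimestampsRegion, and hoists loop-invariant work (the debug flag, the statistics flag, and the timestamp) out of the per-space loops. A minimal sketch of that hoisting pattern, using placeholder names rather than the actual Hibernate types:

// Hypothetical illustration only; TimestampTracker, region, and spaces are
// placeholder names, not Hibernate APIs.
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.logging.Level;
import java.util.logging.Logger;

class TimestampTracker {
    private static final Logger LOG = Logger.getLogger( TimestampTracker.class.getName() );

    // A concurrent map stands in for the cache region; it manages its own
    // thread safety, so no external ReentrantReadWriteLock is needed.
    private final ConcurrentMap<String, Long> region = new ConcurrentHashMap<>();

    void preinvalidate(List<String> spaces) {
        // Compute loop-invariant values once instead of on every iteration.
        final boolean debug = LOG.isLoggable( Level.FINE );
        final long ts = System.currentTimeMillis();

        for ( String space : spaces ) {
            if ( debug ) {
                LOG.fine( "Pre-invalidating space [" + space + "], timestamp: " + ts );
            }
            region.put( space, ts );
        }
    }
}

The same shape is applied to invalidate() and isUpToDate() in the actual commit below.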


@@ -26,7 +26,6 @@ package org.hibernate.cache.spi;
 import java.io.Serializable;
 import java.util.Properties;
 import java.util.Set;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
 import org.jboss.logging.Logger;
@@ -51,74 +50,72 @@ public class UpdateTimestampsCache {
     public static final String REGION_NAME = UpdateTimestampsCache.class.getName();

     private static final CoreMessageLogger LOG = Logger.getMessageLogger( CoreMessageLogger.class, UpdateTimestampsCache.class.getName() );

-    private ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();
-    private final TimestampsRegion region;
     private final SessionFactoryImplementor factory;
+    private final TimestampsRegion region;

     public UpdateTimestampsCache(Settings settings, Properties props, final SessionFactoryImplementor factory) throws HibernateException {
         this.factory = factory;
-        String prefix = settings.getCacheRegionPrefix();
-        String regionName = prefix == null ? REGION_NAME : prefix + '.' + REGION_NAME;
+        final String prefix = settings.getCacheRegionPrefix();
+        final String regionName = prefix == null ? REGION_NAME : prefix + '.' + REGION_NAME;
         LOG.startingUpdateTimestampsCache( regionName );
         this.region = settings.getRegionFactory().buildTimestampsRegion( regionName, props );
     }

     @SuppressWarnings({"UnusedDeclaration"})
-    public UpdateTimestampsCache(Settings settings, Properties props)
-            throws HibernateException {
+    public UpdateTimestampsCache(Settings settings, Properties props) throws HibernateException {
         this( settings, props, null );
     }

     @SuppressWarnings({"UnnecessaryBoxing"})
     public void preinvalidate(Serializable[] spaces) throws CacheException {
-        readWriteLock.writeLock().lock();
-        try {
-            Long ts = region.nextTimestamp() + region.getTimeout();
+        final boolean debug = LOG.isDebugEnabled();
+        final boolean stats = factory != null && factory.getStatistics().isStatisticsEnabled();
+        final Long ts = region.nextTimestamp() + region.getTimeout();
         for ( Serializable space : spaces ) {
+            if ( debug ) {
                 LOG.debugf( "Pre-invalidating space [%s], timestamp: %s", space, ts );
+            }
             //put() has nowait semantics, is this really appropriate?
             //note that it needs to be async replication, never local or sync
             region.put( space, ts );
-            if ( factory != null && factory.getStatistics().isStatisticsEnabled() ) {
+            if ( stats ) {
                 factory.getStatisticsImplementor().updateTimestampsCachePut();
             }
         }
-        }
-        finally {
-            readWriteLock.writeLock().unlock();
-        }
     }

     @SuppressWarnings({"UnnecessaryBoxing"})
     public void invalidate(Serializable[] spaces) throws CacheException {
-        readWriteLock.writeLock().lock();
-        try {
-            Long ts = region.nextTimestamp();
+        final boolean debug = LOG.isDebugEnabled();
+        final boolean stats = factory != null && factory.getStatistics().isStatisticsEnabled();
+        final Long ts = region.nextTimestamp();
         for (Serializable space : spaces) {
+            if ( debug ) {
                 LOG.debugf( "Invalidating space [%s], timestamp: %s", space, ts );
+            }
             //put() has nowait semantics, is this really appropriate?
             //note that it needs to be async replication, never local or sync
             region.put( space, ts );
-            if ( factory != null && factory.getStatistics().isStatisticsEnabled() ) {
+            if ( stats ) {
                 factory.getStatisticsImplementor().updateTimestampsCachePut();
             }
         }
-        }
-        finally {
-            readWriteLock.writeLock().unlock();
-        }
     }

     @SuppressWarnings({"unchecked", "UnnecessaryUnboxing"})
     public boolean isUpToDate(Set spaces, Long timestamp) throws HibernateException {
-        readWriteLock.readLock().lock();
-        try {
+        final boolean debug = LOG.isDebugEnabled();
+        final boolean stats = factory != null && factory.getStatistics().isStatisticsEnabled();
         for ( Serializable space : (Set<Serializable>) spaces ) {
             Long lastUpdate = (Long) region.get( space );
             if ( lastUpdate == null ) {
-                if ( factory != null && factory.getStatistics().isStatisticsEnabled() ) {
+                if ( stats ) {
                     factory.getStatisticsImplementor().updateTimestampsCacheMiss();
                 }
                 //the last update timestamp was lost from the cache
@@ -127,25 +124,23 @@ public class UpdateTimestampsCache {
                 //result = false; // safer
             }
             else {
-                if ( LOG.isDebugEnabled() ) {
+                if ( debug ) {
                     LOG.debugf(
                             "[%s] last update timestamp: %s",
                             space,
                             lastUpdate + ", result set timestamp: " + timestamp
                     );
                 }
-                if ( factory != null && factory.getStatistics().isStatisticsEnabled() ) {
+                if ( stats ) {
                     factory.getStatisticsImplementor().updateTimestampsCacheHit();
                 }
-                if ( lastUpdate >= timestamp ) return false;
+                if ( lastUpdate >= timestamp ) {
+                    return false;
+                }
             }
         }
         return true;
-        }
-        finally {
-            readWriteLock.readLock().unlock();
-        }
     }

     public void clear() throws CacheException {
         region.evictAll();