Hibernate SVN: r20880 - in core/branches/Branch_3_3_2_GA_CP/core/src/main/java/org/hibernate: impl and 1 other directory.
by hibernate-commits@lists.jboss.org
Author: stliu
Date: 2011-01-25 00:14:46 -0500 (Tue, 25 Jan 2011)
New Revision: 20880
Modified:
core/branches/Branch_3_3_2_GA_CP/core/src/main/java/org/hibernate/cache/UpdateTimestampsCache.java
core/branches/Branch_3_3_2_GA_CP/core/src/main/java/org/hibernate/impl/SessionFactoryImpl.java
Log:
JBPAPP-5814 HHH-5823 HHH-5824 use java.util.concurrent (JUC) to improve poor multithreaded performance
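The patch below replaces method-level synchronized in UpdateTimestampsCache with a java.util.concurrent.locks.ReentrantReadWriteLock, so concurrent staleness checks can share a read lock instead of serializing behind a single monitor. As a minimal, self-contained sketch of that locking pattern (class and member names here are illustrative, not the patched Hibernate code):

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.locks.ReentrantReadWriteLock;

// Illustrative only: demonstrates the read/write-lock pattern adopted by the
// patch, not the actual Hibernate classes touched by this revision.
public class TimestampTable {

    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    private final Map<String, Long> lastUpdate = new HashMap<String, Long>();

    // Writers (pre-invalidation / invalidation) take the exclusive write lock.
    public void invalidate(String space, long timestamp) {
        lock.writeLock().lock();
        try {
            lastUpdate.put( space, Long.valueOf( timestamp ) );
        }
        finally {
            lock.writeLock().unlock();
        }
    }

    // Readers share the read lock, so staleness checks no longer serialize
    // behind one another the way synchronized methods do.
    public boolean isUpToDate(String space, long resultTimestamp) {
        lock.readLock().lock();
        try {
            Long lastUpdateTs = lastUpdate.get( space );
            return lastUpdateTs == null || lastUpdateTs.longValue() < resultTimestamp;
        }
        finally {
            lock.readLock().unlock();
        }
    }
}

A ReentrantReadWriteLock lets any number of readers proceed in parallel while writers get exclusive access, which is the property the read-mostly isUpToDate path in the patched class relies on.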
Modified: core/branches/Branch_3_3_2_GA_CP/core/src/main/java/org/hibernate/cache/UpdateTimestampsCache.java
===================================================================
--- core/branches/Branch_3_3_2_GA_CP/core/src/main/java/org/hibernate/cache/UpdateTimestampsCache.java 2011-01-21 16:07:05 UTC (rev 20879)
+++ core/branches/Branch_3_3_2_GA_CP/core/src/main/java/org/hibernate/cache/UpdateTimestampsCache.java 2011-01-25 05:14:46 UTC (rev 20880)
@@ -1,10 +1,10 @@
/*
* Hibernate, Relational Persistence for Idiomatic Java
*
- * Copyright (c) 2008, Red Hat Middleware LLC or third-party contributors as
+ * Copyright (c) 2011, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
- * distributed under license by Red Hat Middleware LLC.
+ * distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
@@ -20,7 +20,6 @@
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
- *
*/
package org.hibernate.cache;
@@ -28,6 +27,7 @@
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.hibernate.HibernateException;
import org.hibernate.cfg.Settings;
@@ -36,76 +36,97 @@
/**
* Tracks the timestamps of the most recent updates to particular tables. It is
- * important that the cache timeout of the underlying cache implementation be set
- * to a higher value than the timeouts of any of the query caches. In fact, we
- * recommend that the the underlying cache not be configured for expiry at all.
- * Note, in particular, that an LRU cache expiry policy is never appropriate.
- *
+ * important that the cache timeout of the underlying cache implementation be
+ * set to a higher value than the timeouts of any of the query caches. In fact,
+ * we recommend that the underlying cache not be configured for expiry at
+ * all. Note, in particular, that an LRU cache expiry policy is never
+ * appropriate.
+ *
* @author Gavin King
* @author Mikheil Kapanadze
*/
public class UpdateTimestampsCache {
public static final String REGION_NAME = UpdateTimestampsCache.class.getName();
private static final Logger log = LoggerFactory.getLogger( UpdateTimestampsCache.class );
-
+ private final ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();
private final TimestampsRegion region;
- public UpdateTimestampsCache(Settings settings, Properties props) throws HibernateException {
+ public UpdateTimestampsCache( Settings settings, Properties props ) throws HibernateException {
String prefix = settings.getCacheRegionPrefix();
String regionName = prefix == null ? REGION_NAME : prefix + '.' + REGION_NAME;
log.info( "starting update timestamps cache at region: " + regionName );
this.region = settings.getRegionFactory().buildTimestampsRegion( regionName, props );
}
- public synchronized void preinvalidate(Serializable[] spaces) throws CacheException {
- //TODO: to handle concurrent writes correctly, this should return a Lock to the client
- Long ts = new Long( region.nextTimestamp() + region.getTimeout() );
- for ( int i=0; i<spaces.length; i++ ) {
- if ( log.isDebugEnabled() ) {
- log.debug( "Pre-invalidating space [" + spaces[i] + "]" );
+ public void preinvalidate( Serializable[] spaces ) throws CacheException {
+ readWriteLock.writeLock().lock();
+ try {
+
+ // TODO: to handle concurrent writes correctly, this should return a
+ // Lock to the client
+ Long ts = new Long( region.nextTimestamp() + region.getTimeout() );
+ for ( Serializable space : spaces ) {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "Pre-invalidating space [" + space + "]" );
+ }
+ // put() has nowait semantics, is this really appropriate?
+ // note that it needs to be async replication, never local or
+ // sync
+ region.put( space, ts );
}
- //put() has nowait semantics, is this really appropriate?
- //note that it needs to be async replication, never local or sync
- region.put( spaces[i], ts );
+ } finally {
+ readWriteLock.writeLock().unlock();
}
- //TODO: return new Lock(ts);
+ // TODO: return new Lock(ts);
}
- public synchronized void invalidate(Serializable[] spaces) throws CacheException {
- //TODO: to handle concurrent writes correctly, the client should pass in a Lock
- Long ts = new Long( region.nextTimestamp() );
- //TODO: if lock.getTimestamp().equals(ts)
- for ( int i=0; i<spaces.length; i++ ) {
- if ( log.isDebugEnabled() ) {
- log.debug( "Invalidating space [" + spaces[i] + "], timestamp: " + ts);
+ public void invalidate( Serializable[] spaces ) throws CacheException {
+ // TODO: to handle concurrent writes correctly, the client should pass
+ // in a Lock
+ readWriteLock.writeLock().lock();
+ try {
+ Long ts = new Long( region.nextTimestamp() );
+ // TODO: if lock.getTimestamp().equals(ts)
+ for ( Serializable space : spaces ) {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "Invalidating space [" + space + "], timestamp: " + ts );
+ }
+ // put() has nowait semantics, is this really appropriate?
+ // note that it needs to be async replication, never local or
+ // sync
+ region.put( space, ts );
}
- //put() has nowait semantics, is this really appropriate?
- //note that it needs to be async replication, never local or sync
- region.put( spaces[i], ts );
+ } finally {
+ readWriteLock.writeLock().unlock();
}
}
- public synchronized boolean isUpToDate(Set spaces, Long timestamp) throws HibernateException {
- Iterator iter = spaces.iterator();
- while ( iter.hasNext() ) {
- Serializable space = (Serializable) iter.next();
- Long lastUpdate = (Long) region.get(space);
- if ( lastUpdate==null ) {
- //the last update timestamp was lost from the cache
- //(or there were no updates since startup!)
- //updateTimestamps.put( space, new Long( updateTimestamps.nextTimestamp() ) );
- //result = false; // safer
- }
- else {
- if ( log.isDebugEnabled() ) {
- log.debug("[" + space + "] last update timestamp: " + lastUpdate + ", result set timestamp: " + timestamp );
+ public boolean isUpToDate( Set spaces, Long timestamp ) throws HibernateException {
+ readWriteLock.readLock().lock();
+ try {
+ Iterator iter = spaces.iterator();
+ while ( iter.hasNext() ) {
+ Serializable space = (Serializable) iter.next();
+ Long lastUpdate = (Long) region.get( space );
+ if ( lastUpdate == null ) {
+ // the last update timestamp was lost from the cache
+ // (or there were no updates since startup!)
+ // updateTimestamps.put( space, new Long(
+ // updateTimestamps.nextTimestamp() ) );
+ // result = false; // safer
+ } else {
+ if ( log.isDebugEnabled() ) {
+ log.debug( "[" + space + "] last update timestamp: " + lastUpdate + ", result set timestamp: " + timestamp );
+ }
+ if ( lastUpdate.longValue() >= timestamp.longValue() ) {
+ return false;
+ }
}
- if ( lastUpdate.longValue() >= timestamp.longValue() ) {
- return false;
- }
}
+ return true;
+ } finally {
+ readWriteLock.readLock().unlock();
}
- return true;
}
public void clear() throws CacheException {
@@ -115,18 +136,17 @@
public void destroy() {
try {
region.destroy();
+ } catch ( Exception e ) {
+ log.warn( "could not destroy UpdateTimestamps cache", e );
}
- catch (Exception e) {
- log.warn("could not destroy UpdateTimestamps cache", e);
- }
}
public TimestampsRegion getRegion() {
return region;
}
-
+
public String toString() {
- return "UpdateTimestampeCache";
+ return "UpdateTimestampCache";
}
}
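The class javadoc above describes the contract this file implements: each table ("space") has a last-update timestamp, and a cached query result is usable only if it is newer than every space it reads from; losing a timestamp to cache expiry makes a stale result look fresh, which is why an expiring or LRU region is discouraged. A standalone sketch of that staleness rule, using hypothetical names rather than the Hibernate API:

import java.util.Map;
import java.util.Set;

// Hypothetical, self-contained illustration of the staleness rule encoded by
// isUpToDate( Set spaces, Long timestamp ) above; names are made up.
public class StalenessCheck {

    public static boolean isUpToDate( Set<String> spaces, long resultTimestamp,
            Map<String, Long> lastUpdateBySpace ) {
        for ( String space : spaces ) {
            Long lastUpdate = lastUpdateBySpace.get( space );
            // A missing entry is treated as "no update since startup", that is,
            // up to date; this is exactly why the timestamps region must not
            // expire entries: an evicted timestamp silently hides an update.
            if ( lastUpdate != null && lastUpdate.longValue() >= resultTimestamp ) {
                return false;
            }
        }
        return true;
    }
}

For example, a result cached at timestamp 100 against ORDERS is reported stale as soon as ORDERS is invalidated at timestamp 100 or later.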
Modified: core/branches/Branch_3_3_2_GA_CP/core/src/main/java/org/hibernate/impl/SessionFactoryImpl.java
===================================================================
--- core/branches/Branch_3_3_2_GA_CP/core/src/main/java/org/hibernate/impl/SessionFactoryImpl.java 2011-01-21 16:07:05 UTC (rev 20879)
+++ core/branches/Branch_3_3_2_GA_CP/core/src/main/java/org/hibernate/impl/SessionFactoryImpl.java 2011-01-25 05:14:46 UTC (rev 20880)
@@ -40,6 +40,8 @@
import java.util.Map;
import java.util.Properties;
import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
import javax.naming.NamingException;
import javax.naming.Reference;
@@ -168,8 +170,8 @@
private final transient TransactionManager transactionManager;
private final transient QueryCache queryCache;
private final transient UpdateTimestampsCache updateTimestampsCache;
- private final transient Map queryCaches;
- private final transient Map allCacheRegions = new HashMap();
+ private final transient ConcurrentMap<String,QueryCache> queryCaches;
+ private final transient ConcurrentMap<String,Region> allCacheRegions = new ConcurrentHashMap<String, Region>();
private final transient StatisticsImpl statistics = new StatisticsImpl(this);
private final transient EventListeners eventListeners;
private final transient CurrentSessionContext currentSessionContext;
@@ -378,7 +380,7 @@
updateTimestampsCache = new UpdateTimestampsCache(settings, properties);
queryCache = settings.getQueryCacheFactory()
.getQueryCache(null, updateTimestampsCache, settings, properties);
- queryCaches = new HashMap();
+ queryCaches = new ConcurrentHashMap<String,QueryCache>();
allCacheRegions.put( updateTimestampsCache.getRegion().getName(), updateTimestampsCache.getRegion() );
allCacheRegions.put( queryCache.getRegion().getName(), queryCache.getRegion() );
}
@@ -986,27 +988,21 @@
return null;
}
- synchronized ( allCacheRegions ) {
- QueryCache currentQueryCache = ( QueryCache ) queryCaches.get( regionName );
- if ( currentQueryCache == null ) {
- currentQueryCache = settings.getQueryCacheFactory().getQueryCache( regionName, updateTimestampsCache, settings, properties );
- queryCaches.put( regionName, currentQueryCache );
- allCacheRegions.put( currentQueryCache.getRegion().getName(), currentQueryCache.getRegion() );
- }
- return currentQueryCache;
+ QueryCache currentQueryCache = queryCaches.get( regionName );
+ if ( currentQueryCache == null ) {
+ currentQueryCache = settings.getQueryCacheFactory().getQueryCache( regionName, updateTimestampsCache, settings, properties );
+ queryCaches.put( regionName, currentQueryCache );
+ allCacheRegions.put( currentQueryCache.getRegion().getName(), currentQueryCache.getRegion() );
}
+ return currentQueryCache;
}
public Region getSecondLevelCacheRegion(String regionName) {
- synchronized ( allCacheRegions ) {
- return ( Region ) allCacheRegions.get( regionName );
- }
+ return allCacheRegions.get( regionName );
}
public Map getAllSecondLevelCacheRegions() {
- synchronized ( allCacheRegions ) {
- return new HashMap( allCacheRegions );
- }
+ return new HashMap( allCacheRegions );
}
public boolean isClosed() {
@@ -1032,12 +1028,10 @@
throw new NullPointerException("use the zero-argument form to evict the default query cache");
}
else {
- synchronized (allCacheRegions) {
- if ( settings.isQueryCacheEnabled() ) {
- QueryCache currentQueryCache = (QueryCache) queryCaches.get(cacheRegion);
- if ( currentQueryCache != null ) {
- currentQueryCache.clear();
- }
+ if ( settings.isQueryCacheEnabled() ) {
+ QueryCache currentQueryCache = (QueryCache) queryCaches.get(cacheRegion);
+ if ( currentQueryCache != null ) {
+ currentQueryCache.clear();
}
}
}
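The SessionFactoryImpl hunks above drop the synchronized ( allCacheRegions ) blocks by storing the regions in a ConcurrentHashMap, whose individual get and put calls are thread-safe without external locking. A sketch of that pattern with hypothetical names (not the actual SessionFactoryImpl code), including putIfAbsent for the create-if-absent case, which a bare get-then-put on a ConcurrentMap does not make atomic:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Hypothetical registry illustrating the synchronized-HashMap to ConcurrentMap
// move; not the actual SessionFactoryImpl code.
public class RegionRegistry<R> {

    private final ConcurrentMap<String, R> regions = new ConcurrentHashMap<String, R>();

    // Single get/put calls need no external synchronization on a ConcurrentMap.
    public R get( String name ) {
        return regions.get( name );
    }

    public void register( String name, R region ) {
        regions.put( name, region );
    }

    // For create-if-absent, putIfAbsent keeps the check-then-act step atomic,
    // so at most one instance ends up registered per name.
    public R getOrCreate( String name, R candidate ) {
        R existing = regions.putIfAbsent( name, candidate );
        return existing != null ? existing : candidate;
    }
}

putIfAbsent returns the previously mapped value when another thread wins the race, so callers can discard their candidate and use the winner, keeping exactly one instance registered per name.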