Author: jason.greene@jboss.com
Date: 2008-03-27 17:26:02 -0400 (Thu, 27 Mar 2008)
New Revision: 5472
Modified:
experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMap.java
experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMapGCTestCase.java
Log:
Update docs, test
Modified: experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMap.java
===================================================================
--- experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMap.java 2008-03-27 21:03:09 UTC (rev 5471)
+++ experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMap.java 2008-03-27 21:26:02 UTC (rev 5472)
@@ -64,7 +64,7 @@
* non-strong values may disappear before their corresponding key.
*
* While this table does allow the use of both strong keys and values, it is
- * recommended to use {@link java.util.concurrent.ConcurrentHashMap} for this
+ * recommended to use {@link java.util.concurrent.ConcurrentHashMap} for such a
* configuration, since it is optimized for that case.
*
* Just like {@link java.util.concurrent.ConcurrentHashMap}, this class obeys
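Editor's note: as a hedged sketch of the choice the revised Javadoc recommends, the snippet below contrasts a strong/strong configuration (where plain ConcurrentHashMap is preferred) with a weak-key configuration; it uses only the three-argument constructor exercised by the test case further down, and the variable names are illustrative.

    import java.util.concurrent.ConcurrentHashMap;
    import jsr166y.ConcurrentReferenceHashMap;
    import jsr166y.ConcurrentReferenceHashMap.ReferenceType;

    public class MapChoiceSketch {
        public static void main(String[] args) {
            // Strong keys and strong values: prefer the optimized ConcurrentHashMap.
            ConcurrentHashMap<String, String> strongMap =
                new ConcurrentHashMap<String, String>();

            // Weak keys, strong values: entries become eligible for removal once
            // the key is no longer strongly referenced elsewhere.
            ConcurrentReferenceHashMap<Object, String> weakKeyMap =
                new ConcurrentReferenceHashMap<Object, String>(16,
                    ReferenceType.WEAK, ReferenceType.STRONG);

            strongMap.put("k", "v");
            weakKeyMap.put(new Object(), "v");
            System.out.println(strongMap.size() + " " + weakKeyMap.size());
        }
    }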
@@ -1260,7 +1260,15 @@
}
/**
- * Removes any entries, whose keys have been finalized
+ * Removes any stale entries whose keys have been finalized. Use of this
+ * method is normally not necessary since stale entries are automatically
+ * removed lazily, when blocking operations are required. However, there
+ * are some cases where this operation should be performed eagerly, such
+ * as cleaning up old references to a ClassLoader in a multi-classloader
+ * environment.
+ *
+ * Note: this method will acquire locks, one at a time, across all segments
+ * of this table, so if it is to be used, it should be used sparingly.
*/
public void purgeStaleEntries() {
for (int i = 0; i < segments.length; ++i)
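Editor's note: a minimal sketch of the eager-purge pattern the new Javadoc describes, assuming a hypothetical cache keyed by ClassLoader; the register/onUndeploy names are illustrative and not part of this change, but purgeStaleEntries() and the constructor are taken from the diff above.

    import jsr166y.ConcurrentReferenceHashMap;
    import jsr166y.ConcurrentReferenceHashMap.ReferenceType;

    public class LoaderCacheSketch {
        // Weak keys so cached metadata does not pin a ClassLoader; strong values.
        private final ConcurrentReferenceHashMap<ClassLoader, Object> metadata =
            new ConcurrentReferenceHashMap<ClassLoader, Object>(16,
                ReferenceType.WEAK, ReferenceType.STRONG);

        public void register(ClassLoader loader, Object info) {
            metadata.put(loader, info);
        }

        // Hypothetical hook invoked when a deployment is removed: rather than
        // waiting for a later blocking operation to remove stale entries lazily,
        // purge them eagerly so the dead ClassLoader's values are released now.
        // Note the purge acquires each segment lock in turn, so call it sparingly.
        public void onUndeploy() {
            metadata.purgeStaleEntries();
        }
    }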
Modified: experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMapGCTestCase.java
===================================================================
--- experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMapGCTestCase.java 2008-03-27 21:03:09 UTC (rev 5471)
+++ experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMapGCTestCase.java 2008-03-27 21:26:02 UTC (rev 5472)
@@ -1,24 +1,68 @@
package jsr166y;
-import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
-import jsr166y.ConcurrentReferenceHashMap.Option;
import jsr166y.ConcurrentReferenceHashMap.ReferenceType;
import junit.framework.TestCase;
public class ConcurrentReferenceHashMapGCTestCase extends TestCase {
- public void testBasicCleanup() throws Exception {
+ public void testWeakCleanup() throws Exception {
+ basicCleanup(false);
+ }
+
+ public void testWeakIterators() throws Exception {
+ iterators(false);
+ }
+
+ public void testSoftCleanup() throws Exception {
+ basicCleanup(true);
+ }
+
+ public void testSoftIterators() throws Exception {
+ iterators(true);
+ }
+
+ public void testSoftValues() throws Exception {
+ values(true);
+ }
+
+ public void testWeakValues() throws Exception {
+ values(false);
+ }
+
+ private void values(boolean soft) throws Exception {
+ ConcurrentReferenceHashMap<String, Integer> map =
+ new ConcurrentReferenceHashMap<String, Integer>(16,
+ ReferenceType.STRONG,
+ soft ? ReferenceType.SOFT : ReferenceType.WEAK);
+
+ Integer i = 5;
+ map.put("five", i);
+ map.put("one", new Integer(1));
+ assertEquals(2, map.size());
+ assertEquals(i, map.get("five"));
+ assertEquals(new Integer(1), map.get("one"));
+
+ gc(soft);
+ assertEquals(2, map.size());
+ assertEquals(i, map.get("five"));
+ assertTrue(map.containsKey("one"));
+ assertNull(map.get("one"));
+ }
+
+ private void basicCleanup(boolean soft) throws Exception {
ConcurrentReferenceHashMap<BinClump, Integer> map =
- new ConcurrentReferenceHashMap<BinClump, Integer>(0, .75f, 16,
- ReferenceType.SOFT, ReferenceType.STRONG, null);
+ new ConcurrentReferenceHashMap<BinClump, Integer>(16,
+ soft ? ReferenceType.SOFT : ReferenceType.WEAK,
+ ReferenceType.STRONG);
BinClump[] hold = new BinClump[100];
generateClumps(map, hold, 10000);
- gc();
- Thread.sleep(1000);
+ gc(soft);
+ Thread.sleep(500);
// trigger a cleanup without matching any key
for (int i = 0; i < 100; i++)
@@ -27,12 +71,14 @@
assertEquals(100, map.size());
}
- public void testIterators() throws Exception {
+ private void iterators(boolean soft) throws Exception {
ConcurrentReferenceHashMap<BinClump, Integer> map =
- new ConcurrentReferenceHashMap<BinClump, Integer>(0, .75f, 16,
- ReferenceType.SOFT, ReferenceType.STRONG, null);
+ new ConcurrentReferenceHashMap<BinClump, Integer>(16,
+ soft ? ReferenceType.SOFT : ReferenceType.WEAK,
+ ReferenceType.STRONG);
BinClump[] hold = new BinClump[100];
generateClumps(map, hold, 10000);
- gc();
+ gc(soft);
Thread.sleep(500);
// Stale entries are not yet cleared
@@ -67,11 +113,10 @@
// Should be stale free now
assertEquals(100, map.size());
Iterator<BinClump> i = map.keySet().iterator();
- while (i.hasNext() && i.next() != hold[0])
- ;
+ while (i.hasNext() && i.next() != hold[0]);
hold = null;
- gc();
+ gc(soft);
Thread.sleep(500);
// trigger a cleanup without matching any key
@@ -83,7 +128,7 @@
// Free iterator
i = null;
- gc();
+ gc(soft);
Thread.sleep(500);
// trigger a cleanup without matching any key
@@ -94,16 +139,21 @@
}
- private void gc() {
+ private void gc(boolean soft) {
System.gc();
- int chunkSize = (int) Math.min(Runtime.getRuntime().maxMemory() / 16,
- Integer.MAX_VALUE);
- try {
- LinkedList<long[]> list = new LinkedList<long[]>();
- for (;;)
- list.add(new long[chunkSize]);
- } catch (OutOfMemoryError e)
- {}
- System.gc();
+
+ if (soft) {
+ int chunkSize = (int) Math.min(
+ Runtime.getRuntime().maxMemory() / 16, Integer.MAX_VALUE);
+ try {
+ LinkedList<long[]> list = new LinkedList<long[]>();
+ for (;;)
+ list.add(new long[chunkSize]);
+ } catch (OutOfMemoryError e) {
+ }
+
+ System.gc();
+ }
}
private void generateClumps(ConcurrentReferenceHashMap<BinClump, Integer> map,
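Editor's note: to close, a hedged sketch of the stale-value behaviour the new values() test asserts: with weak (or soft) values, a collected value leaves a stale entry behind, so size() and containsKey() still report it while get() returns null. GC timing is not guaranteed by the JVM, so the commented outputs are illustrative only.

    import jsr166y.ConcurrentReferenceHashMap;
    import jsr166y.ConcurrentReferenceHashMap.ReferenceType;

    public class StaleValueSketch {
        public static void main(String[] args) throws InterruptedException {
            ConcurrentReferenceHashMap<String, Object> map =
                new ConcurrentReferenceHashMap<String, Object>(16,
                    ReferenceType.STRONG, ReferenceType.WEAK);

            map.put("one", new Object());  // value reachable only through the map
            System.gc();                   // the weak value may now be cleared
            Thread.sleep(500);

            System.out.println(map.size());             // likely still 1
            System.out.println(map.containsKey("one")); // likely still true
            System.out.println(map.get("one"));         // likely null after GC
        }
    }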