[jbosscache-commits] JBoss Cache SVN: r5470 - experimental and 4 other directories.

jbosscache-commits at lists.jboss.org jbosscache-commits at lists.jboss.org
Thu Mar 27 13:09:21 EDT 2008


Author: jason.greene at jboss.com
Date: 2008-03-27 13:09:21 -0400 (Thu, 27 Mar 2008)
New Revision: 5470

Added:
   experimental/
   experimental/jsr166/
   experimental/jsr166/.classpath
   experimental/jsr166/.project
   experimental/jsr166/lib/
   experimental/jsr166/lib/junit.jar
   experimental/jsr166/src/
   experimental/jsr166/src/jsr166y/
   experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMap.java
   experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMapGCTestCase.java
   experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMapTest.java
   experimental/jsr166/src/jsr166y/ConcurrentWeakHashMap.java
   experimental/jsr166/src/jsr166y/ConcurrentWeakHashMapGCTestCase.java
   experimental/jsr166/src/jsr166y/ConcurrentWeakHashMapTest.java
   experimental/jsr166/src/jsr166y/JSR166TestCase.java
   experimental/jsr166/src/jsr166y/LoopHelpers.java
   experimental/jsr166/src/jsr166y/MapCheck.java
   experimental/jsr166/src/jsr166y/MapLoops.java
   experimental/jsr166/src/jsr166y/SynchronizedWeakHashMap.java
Log:
Add experimental JSR166 work


Added: experimental/jsr166/.classpath
===================================================================
--- experimental/jsr166/.classpath	                        (rev 0)
+++ experimental/jsr166/.classpath	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="src" path="src"/>
+	<classpathentry kind="lib" path="lib/junit.jar"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+	<classpathentry kind="output" path="bin"/>
+</classpath>

Added: experimental/jsr166/.project
===================================================================
--- experimental/jsr166/.project	                        (rev 0)
+++ experimental/jsr166/.project	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>jsr166</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.eclipse.jdt.core.javabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+	</natures>
+</projectDescription>

Added: experimental/jsr166/lib/junit.jar
===================================================================
(Binary files differ)


Property changes on: experimental/jsr166/lib/junit.jar
___________________________________________________________________
Name: svn:mime-type
   + application/octet-stream

Added: experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMap.java
===================================================================
--- experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMap.java	                        (rev 0)
+++ experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMap.java	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,1594 @@
+/*
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ */
+
+package jsr166y;
+import java.io.IOException;
+import java.io.Serializable;
+import java.lang.ref.Reference;
+import java.lang.ref.ReferenceQueue;
+import java.lang.ref.SoftReference;
+import java.lang.ref.WeakReference;
+import java.util.AbstractCollection;
+import java.util.AbstractMap;
+import java.util.AbstractSet;
+import java.util.Collection;
+import java.util.ConcurrentModificationException;
+import java.util.EnumSet;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Hashtable;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * A hash table with <em>weak keys</em>, full concurrency of retrievals, and
+ * adjustable expected concurrency for updates. Similar to
+ * {@link java.util.WeakHashMap}, entries of this table are periodically
+ * removed once their corresponding keys are no longer referenced outside of
+ * this table. In other words, this table will not prevent a key from being
+ * discarded by the garbage collector. Once a key has been discarded by the
+ * collector, the corresponding entry is no longer visible to this table;
+ * however, the entry may occupy space until a future table operation decides to
+ * reclaim it. For this reason, summary functions such as <tt>size</tt> and
+ * <tt>isEmpty</tt> might return a value greater than the observed number of
+ * entries. In order to support a high level of concurrency, stale entries are
+ * only reclaimed during blocking (usually mutating) operations.
+ * 
+ * While keys in this table are only held using a weak reference, values are
+ * held using a normal strong reference. This provides the guarantee that a
+ * value will always have at least the same life-span as its key. For this
+ * reason, care should be taken to ensure that a value never refers, either
+ * directly or indirectly, to its key, thereby preventing reclamation. If weak
+ * values are desired, one can simply use a {@link WeakReference} for the value
+ * type.
+ * 
+ * Just like {@link java.util.ConcurrentHashMap}, this class obeys the same
+ * functional specification as {@link java.util.Hashtable}, and includes
+ * versions of methods corresponding to each method of <tt>Hashtable</tt>.
+ * However, even though all operations are thread-safe, retrieval operations do
+ * <em>not</em> entail locking, and there is <em>not</em> any support for
+ * locking the entire table in a way that prevents all access. This class is
+ * fully interoperable with <tt>Hashtable</tt> in programs that rely on its
+ * thread safety but not on its synchronization details.
+ * 
+ * <p>
+ * Retrieval operations (including <tt>get</tt>) generally do not block, so
+ * may overlap with update operations (including <tt>put</tt> and
+ * <tt>remove</tt>). Retrievals reflect the results of the most recently
+ * <em>completed</em> update operations holding upon their onset. For
+ * aggregate operations such as <tt>putAll</tt> and <tt>clear</tt>,
+ * concurrent retrievals may reflect insertion or removal of only some entries.
+ * Similarly, Iterators and Enumerations return elements reflecting the state of
+ * the hash table at some point at or since the creation of the
+ * iterator/enumeration. They do <em>not</em> throw
+ * {@link ConcurrentModificationException}. However, iterators are designed to
+ * be used by only one thread at a time.
+ * 
+ * <p>
+ * The allowed concurrency among update operations is guided by the optional
+ * <tt>concurrencyLevel</tt> constructor argument (default <tt>16</tt>),
+ * which is used as a hint for internal sizing. The table is internally
+ * partitioned to try to permit the indicated number of concurrent updates
+ * without contention. Because placement in hash tables is essentially random,
+ * the actual concurrency will vary. Ideally, you should choose a value to
+ * accommodate as many threads as will ever concurrently modify the table. Using
+ * a significantly higher value than you need can waste space and time, and a
+ * significantly lower value can lead to thread contention. But overestimates
+ * and underestimates within an order of magnitude do not usually have much
+ * noticeable impact. A value of one is appropriate when it is known that only
+ * one thread will modify and all others will only read. Also, resizing this or
+ * any other kind of hash table is a relatively slow operation, so, when
+ * possible, it is a good idea to provide estimates of expected table sizes in
+ * constructors.
+ * 
+ * <p>
+ * This class and its views and iterators implement all of the <em>optional</em>
+ * methods of the {@link Map} and {@link Iterator} interfaces.
+ * 
+ * <p>
+ * Like {@link Hashtable} but unlike {@link HashMap}, this class does
+ * <em>not</em> allow <tt>null</tt> to be used as a key or value.
+ * 
+ * <p>
+ * This class is a member of the <a href="{@docRoot}/../technotes/guides/collections/index.html">
+ * Java Collections Framework</a>.
+ * 
+ * @author Doug Lea
+ * @author Jason T. Greene
+ * @param <K> the type of keys maintained by this map
+ * @param <V> the type of mapped values
+ */
+public class ConcurrentReferenceHashMap<K, V> extends AbstractMap<K, V>
+        implements java.util.concurrent.ConcurrentMap<K, V>, Serializable {
+    private static final long serialVersionUID = 7249069246763182397L;
+
+    /*
+     * The basic strategy is to subdivide the table among Segments,
+     * each of which itself is a concurrently readable hash table.
+     */
+
+    public static enum ReferenceType {STRONG, WEAK, SOFT};
+    
+    public static enum Option {IDENTITY_COMPARISONS};
+    
+    /* ---------------- Constants -------------- */
+
+    static final ReferenceType DEFAULT_KEY_TYPE = ReferenceType.WEAK;
+    
+    static final ReferenceType DEFAULT_VALUE_TYPE = ReferenceType.STRONG;
+    
+    
+    /**
+     * The default initial capacity for this table,
+     * used when not otherwise specified in a constructor.
+     */
+    static final int DEFAULT_INITIAL_CAPACITY = 16;
+
+    /**
+     * The default load factor for this table, used when not
+     * otherwise specified in a constructor.
+     */
+    static final float DEFAULT_LOAD_FACTOR = 0.75f;
+
+    /**
+     * The default concurrency level for this table, used when not
+     * otherwise specified in a constructor.
+     */
+    static final int DEFAULT_CONCURRENCY_LEVEL = 16;
+
+    /**
+     * The maximum capacity, used if a higher value is implicitly
+     * specified by either of the constructors with arguments.  MUST
+     * be a power of two <= 1<<30 to ensure that entries are indexable
+     * using ints.
+     */
+    static final int MAXIMUM_CAPACITY = 1 << 30;
+
+    /**
+     * The maximum number of segments to allow; used to bound
+     * constructor arguments.
+     */
+    static final int MAX_SEGMENTS = 1 << 16; // slightly conservative
+
+    /**
+     * Number of unsynchronized retries in size and containsValue
+     * methods before resorting to locking. This is used to avoid
+     * unbounded retries if tables undergo continuous modification
+     * which would make it impossible to obtain an accurate result.
+     */
+    static final int RETRIES_BEFORE_LOCK = 2;
+
+    /* ---------------- Fields -------------- */
+
+    /**
+     * Mask value for indexing into segments. The upper bits of a
+     * key's hash code are used to choose the segment.
+     */
+    final int segmentMask;
+
+    /**
+     * Shift value for indexing within segments.
+     */
+    final int segmentShift;
+
+    /**
+     * The segments, each of which is a specialized hash table
+     */
+    final Segment<K,V>[] segments;
+    
+    boolean identityComparisons;
+
+    transient Set<K> keySet;
+    transient Set<Map.Entry<K,V>> entrySet;
+    transient Collection<V> values;
+
+    /* ---------------- Small Utilities -------------- */
+
+    /**
+     * Applies a supplemental hash function to a given hashCode, which
+     * defends against poor quality hash functions.  This is critical
+     * because ConcurrentReferenceHashMap uses power-of-two length hash tables,
+     * that otherwise encounter collisions for hashCodes that do not
+     * differ in lower or upper bits.
+     */
+    private static int hash(int h) {
+        // Spread bits to regularize both segment and index locations,
+        // using variant of single-word Wang/Jenkins hash.
+        h += (h <<  15) ^ 0xffffcd7d;
+        h ^= (h >>> 10);
+        h += (h <<   3);
+        h ^= (h >>>  6);
+        h += (h <<   2) + (h << 14);
+        return h ^ (h >>> 16);
+    }
+
+    /**
+     * Returns the segment that should be used for key with given hash
+     * @param hash the hash code for the key
+     * @return the segment
+     */
+    final Segment<K,V> segmentFor(int hash) {
+        return segments[(hash >>> segmentShift) & segmentMask];
+    }
+   
+    private int hashOf(Object key) {
+        return hash(identityComparisons ? 
+                System.identityHashCode(key) : key.hashCode());
+    }
+    
+    /* ---------------- Inner Classes -------------- */
+    
+    static interface KeyReference {
+        int keyHash();
+    }
+    
+    /**
+     * A weak-key reference which stores the key hash needed for reclamation.
+     */
+    static final class WeakKeyReference<K> extends WeakReference<K>  implements KeyReference {
+        final int hash;
+        WeakKeyReference(K key, int hash, ReferenceQueue<K> refQueue) {
+            super(key, refQueue);
+            this.hash = hash;
+        }
+        public final int keyHash() {
+            return hash;
+        }
+    }
+    
+    /**
+     * A soft-key reference which stores the key hash needed for reclamation.
+     */
+    static final class SoftKeyReference<K> extends SoftReference<K> implements KeyReference {
+        final int hash;
+        SoftKeyReference(K key, int hash, ReferenceQueue<K> refQueue) {
+            super(key, refQueue);
+            this.hash = hash;
+        }
+        public final int keyHash() {
+            return hash;
+        }
+    }
+    
+    /**
+     * ConcurrentReferenceHashMap list entry. Note that this is never exported
+     * out as a user-visible Map.Entry.
+     *
+     * Because the value field is volatile, not final, it is legal wrt
+     * the Java Memory Model for an unsynchronized reader to see null
+     * instead of initial value when read via a data race.  Although a
+     * reordering leading to this is not likely to ever actually
+     * occur, the Segment.readValueUnderLock method is used as a
+     * backup in case a null (pre-initialized) value is ever seen in
+     * an unsynchronized access method.
+     */
+    static final class HashEntry<K,V> {
+        final Object keyRef;
+        final int hash;
+        volatile Object valueRef;
+        final HashEntry<K,V> next;
+        final ReferenceType keyType;
+        final ReferenceType valueType;
+
+        HashEntry(K key, int hash,  HashEntry<K,V> next, V value, 
+                ReferenceType keyType, ReferenceType valueType, 
+                ReferenceQueue<K> refQueue) {
+            this.keyType = keyType;
+            this.valueType = valueType;
+            this.keyRef = newKeyReference(key, hash, refQueue);
+            this.hash = hash;
+            this.next = next;
+            this.valueRef = newValueReference(value);
+        }
+        
+        final Object newKeyReference(K key, int hash, ReferenceQueue<K> refQueue) {
+            switch (keyType) {
+                case WEAK:
+                    return new WeakKeyReference<K>(key, hash, refQueue);
+                case SOFT:
+                    return new SoftKeyReference<K>(key, hash, refQueue);
+            }
+            
+            return key;
+        }
+        
+        final Object newValueReference(V value) {
+            switch (valueType) {
+                case WEAK:
+                    return new WeakReference<V>(value);
+                case SOFT:
+                    return new SoftReference<V>(value);
+            }
+            
+            return value;
+        }
+        
+        @SuppressWarnings("unchecked")
+        final K key() {
+            if (keyRef instanceof Reference)
+                return ((Reference<K>)keyRef).get();
+            
+            return (K) keyRef;
+        }
+        
+        final V value() {
+            return dereferenceValue(valueRef);
+        }
+        
+        @SuppressWarnings("unchecked")
+        final V dereferenceValue(Object value) {
+            if (value instanceof Reference)
+                return ((Reference<V>)value).get();
+            
+            return (V) value;
+        }
+        
+        final void setValue(V value) {
+            this.valueRef = newValueReference(value);
+        }
+
+        @SuppressWarnings("unchecked")
+        static final <K,V> HashEntry<K,V>[] newArray(int i) {
+            return new HashEntry[i];
+        }
+    }
+
+    /**
+     * Segments are specialized versions of hash tables.  This
+     * subclasses from ReentrantLock opportunistically, just to
+     * simplify some locking and avoid separate construction.
+     */
+    static final class Segment<K,V> extends ReentrantLock implements Serializable {
+        /*
+         * Segments maintain a table of entry lists that are ALWAYS
+         * kept in a consistent state, so can be read without locking.
+         * Next fields of nodes are immutable (final).  All list
+         * additions are performed at the front of each bin. This
+         * makes it easy to check changes, and also fast to traverse.
+         * When nodes would otherwise be changed, new nodes are
+         * created to replace them. This works well for hash tables
+         * since the bin lists tend to be short. (The average length
+         * is less than two for the default load factor threshold.)
+         *
+         * Read operations can thus proceed without locking, but rely
+         * on selected uses of volatiles to ensure that completed
+         * write operations performed by other threads are
+         * noticed. For most purposes, the "count" field, tracking the
+         * number of elements, serves as that volatile variable
+         * ensuring visibility.  This is convenient because this field
+         * needs to be read in many read operations anyway:
+         *
+         *   - All (unsynchronized) read operations must first read the
+         *     "count" field, and should not look at table entries if
+         *     it is 0.
+         *
+         *   - All (synchronized) write operations should write to
+         *     the "count" field after structurally changing any bin.
+         *     The operations must not take any action that could even
+         *     momentarily cause a concurrent read operation to see
+         *     inconsistent data. This is made easier by the nature of
+         *     the read operations in Map. For example, no operation
+         *     can reveal that the table has grown but the threshold
+         *     has not yet been updated, so there are no atomicity
+         *     requirements for this with respect to reads.
+         *
+         * As a guide, all critical volatile reads and writes to the
+         * count field are marked in code comments.
+         */
+
+        private static final long serialVersionUID = 2249069246763182397L;
+
+        /**
+         * The number of elements in this segment's region.
+         */
+        transient volatile int count;
+
+        /**
+         * Number of updates that alter the size of the table. This is
+         * used during bulk-read methods to make sure they see a
+         * consistent snapshot: If modCounts change during a traversal
+         * of segments computing size or checking containsValue, then
+         * we might have an inconsistent view of state so (usually)
+         * must retry.
+         */
+        transient int modCount;
+
+        /**
+         * The table is rehashed when its size exceeds this threshold.
+         * (The value of this field is always <tt>(int)(capacity *
+         * loadFactor)</tt>.)
+         */
+        transient int threshold;
+
+        /**
+         * The per-segment table.
+         */
+        transient volatile HashEntry<K,V>[] table;
+
+        /**
+         * The load factor for the hash table.  Even though this value
+         * is same for all segments, it is replicated to avoid needing
+         * links to outer object.
+         * @serial
+         */
+        final float loadFactor;
+
+        /**
+         * The collected weak-key reference queue for this segment. 
+         * This should be (re)initialized whenever table is assigned.
+         */
+        transient volatile ReferenceQueue<K> refQueue;
+        
+        final ReferenceType keyType;
+        
+        final ReferenceType valueType;
+        
+        final boolean identityComparisons;
+        
+        Segment(int initialCapacity, float lf, ReferenceType keyType, 
+                ReferenceType valueType, boolean identityComparisons) {
+            loadFactor = lf;
+            this.keyType = keyType;
+            this.valueType = valueType;
+            this.identityComparisons = identityComparisons;
+            setTable(HashEntry.<K,V>newArray(initialCapacity));
+        }
+
+        @SuppressWarnings("unchecked")
+        static final <K,V> Segment<K,V>[] newArray(int i) {
+            return new Segment[i];
+        }
+        
+        private boolean keyEq(Object src, Object dest) {
+            return identityComparisons ? src == dest : src.equals(dest);
+        }
+        
+        /**
+         * Sets table to new HashEntry array.
+         * Call only while holding lock or in constructor.
+         */
+        void setTable(HashEntry<K,V>[] newTable) {
+            threshold = (int)(newTable.length * loadFactor);
+            table = newTable;
+            refQueue = new ReferenceQueue<K>();
+        }
+
+        /**
+         * Returns properly casted first entry of bin for given hash.
+         */
+        HashEntry<K,V> getFirst(int hash) {
+            HashEntry<K,V>[] tab = table;
+            return tab[hash & (tab.length - 1)];
+        }
+        
+        HashEntry<K,V> newHashEntry(K key, int hash, HashEntry<K, V> next, V value) {
+            return new HashEntry<K,V>(key, hash, next, value, keyType, valueType, refQueue);
+        }
+
+        /**
+         * Reads value field of an entry under lock. Called if value
+         * field ever appears to be null. This is possible only if a
+         * compiler happens to reorder a HashEntry initialization with
+         * its table assignment, which is legal under memory model
+         * but is not known to ever occur.
+         */
+        V readValueUnderLock(HashEntry<K,V> e) {
+            lock();
+            try {
+                removeStale();
+                return e.value();
+            } finally {
+                unlock();
+            }
+        }
+
+        /* Specialized implementations of map methods */
+
+        V get(Object key, int hash) {
+            if (count != 0) { // read-volatile
+                HashEntry<K,V> e = getFirst(hash);
+                while (e != null) {
+                    if (e.hash == hash && keyEq(key, e.key())) {
+                        Object opaque = e.valueRef;
+                        if (opaque != null)
+                            return e.dereferenceValue(opaque); 
+                        
+                        return readValueUnderLock(e);  // recheck
+                    }
+                    e = e.next;
+                }
+            }
+            return null;
+        }
+
+        boolean containsKey(Object key, int hash) {
+            if (count != 0) { // read-volatile
+                HashEntry<K,V> e = getFirst(hash);
+                while (e != null) {
+                    if (e.hash == hash && keyEq(key, e.key()))
+                        return true;
+                    e = e.next;
+                }
+            }
+            return false;
+        }
+
+        boolean containsValue(Object value) {
+            if (count != 0) { // read-volatile
+                HashEntry<K,V>[] tab = table;
+                int len = tab.length;
+                for (int i = 0 ; i < len; i++) {
+                    for (HashEntry<K,V> e = tab[i]; e != null; e = e.next) {
+                        Object opaque = e.valueRef;
+                        V v;
+                        
+                        if (opaque == null) 
+                            v = readValueUnderLock(e); // recheck
+                        else 
+                            v = e.dereferenceValue(opaque);
+                        
+                        if (value.equals(v))
+                            return true;
+                    }
+                }
+            }
+            return false;
+        }
+
+        boolean replace(K key, int hash, V oldValue, V newValue) {
+            lock();
+            try {
+                removeStale();
+                HashEntry<K,V> e = getFirst(hash);
+                while (e != null && (e.hash != hash || !keyEq(key, e.key())))
+                    e = e.next;
+
+                boolean replaced = false;
+                if (e != null && oldValue.equals(e.value())) {
+                    replaced = true;
+                    e.setValue(newValue);
+                }
+                return replaced;
+            } finally {
+                unlock();
+            }
+        }
+
+        V replace(K key, int hash, V newValue) {
+            lock();
+            try {
+                removeStale();
+                HashEntry<K,V> e = getFirst(hash);
+                while (e != null && (e.hash != hash || !keyEq(key, e.key())))
+                    e = e.next;
+
+                V oldValue = null;
+                if (e != null) {
+                    oldValue = e.value();
+                    e.setValue(newValue);
+                }
+                return oldValue;
+            } finally {
+                unlock();
+            }
+        }
+
+
+        V put(K key, int hash, V value, boolean onlyIfAbsent) {
+            lock();
+            try {
+                removeStale();
+                int c = count;
+                if (c++ > threshold) {// ensure capacity
+                    int reduced = rehash();
+                    if (reduced > 0)  // adjust from possible weak cleanups
+                        count = (c -= reduced) - 1; // write-volatile      
+                }
+                         
+                HashEntry<K,V>[] tab = table;
+                int index = hash & (tab.length - 1);
+                HashEntry<K,V> first = tab[index];
+                HashEntry<K,V> e = first;
+                while (e != null && (e.hash != hash || !keyEq(key, e.key())))
+                    e = e.next;
+
+                V oldValue;
+                if (e != null) {
+                    oldValue = e.value();
+                    if (!onlyIfAbsent)
+                        e.setValue(value);
+                }
+                else {
+                    oldValue = null;
+                    ++modCount;
+                    tab[index] = newHashEntry(key, hash, first, value);
+                    count = c; // write-volatile
+                }
+                return oldValue;
+            } finally {
+                unlock();
+            }
+        }
+
+        int rehash() {
+            HashEntry<K,V>[] oldTable = table;
+            int oldCapacity = oldTable.length;
+            if (oldCapacity >= MAXIMUM_CAPACITY)
+                return 0;
+
+            /*
+             * Reclassify nodes in each list to new Map.  Because we are
+             * using power-of-two expansion, the elements from each bin
+             * must either stay at same index, or move with a power of two
+             * offset. We eliminate unnecessary node creation by catching
+             * cases where old nodes can be reused because their next
+             * fields won't change. Statistically, at the default
+             * threshold, only about one-sixth of them need cloning when
+             * a table doubles. The nodes they replace will be garbage
+             * collectable as soon as they are no longer referenced by any
+             * reader thread that may be in the midst of traversing table
+             * right now.
+             */
+
+            HashEntry<K,V>[] newTable = HashEntry.newArray(oldCapacity<<1);
+            threshold = (int)(newTable.length * loadFactor);
+            int sizeMask = newTable.length - 1;
+            int reduce = 0;
+            for (int i = 0; i < oldCapacity ; i++) {
+                // We need to guarantee that any existing reads of old Map can
+                //  proceed. So we cannot yet null out each bin.
+                HashEntry<K,V> e = oldTable[i];
+
+                if (e != null) {
+                    HashEntry<K,V> next = e.next;
+                    int idx = e.hash & sizeMask;
+
+                    //  Single node on list
+                    if (next == null)
+                        newTable[idx] = e;
+
+                    else {
+                        // Reuse trailing consecutive sequence at same slot
+                        HashEntry<K,V> lastRun = e;
+                        int lastIdx = idx;
+                        for (HashEntry<K,V> last = next;
+                             last != null;
+                             last = last.next) {
+                            int k = last.hash & sizeMask;
+                            if (k != lastIdx) {
+                                lastIdx = k;
+                                lastRun = last;
+                            }
+                        }
+                        newTable[lastIdx] = lastRun;
+                        // Clone all remaining nodes
+                        for (HashEntry<K,V> p = e; p != lastRun; p = p.next) {
+                            // Skip GC'd weak refs
+                            K key = p.key();
+                            if (key == null) {
+                                reduce++;
+                                continue;
+                            }
+                            int k = p.hash & sizeMask;
+                            HashEntry<K,V> n = newTable[k];
+                            newTable[k] = newHashEntry(key, p.hash, n, p.value());
+                        }
+                    }
+                }
+            }
+            table = newTable;
+            return reduce;
+        }
+
+        /**
+         * Removes the entry for {@code key}: match on key only if
+         * {@code value} is null, else match both key and value.
+         * When {@code weakRemove} is true the call originates from
+         * {@link #removeStale()} and {@code key} is the cleared reference
+         * object itself, so matching is on reference identity.
+         *
+         * @return the removed entry's value, or null if nothing matched
+         */
+        V remove(Object key, int hash, Object value, boolean weakRemove) {
+            lock();
+            try {
+                // A weak (stale) removal is itself the cleanup, so only
+                // drain the reference queue for normal removals.
+                if (!weakRemove)
+                    removeStale();
+                int c = count - 1;
+                HashEntry<K,V>[] tab = table;
+                int index = hash & (tab.length - 1);
+                HashEntry<K,V> first = tab[index];
+                HashEntry<K,V> e = first;
+                // a weak remove operation compares the WeakReference instance
+                while (e != null && (!weakRemove || key != e.keyRef)
+                                 && (e.hash != hash || !keyEq(key, e.key())))
+                    e = e.next;
+
+                V oldValue = null;
+                if (e != null) {
+                    V v = e.value();
+                    if (value == null || value.equals(v)) {
+                        oldValue = v;
+                        // All entries following removed node can stay
+                        // in list, but all preceding ones need to be
+                        // cloned.
+                        ++modCount;
+                        HashEntry<K,V> newFirst = e.next;
+                        for (HashEntry<K,V> p = first; p != e; p = p.next) {
+                            K pKey = p.key();
+                            if (pKey == null) { // Skip GC'd keys
+                                c--;
+                                continue;
+                            }
+
+                            newFirst = newHashEntry(pKey, p.hash, newFirst, p.value());
+                        }
+                        tab[index] = newFirst;
+                        count = c; // write-volatile
+                    }
+                }
+                return oldValue;
+            } finally {
+                unlock();
+            }
+        }
+        
+        /**
+         * Drains this segment's reference queue and removes every entry
+         * whose key has been cleared by the garbage collector.
+         * No-op when keys are strongly referenced (nothing is enqueued).
+         */
+        final void removeStale() {
+            if (keyType == ReferenceType.STRONG)
+                return;
+
+            KeyReference ref;
+            while ((ref = (KeyReference) refQueue.poll()) != null) {
+                // weakRemove=true: match the cleared reference by identity
+                remove(ref, ref.keyHash(), null, true);
+            }
+        }
+
+        /**
+         * Removes every entry from this segment. The unlocked read of
+         * {@code count} avoids acquiring the lock for already-empty segments.
+         */
+        void clear() {
+            if (count != 0) {
+                lock();
+                try {
+                    HashEntry<K,V>[] tab = table;
+                    for (int i = 0; i < tab.length ; i++)
+                        tab[i] = null;
+                    ++modCount;
+                    // replace the reference queue to avoid unnecessary stale cleanups
+                    refQueue = new ReferenceQueue<K>();
+                    count = 0; // write-volatile
+                } finally {
+                    unlock();
+                }
+            }
+        }
+    }
+
+
+
+    /* ---------------- Public operations -------------- */
+
+    /**
+     * Creates a new, empty map with the specified initial
+     * capacity, load factor, concurrency level, reference types
+     * and options.
+     *
+     * @param initialCapacity the initial capacity. The implementation
+     * performs internal sizing to accommodate this many elements.
+     * @param loadFactor  the load factor threshold, used to control resizing.
+     * Resizing may be performed when the average number of elements per
+     * bin exceeds this threshold.
+     * @param concurrencyLevel the estimated number of concurrently
+     * updating threads. The implementation performs internal sizing
+     * to try to accommodate this many threads.
+     * @param keyType the reference strength to use for keys
+     * @param valueType the reference strength to use for values
+     * @param options extra behavior options (e.g.
+     * {@link Option#IDENTITY_COMPARISONS}); may be null for none
+     * @throws IllegalArgumentException if the initial capacity is
+     * negative or the load factor or concurrencyLevel are
+     * nonpositive.
+     */
+    public ConcurrentReferenceHashMap(int initialCapacity,
+                             float loadFactor, int concurrencyLevel, 
+                             ReferenceType keyType, ReferenceType valueType,
+                             EnumSet<Option> options) {
+        // !(loadFactor > 0) also rejects NaN.
+        if (!(loadFactor > 0) || initialCapacity < 0 || concurrencyLevel <= 0)
+            throw new IllegalArgumentException();
+
+        if (concurrencyLevel > MAX_SEGMENTS)
+            concurrencyLevel = MAX_SEGMENTS;
+
+        // Find power-of-two sizes best matching arguments
+        int sshift = 0;
+        int ssize = 1;
+        while (ssize < concurrencyLevel) {
+            ++sshift;
+            ssize <<= 1;
+        }
+        segmentShift = 32 - sshift;
+        segmentMask = ssize - 1;
+        this.segments = Segment.newArray(ssize);
+
+        if (initialCapacity > MAXIMUM_CAPACITY)
+            initialCapacity = MAXIMUM_CAPACITY;
+        // Per-segment capacity: smallest power of two such that the
+        // segments together cover the requested initial capacity.
+        int c = initialCapacity / ssize;
+        if (c * ssize < initialCapacity)
+            ++c;
+        int cap = 1;
+        while (cap < c)
+            cap <<= 1;
+        
+        identityComparisons = options != null && options.contains(Option.IDENTITY_COMPARISONS);
+
+        for (int i = 0; i < this.segments.length; ++i)
+            this.segments[i] = new Segment<K,V>(cap, loadFactor, 
+                    keyType, valueType, identityComparisons);
+    }
+
+    /**
+     * Creates a new, empty map with the given initial capacity, load
+     * factor and concurrency level, using the default key and value
+     * reference types and no extra options.
+     *
+     * @param initialCapacity sizing hint for the number of elements
+     * @param loadFactor resize threshold (average elements per bin)
+     * @param concurrencyLevel estimated number of concurrent writers
+     * @throws IllegalArgumentException if the initial capacity is
+     * negative, or the load factor or concurrencyLevel are nonpositive
+     */
+    public ConcurrentReferenceHashMap(int initialCapacity,
+                             float loadFactor, int concurrencyLevel) {
+        this(initialCapacity, loadFactor, concurrencyLevel,
+                DEFAULT_KEY_TYPE, DEFAULT_VALUE_TYPE, null);
+    }
+    
+    /**
+     * Creates a new, empty map with the given initial capacity and load
+     * factor, and the default concurrencyLevel (16).
+     *
+     * @param initialCapacity sizing hint for the number of elements
+     * @param loadFactor resize threshold (average elements per bin)
+     * @throws IllegalArgumentException if the initial capacity is
+     * negative or the load factor is nonpositive
+     *
+     * @since 1.6
+     */
+    public ConcurrentReferenceHashMap(int initialCapacity, float loadFactor) {
+        this(initialCapacity, loadFactor, DEFAULT_CONCURRENCY_LEVEL);
+    }
+
+    /**
+     * Creates a new, empty map with the given initial capacity and the
+     * default load factor (0.75) and concurrencyLevel (16).
+     *
+     * @param initialCapacity sizing hint for the number of elements
+     * @throws IllegalArgumentException if the initial capacity is negative
+     */
+    public ConcurrentReferenceHashMap(int initialCapacity) {
+        this(initialCapacity, DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL);
+    }
+
+    /**
+     * Creates a new, empty map with all defaults: initial capacity (16),
+     * load factor (0.75) and concurrencyLevel (16).
+     */
+    public ConcurrentReferenceHashMap() {
+        this(DEFAULT_INITIAL_CAPACITY, DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL);
+    }
+
+    /**
+     * Creates a new map containing the same mappings as {@code m}.
+     * Capacity is the larger of 16 and 1.5x the source map's size;
+     * load factor (0.75) and concurrencyLevel (16) are the defaults.
+     *
+     * @param m the map whose mappings are copied into this map
+     */
+    public ConcurrentReferenceHashMap(Map<? extends K, ? extends V> m) {
+        this(Math.max((int) (m.size() / DEFAULT_LOAD_FACTOR) + 1,
+                      DEFAULT_INITIAL_CAPACITY),
+             DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL);
+        putAll(m);
+    }
+
+    /**
+     * Returns <tt>true</tt> if this map contains no key-value mappings.
+     *
+     * @return <tt>true</tt> if this map contains no key-value mappings
+     */
+    public boolean isEmpty() {
+        final Segment<K,V>[] segments = this.segments;
+        /*
+         * We keep track of per-segment modCounts to avoid ABA
+         * problems in which an element in one segment was added and
+         * in another removed during traversal, in which case the
+         * table was never actually empty at any point. Note the
+         * similar use of modCounts in the size() and containsValue()
+         * methods, which are the only other methods also susceptible
+         * to ABA problems.
+         */
+        int[] mc = new int[segments.length];
+        int mcsum = 0;
+        for (int i = 0; i < segments.length; ++i) {
+            if (segments[i].count != 0)
+                return false;
+            else
+                mcsum += mc[i] = segments[i].modCount;
+        }
+        // If mcsum happens to be zero, then we know we got a snapshot
+        // before any modifications at all were made.  This is
+        // probably common enough to bother tracking.
+        if (mcsum != 0) {
+            // Re-check: any count or modCount change since the first pass
+            // means a concurrent update raced us, so report non-empty.
+            for (int i = 0; i < segments.length; ++i) {
+                if (segments[i].count != 0 ||
+                    mc[i] != segments[i].modCount)
+                    return false;
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Returns the number of key-value mappings in this map.  If the
+     * map contains more than <tt>Integer.MAX_VALUE</tt> elements, returns
+     * <tt>Integer.MAX_VALUE</tt>.
+     *
+     * @return the number of key-value mappings in this map
+     */
+    public int size() {
+        final Segment<K,V>[] segments = this.segments;
+        long sum = 0;    // long: per-segment int counts may overflow int
+        long check = 0;
+        int[] mc = new int[segments.length];
+        // Try a few times to get accurate count. On failure due to
+        // continuous async changes in table, resort to locking.
+        for (int k = 0; k < RETRIES_BEFORE_LOCK; ++k) {
+            check = 0;
+            sum = 0;
+            int mcsum = 0;
+            for (int i = 0; i < segments.length; ++i) {
+                sum += segments[i].count;
+                mcsum += mc[i] = segments[i].modCount;
+            }
+            if (mcsum != 0) {
+                // Verify no segment was modified while we summed.
+                for (int i = 0; i < segments.length; ++i) {
+                    check += segments[i].count;
+                    if (mc[i] != segments[i].modCount) {
+                        check = -1; // force retry
+                        break;
+                    }
+                }
+            }
+            if (check == sum)
+                break;
+        }
+        if (check != sum) { // Resort to locking all segments
+            // All segments are locked (in index order) before any count is
+            // read, giving an exact snapshot; then all are unlocked.
+            sum = 0;
+            for (int i = 0; i < segments.length; ++i)
+                segments[i].lock();
+            for (int i = 0; i < segments.length; ++i)
+                sum += segments[i].count;
+            for (int i = 0; i < segments.length; ++i)
+                segments[i].unlock();
+        }
+        if (sum > Integer.MAX_VALUE)
+            return Integer.MAX_VALUE;
+        else
+            return (int)sum;
+    }
+
+    /**
+     * Returns the value to which the specified key is mapped, or
+     * {@code null} if this map contains no mapping for the key.
+     *
+     * <p>More formally, if this map contains a mapping from a key
+     * {@code k} to a value {@code v} such that {@code key.equals(k)},
+     * then this method returns {@code v}; otherwise it returns
+     * {@code null}.  (There can be at most one such mapping.)
+     *
+     * @throws NullPointerException if the specified key is null
+     */
+    public V get(Object key) {
+        final int h = hashOf(key);
+        return segmentFor(h).get(key, h);
+    }
+
+    /**
+     * Tests whether the specified object is a key in this table.
+     *
+     * @param  key   possible key
+     * @return <tt>true</tt> if and only if the specified object is a key
+     *         in this table, as determined by the <tt>equals</tt> method;
+     *         <tt>false</tt> otherwise
+     * @throws NullPointerException if the specified key is null
+     */
+    public boolean containsKey(Object key) {
+        final int h = hashOf(key);
+        return segmentFor(h).containsKey(key, h);
+    }
+
+    /**
+     * Returns <tt>true</tt> if this map maps one or more keys to the
+     * specified value. Note: This method requires a full internal
+     * traversal of the hash table, and so is much slower than
+     * method <tt>containsKey</tt>.
+     *
+     * @param value value whose presence in this map is to be tested
+     * @return <tt>true</tt> if this map maps one or more keys to the
+     *         specified value
+     * @throws NullPointerException if the specified value is null
+     */
+    public boolean containsValue(Object value) {
+        if (value == null)
+            throw new NullPointerException();
+
+        // See explanation of modCount use above
+
+        final Segment<K,V>[] segments = this.segments;
+        int[] mc = new int[segments.length];
+
+        // Try a few times without locking
+        for (int k = 0; k < RETRIES_BEFORE_LOCK; ++k) {
+            int sum = 0;
+            int mcsum = 0;
+            for (int i = 0; i < segments.length; ++i) {
+                // NOTE(review): c is never read; presumably this volatile
+                // read of count is deliberate (as in upstream
+                // ConcurrentHashMap) to establish memory visibility of the
+                // segment's table — confirm against jsr166 sources.
+                int c = segments[i].count;
+                mcsum += mc[i] = segments[i].modCount;
+                if (segments[i].containsValue(value))
+                    return true;
+            }
+            boolean cleanSweep = true;
+            if (mcsum != 0) {
+                // A changed modCount means a concurrent update may have
+                // moved the value past our traversal; retry.
+                for (int i = 0; i < segments.length; ++i) {
+                    int c = segments[i].count;
+                    if (mc[i] != segments[i].modCount) {
+                        cleanSweep = false;
+                        break;
+                    }
+                }
+            }
+            if (cleanSweep)
+                return false;
+        }
+        // Resort to locking all segments
+        for (int i = 0; i < segments.length; ++i)
+            segments[i].lock();
+        boolean found = false;
+        try {
+            for (int i = 0; i < segments.length; ++i) {
+                if (segments[i].containsValue(value)) {
+                    found = true;
+                    break;
+                }
+            }
+        } finally {
+            for (int i = 0; i < segments.length; ++i)
+                segments[i].unlock();
+        }
+        return found;
+    }
+
+    /**
+     * Legacy method testing whether some key maps into the specified
+     * value in this table. Functionally identical to
+     * {@link #containsValue}; retained only for source compatibility
+     * with {@link java.util.Hashtable}, which offered this method before
+     * the Java Collections framework existed.
+     *
+     * @param  value a value to search for
+     * @return <tt>true</tt> if and only if some key maps to the
+     *         <tt>value</tt> argument in this table, as determined by
+     *         the <tt>equals</tt> method; <tt>false</tt> otherwise
+     * @throws NullPointerException if the specified value is null
+     */
+    public boolean contains(Object value) {
+        return containsValue(value);
+    }
+
+    /**
+     * Maps the specified key to the specified value in this table.
+     * Neither the key nor the value may be null.
+     *
+     * <p>The value can later be retrieved by calling <tt>get</tt> with a
+     * key equal to the original key.
+     *
+     * @param key key with which the specified value is to be associated
+     * @param value value to be associated with the specified key
+     * @return the previous value associated with <tt>key</tt>, or
+     *         <tt>null</tt> if there was no mapping for <tt>key</tt>
+     * @throws NullPointerException if the specified key or value is null
+     */
+    public V put(K key, V value) {
+        if (value == null)
+            throw new NullPointerException();
+        final int h = hashOf(key);
+        return segmentFor(h).put(key, h, value, false);
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * @return the previous value associated with the specified key,
+     *         or <tt>null</tt> if there was no mapping for the key
+     * @throws NullPointerException if the specified key or value is null
+     */
+    public V putIfAbsent(K key, V value) {
+        if (value == null)
+            throw new NullPointerException();
+        final int h = hashOf(key);
+        // onlyIfAbsent=true: existing mappings are left untouched
+        return segmentFor(h).put(key, h, value, true);
+    }
+
+    /**
+     * Copies all of the mappings from the specified map into this one,
+     * replacing any mappings this map already had for those keys.
+     *
+     * @param m mappings to be stored in this map
+     */
+    public void putAll(Map<? extends K, ? extends V> m) {
+        for (Map.Entry<? extends K, ? extends V> entry : m.entrySet())
+            put(entry.getKey(), entry.getValue());
+    }
+
+    /**
+     * Removes the key (and its corresponding value) from this map;
+     * does nothing if the key is not present.
+     *
+     * @param  key the key that needs to be removed
+     * @return the previous value associated with <tt>key</tt>, or
+     *         <tt>null</tt> if there was no mapping for <tt>key</tt>
+     * @throws NullPointerException if the specified key is null
+     */
+    public V remove(Object key) {
+        final int h = hashOf(key);
+        return segmentFor(h).remove(key, h, null, false);
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * @throws NullPointerException if the specified key is null
+     */
+    public boolean remove(Object key, Object value) {
+        // Hash first: a null key must throw NPE even when value is null.
+        final int h = hashOf(key);
+        if (value == null)
+            return false;
+        return segmentFor(h).remove(key, h, value, false) != null;
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * @throws NullPointerException if any of the arguments are null
+     */
+    public boolean replace(K key, V oldValue, V newValue) {
+        if (oldValue == null || newValue == null)
+            throw new NullPointerException();
+        final int h = hashOf(key);
+        return segmentFor(h).replace(key, h, oldValue, newValue);
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * @return the previous value associated with the specified key,
+     *         or <tt>null</tt> if there was no mapping for the key
+     * @throws NullPointerException if the specified key or value is null
+     */
+    public V replace(K key, V value) {
+        if (value == null)
+            throw new NullPointerException();
+        final int h = hashOf(key);
+        return segmentFor(h).replace(key, h, value);
+    }
+
+    /**
+     * Removes all of the mappings from this map, one segment at a time.
+     */
+    public void clear() {
+        for (Segment<K,V> segment : segments)
+            segment.clear();
+    }
+
+    /**
+     * Returns a {@link Set} view of the keys contained in this map.
+     * The set is backed by the map: changes to either are visible in the
+     * other. The set supports element removal (removing the mapping from
+     * the map) via <tt>Iterator.remove</tt>, <tt>Set.remove</tt>,
+     * <tt>removeAll</tt>, <tt>retainAll</tt> and <tt>clear</tt>, but not
+     * <tt>add</tt> or <tt>addAll</tt>.
+     *
+     * <p>The view's <tt>iterator</tt> is "weakly consistent": it never
+     * throws {@link ConcurrentModificationException}, traverses elements
+     * as they existed when it was constructed, and may (but need not)
+     * reflect later modifications.
+     */
+    public Set<K> keySet() {
+        // Lazily create and cache the view; a benign race may create
+        // more than one, as in the original expression form.
+        Set<K> existing = keySet;
+        if (existing != null)
+            return existing;
+        return keySet = new KeySet();
+    }
+
+    /**
+     * Returns a {@link Collection} view of the values contained in this
+     * map. The collection is backed by the map: changes to either are
+     * visible in the other. It supports element removal (removing the
+     * corresponding mapping) via <tt>Iterator.remove</tt>,
+     * <tt>Collection.remove</tt>, <tt>removeAll</tt>, <tt>retainAll</tt>
+     * and <tt>clear</tt>, but not <tt>add</tt> or <tt>addAll</tt>.
+     *
+     * <p>The view's <tt>iterator</tt> is "weakly consistent": it never
+     * throws {@link ConcurrentModificationException}, traverses elements
+     * as they existed when it was constructed, and may (but need not)
+     * reflect later modifications.
+     */
+    public Collection<V> values() {
+        // Lazily create and cache the view.
+        Collection<V> existing = values;
+        if (existing != null)
+            return existing;
+        return values = new Values();
+    }
+
+    /**
+     * Returns a {@link Set} view of the mappings contained in this map.
+     * The set is backed by the map: changes to either are visible in the
+     * other. The set supports element removal (removing the mapping from
+     * the map) via <tt>Iterator.remove</tt>, <tt>Set.remove</tt>,
+     * <tt>removeAll</tt>, <tt>retainAll</tt> and <tt>clear</tt>, but not
+     * <tt>add</tt> or <tt>addAll</tt>.
+     *
+     * <p>The view's <tt>iterator</tt> is "weakly consistent": it never
+     * throws {@link ConcurrentModificationException}, traverses elements
+     * as they existed when it was constructed, and may (but need not)
+     * reflect later modifications.
+     */
+    public Set<Map.Entry<K,V>> entrySet() {
+        // Lazily create and cache the view.
+        Set<Map.Entry<K,V>> existing = entrySet;
+        if (existing != null)
+            return existing;
+        return entrySet = new EntrySet();
+    }
+
+    /**
+     * Returns an enumeration of the keys in this table
+     * (legacy {@link java.util.Hashtable}-style accessor).
+     *
+     * @return an enumeration of the keys in this table
+     * @see #keySet()
+     */
+    public Enumeration<K> keys() {
+        return new KeyIterator();
+    }
+
+    /**
+     * Returns an enumeration of the values in this table
+     * (legacy {@link java.util.Hashtable}-style accessor).
+     *
+     * @return an enumeration of the values in this table
+     * @see #values()
+     */
+    public Enumeration<V> elements() {
+        return new ValueIterator();
+    }
+
+    /* ---------------- Iterator Support -------------- */
+
+    /**
+     * Base iterator over all entries, walking segments and bucket tables
+     * from the highest index downward. Entries whose keys have been
+     * cleared by the GC are skipped. The iterator is weakly consistent:
+     * it reads live segment tables without locking.
+     */
+    abstract class HashIterator {
+        int nextSegmentIndex;          // next segment to scan (descending)
+        int nextTableIndex;            // next bucket in currentTable (descending)
+        HashEntry<K,V>[] currentTable; // bucket table of the current segment
+        HashEntry<K, V> nextEntry;     // next candidate entry, may have GC'd key
+        HashEntry<K, V> lastReturned;  // entry returned by the last nextEntry()
+        K currentKey; // Strong reference to weak key (prevents gc)
+
+        HashIterator() {
+            nextSegmentIndex = segments.length - 1;
+            nextTableIndex = -1;
+            advance();
+        }
+
+        // Enumeration compatibility.
+        public boolean hasMoreElements() { return hasNext(); }
+
+        /**
+         * Positions nextEntry on the next entry in chain order, then
+         * bucket order, then segment order; leaves it null when exhausted.
+         */
+        final void advance() {
+            if (nextEntry != null && (nextEntry = nextEntry.next) != null)
+                return;
+
+            while (nextTableIndex >= 0) {
+                if ( (nextEntry = currentTable[nextTableIndex--]) != null)
+                    return;
+            }
+
+            while (nextSegmentIndex >= 0) {
+                Segment<K,V> seg = segments[nextSegmentIndex--];
+                if (seg.count != 0) { // read-volatile; skip empty segments
+                    currentTable = seg.table;
+                    for (int j = currentTable.length - 1; j >= 0; --j) {
+                        if ( (nextEntry = currentTable[j]) != null) {
+                            nextTableIndex = j - 1;
+                            return;
+                        }
+                    }
+                }
+            }
+        }
+
+        public boolean hasNext() { 
+            // Skip over entries whose keys were reclaimed by the GC.
+            while (nextEntry != null) {
+                if (nextEntry.key() != null) 
+                    return true;
+                advance();
+            }
+            
+            return false;
+        }
+
+        HashEntry<K,V> nextEntry() {
+            do {
+                if (nextEntry == null)
+                    throw new NoSuchElementException();
+                
+                // currentKey pins the (possibly weak) key so it cannot be
+                // collected between hasNext() and the caller's use.
+                lastReturned = nextEntry;
+                currentKey = lastReturned.key();
+                advance();
+            } while (currentKey == null); // Skip GC'd keys
+            
+            return lastReturned;
+        }
+
+        public void remove() {
+            if (lastReturned == null)
+                throw new IllegalStateException();
+            // Remove via the map (by the pinned key), not the raw entry.
+            ConcurrentReferenceHashMap.this.remove(currentKey);
+            lastReturned = null;
+        }
+    }
+
+    final class KeyIterator
+        extends HashIterator
+        implements Iterator<K>, Enumeration<K>
+    {
+        public K next()        { return super.nextEntry().key(); }
+        public K nextElement() { return super.nextEntry().key(); }
+    }
+
+    final class ValueIterator
+        extends HashIterator
+        implements Iterator<V>, Enumeration<V>
+    {
+        public V next()        { return super.nextEntry().value(); }
+        public V nextElement() { return super.nextEntry().value(); }
+    }
+
+     /*
+      * This class is needed for JDK5 compatibility
+      * (java.util.AbstractMap.SimpleEntry is only public from JDK6).
+      * NOTE: Serializable — field names are part of the serialized form.
+      */
+     static class SimpleEntry<K, V> implements Entry<K, V>,
+            java.io.Serializable {
+        private static final long serialVersionUID = -8499721149061103585L;
+
+        private final K key;   // immutable entry key
+        private V value;       // mutable entry value
+
+        public SimpleEntry(K key, V value) {
+            this.key = key;
+            this.value = value;
+        }
+
+        /** Copy constructor from any Map.Entry. */
+        public SimpleEntry(Entry<? extends K, ? extends V> entry) {
+            this.key = entry.getKey();
+            this.value = entry.getValue();
+        }
+
+        public K getKey() {
+            return key;
+        }
+
+        public V getValue() {
+            return value;
+        }
+
+        /** Replaces this entry's value and returns the previous value. */
+        public V setValue(V value) {
+            V oldValue = this.value;
+            this.value = value;
+            return oldValue;
+        }
+
+        // Standard Map.Entry equality: both key and value null-safe equal.
+        public boolean equals(Object o) {
+            if (!(o instanceof Map.Entry))
+                return false;
+            @SuppressWarnings("unchecked")
+            Map.Entry e = (Map.Entry) o;
+            return eq(key, e.getKey()) && eq(value, e.getValue());
+        }
+
+        // Standard Map.Entry hash: key hash XOR value hash, null as 0.
+        public int hashCode() {
+            return (key == null ? 0 : key.hashCode())
+                    ^ (value == null ? 0 : value.hashCode());
+        }
+
+        public String toString() {
+            return key + "=" + value;
+        }
+
+        // Null-tolerant equality helper.
+        private static boolean eq(Object o1, Object o2) {
+            return o1 == null ? o2 == null : o1.equals(o2);
+        }
+    }
+
+
+    /**
+     * Entry returned by EntryIterator.next(): a snapshot of a mapping
+     * whose {@link #setValue} also writes through to the backing map.
+     */
+    final class WriteThroughEntry extends SimpleEntry<K,V>
+    {
+        private static final long serialVersionUID = -7900634345345313646L;
+
+        WriteThroughEntry(K k, V v) {
+            super(k,v);
+        }
+
+        /**
+         * Sets this entry's value and writes it through to the map.
+         * The returned "previous" value is best-effort: a concurrent
+         * update (or removal, which the put re-establishes) may have
+         * changed the map's value since this entry was snapshotted, so
+         * the most recent previous value can differ from what we return.
+         */
+        public V setValue(V value) {
+            if (value == null)
+                throw new NullPointerException();
+            V previous = super.setValue(value);
+            ConcurrentReferenceHashMap.this.put(getKey(), value);
+            return previous;
+        }
+    }
+
+    /** Iterator over the map's entries, yielding write-through views. */
+    final class EntryIterator
+        extends HashIterator
+        implements Iterator<Entry<K,V>>
+    {
+        public Map.Entry<K,V> next() {
+            HashEntry<K,V> entry = nextEntry();
+            return new WriteThroughEntry(entry.key(), entry.value());
+        }
+    }
+
+    /** Set view of the map's keys; all operations delegate to the map. */
+    final class KeySet extends AbstractSet<K> {
+        public Iterator<K> iterator() {
+            return new KeyIterator();
+        }
+
+        public int size() {
+            return ConcurrentReferenceHashMap.this.size();
+        }
+
+        public boolean isEmpty() {
+            return ConcurrentReferenceHashMap.this.isEmpty();
+        }
+
+        public boolean contains(Object o) {
+            return ConcurrentReferenceHashMap.this.containsKey(o);
+        }
+
+        public boolean remove(Object o) {
+            return ConcurrentReferenceHashMap.this.remove(o) != null;
+        }
+
+        public void clear() {
+            ConcurrentReferenceHashMap.this.clear();
+        }
+    }
+
+    /** Collection view of the map's values; delegates to the map. */
+    final class Values extends AbstractCollection<V> {
+        public Iterator<V> iterator() {
+            return new ValueIterator();
+        }
+
+        public int size() {
+            return ConcurrentReferenceHashMap.this.size();
+        }
+
+        public boolean isEmpty() {
+            return ConcurrentReferenceHashMap.this.isEmpty();
+        }
+
+        public boolean contains(Object o) {
+            return ConcurrentReferenceHashMap.this.containsValue(o);
+        }
+
+        public void clear() {
+            ConcurrentReferenceHashMap.this.clear();
+        }
+    }
+
+    /** Set view of the map's entries; delegates to the map. */
+    final class EntrySet extends AbstractSet<Map.Entry<K,V>> {
+        public Iterator<Map.Entry<K,V>> iterator() {
+            return new EntryIterator();
+        }
+
+        public boolean contains(Object o) {
+            if (!(o instanceof Map.Entry))
+                return false;
+            Map.Entry<?,?> entry = (Map.Entry<?,?>) o;
+            // Present only if the key maps to an equal (non-null) value.
+            V mapped = ConcurrentReferenceHashMap.this.get(entry.getKey());
+            return mapped != null && mapped.equals(entry.getValue());
+        }
+
+        public boolean remove(Object o) {
+            if (!(o instanceof Map.Entry))
+                return false;
+            Map.Entry<?,?> entry = (Map.Entry<?,?>) o;
+            return ConcurrentReferenceHashMap.this.remove(entry.getKey(), entry.getValue());
+        }
+
+        public int size() {
+            return ConcurrentReferenceHashMap.this.size();
+        }
+
+        public boolean isEmpty() {
+            return ConcurrentReferenceHashMap.this.isEmpty();
+        }
+
+        public void clear() {
+            ConcurrentReferenceHashMap.this.clear();
+        }
+    }
+
+    /* ---------------- Serialization Support -------------- */
+
+    /**
+     * Save the state of the <tt>ConcurrentReferenceHashMap</tt> instance
+     * to a stream (i.e., serialize it). Each segment is locked while its
+     * entries are written, so each segment is internally consistent,
+     * though the map as a whole is not snapshotted atomically.
+     * @param s the stream
+     * @serialData
+     * the key (Object) and value (Object)
+     * for each key-value mapping, followed by a null pair.
+     * The key-value mappings are emitted in no particular order.
+     */
+    private void writeObject(java.io.ObjectOutputStream s) throws IOException  {
+        s.defaultWriteObject();
+
+        for (int k = 0; k < segments.length; ++k) {
+            Segment<K,V> seg = segments[k];
+            seg.lock();
+            try {
+                HashEntry<K,V>[] tab = seg.table;
+                for (int i = 0; i < tab.length; ++i) {
+                    for (HashEntry<K,V> e = tab[i]; e != null; e = e.next) {
+                        K key = e.key();
+                        if (key == null) // Skip GC'd keys
+                            continue;
+                        
+                        s.writeObject(key);
+                        s.writeObject(e.value());
+                    }
+                }
+            } finally {
+                seg.unlock();
+            }
+        }
+        // Null key/value pair terminates the stream for readObject.
+        s.writeObject(null);
+        s.writeObject(null);
+    }
+
+    /**
+     * Reconstitute the <tt>ConcurrentReferenceHashMap</tt> instance from
+     * a stream (i.e., deserialize it).
+     * @param s the stream
+     */
+    @SuppressWarnings("unchecked")
+    private void readObject(java.io.ObjectInputStream s)
+        throws IOException, ClassNotFoundException  {
+        s.defaultReadObject();
+
+        // Initialize each segment to be minimally sized, and let grow.
+        for (int i = 0; i < segments.length; ++i) {
+            segments[i].setTable(new HashEntry[1]);
+        }
+
+        // Read the keys and values, and put the mappings in the table,
+        // stopping at the null key written as terminator by writeObject.
+        for (;;) {
+            K key = (K) s.readObject();
+            V value = (V) s.readObject();
+            if (key == null)
+                break;
+            put(key, value);
+        }
+    }
+}

Added: experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMapGCTestCase.java
===================================================================
--- experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMapGCTestCase.java	                        (rev 0)
+++ experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMapGCTestCase.java	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,140 @@
+package jsr166y;
+
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.Map;
+
+import jsr166y.ConcurrentReferenceHashMap.Option;
+import jsr166y.ConcurrentReferenceHashMap.ReferenceType;
+import junit.framework.TestCase;
+
+public class ConcurrentReferenceHashMapGCTestCase extends TestCase {
+
+    public void testBasicCleanup() throws Exception {
+        ConcurrentReferenceHashMap<BinClump, Integer> map = 
+            new ConcurrentReferenceHashMap<BinClump, Integer>(0, .75f, 16, ReferenceType.SOFT, ReferenceType.STRONG, null);
+        BinClump[] hold = new BinClump[100];
+        generateClumps(map, hold, 10000);
+        gc();
+        Thread.sleep(1000);
+
+        // trigger a cleanup without matching any key
+        for (int i = 0; i < 100; i++)
+            map.remove(new BinClump(i));
+
+        assertEquals(100, map.size());
+    }
+
+    public void testIterators() throws Exception {
+        ConcurrentReferenceHashMap<BinClump, Integer> map = 
+            new ConcurrentReferenceHashMap<BinClump, Integer>(0, .75f, 16, ReferenceType.SOFT, ReferenceType.STRONG, null);
+        BinClump[] hold = new BinClump[100];
+        generateClumps(map, hold, 10000);
+        gc();
+        Thread.sleep(500);
+
+        // Stale entries are not yet cleared
+        assertEquals(map.size(), 10000);
+
+        HashSet<Integer> keys = new HashSet<Integer>();
+        for (BinClump clump : map.keySet()) {
+            assertTrue(clump.hashCode() < 100);
+            keys.add(clump.hashCode());
+        }
+        assertEquals(100, keys.size());
+
+        HashSet<Integer> values = new HashSet<Integer>(map.values());
+        assertEquals(100, values.size());
+
+        // Still not clear...
+        assertEquals(map.size(), 10000);
+
+        int count = 0;
+        for (Iterator<Map.Entry<BinClump, Integer>> iter = map.entrySet()
+                .iterator(); iter.hasNext();) {
+            Map.Entry<BinClump, Integer> entry = iter.next();
+            assertTrue(keys.contains(entry.getKey().hashCode()));
+            assertTrue(values.contains(entry.getValue()));
+            // Trigger cleanup
+            entry.setValue(entry.getValue());
+            count++;
+        }
+
+        assertEquals(100, count);
+
+        // Should be stale free now
+        assertEquals(100, map.size());
+        Iterator<BinClump> i = map.keySet().iterator();
+        while (i.hasNext() && i.next() != hold[0])
+            ;
+
+        hold = null;
+        gc();
+        Thread.sleep(500);
+
+        // trigger a cleanup without matching any key
+        for (int c = 0; c < 100; c++)
+            map.remove(new BinClump(c));
+
+        // iterator should hold a strong ref
+        assertEquals(1, map.size());
+
+        // Free iterator
+        i = null;
+        gc();
+        Thread.sleep(500);
+
+        // trigger a cleanup without matching any key
+        for (int c = 0; c < 100; c++)
+            map.remove(new BinClump(c));
+
+        assertTrue(map.isEmpty());
+
+    }
+
+    private void gc() {
+        System.gc();
+        int chunkSize = (int) Math.min(Runtime.getRuntime().maxMemory() / 16, Integer.MAX_VALUE);
+        try {
+            LinkedList<long[]> list = new LinkedList<long[]>();
+            for (;;)
+                list.add(new long[chunkSize]);
+        } catch (OutOfMemoryError e)
+        {}
+        System.gc();
+    }
+
+    private void generateClumps(ConcurrentReferenceHashMap<BinClump, Integer> map,
+            BinClump[] hold, int size) {
+        BinClump[] tmp = new BinClump[10000];
+
+        int holdSize = hold.length;
+        for (int c = 0, hc = 0; c < size; c++) {
+
+            BinClump clump = new BinClump(c / holdSize);
+            tmp[c] = clump;
+            if (c % holdSize == 0)
+                hold[hc++] = clump;
+            map.put(clump, c);
+        }
+        assertEquals(size, map.size());
+    }
+
+    public static class BinClump {
+        private int code;
+
+        public BinClump(int code) {
+            this.code = code;
+        }
+
+        public int hashCode() {
+            return code;
+        };
+
+        public String toString() {
+            return "BC(" + code + ")";
+        }
+    }
+}

Added: experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMapTest.java
===================================================================
--- experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMapTest.java	                        (rev 0)
+++ experimental/jsr166/src/jsr166y/ConcurrentReferenceHashMapTest.java	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,620 @@
+package jsr166y;
+
+/*
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ * Other contributors include Andrew Wright, Jeffrey Hayes, 
+ * Pat Fisher, Mike Judd. 
+ */
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Enumeration;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+public class ConcurrentReferenceHashMapTest extends JSR166TestCase{
+    public static void main(String[] args) {
+	junit.textui.TestRunner.run (suite());	
+    }
+    public static Test suite() {
+	return new TestSuite(ConcurrentReferenceHashMapTest.class);
+    }
+
+    /**
+     * Create a map from Integers 1-5 to Strings "A"-"E".
+     */
+    private static ConcurrentReferenceHashMap map5() {   
+	ConcurrentReferenceHashMap map = new ConcurrentReferenceHashMap(5);
+        assertTrue(map.isEmpty());
+	map.put(one, "A");
+	map.put(two, "B");
+	map.put(three, "C");
+	map.put(four, "D");
+	map.put(five, "E");
+        assertFalse(map.isEmpty());
+        assertEquals(5, map.size());
+	return map;
+    }
+
+    /**
+     *  clear removes all pairs
+     */
+    public void testClear() {
+        ConcurrentReferenceHashMap map = map5();
+	map.clear();
+	assertEquals(map.size(), 0);
+    }
+
+    /**
+     *  Maps with same contents are equal
+     */
+    public void testEquals() {
+        ConcurrentReferenceHashMap map1 = map5();
+        ConcurrentReferenceHashMap map2 = map5();
+        assertEquals(map1, map2);
+        assertEquals(map2, map1);
+	map1.clear();
+        assertFalse(map1.equals(map2));
+        assertFalse(map2.equals(map1));
+    }
+
+    /**
+     *  contains returns true for contained value
+     */
+    public void testContains() {
+        ConcurrentReferenceHashMap map = map5();
+	assertTrue(map.contains("A"));
+        assertFalse(map.contains("Z"));
+    }
+    
+    /**
+     *  containsKey returns true for contained key
+     */
+    public void testContainsKey() {
+        ConcurrentReferenceHashMap map = map5();
+	assertTrue(map.containsKey(one));
+        assertFalse(map.containsKey(zero));
+    }
+
+    /**
+     *  containsValue returns true for held values
+     */
+    public void testContainsValue() {
+        ConcurrentReferenceHashMap map = map5();
+	assertTrue(map.containsValue("A"));
+        assertFalse(map.containsValue("Z"));
+    }
+
+    /**
+     *   enumeration returns an enumeration containing the correct
+     *   elements
+     */
+    public void testEnumeration() {
+        ConcurrentReferenceHashMap map = map5();
+	Enumeration e = map.elements();
+	int count = 0;
+	while(e.hasMoreElements()){
+	    count++;
+	    e.nextElement();
+	}
+	assertEquals(5, count);
+    }
+
+    /**
+     *  get returns the correct element at the given key,
+     *  or null if not present
+     */
+    public void testGet() {
+        ConcurrentReferenceHashMap map = map5();
+	assertEquals("A", (String)map.get(one));
+        ConcurrentReferenceHashMap empty = new ConcurrentReferenceHashMap();
+        assertNull(map.get("anything"));
+    }
+
+    /**
+     *  isEmpty is true of empty map and false for non-empty
+     */
+    public void testIsEmpty() {
+        ConcurrentReferenceHashMap empty = new ConcurrentReferenceHashMap();
+        ConcurrentReferenceHashMap map = map5();
+	assertTrue(empty.isEmpty());
+        assertFalse(map.isEmpty());
+    }
+
+    /**
+     *   keys returns an enumeration containing all the keys from the map
+     */
+    public void testKeys() {
+        ConcurrentReferenceHashMap map = map5();
+	Enumeration e = map.keys();
+	int count = 0;
+	while(e.hasMoreElements()){
+	    count++;
+	    e.nextElement();
+	}
+	assertEquals(5, count);
+    }
+
+    /**
+     *   keySet returns a Set containing all the keys
+     */
+    public void testKeySet() {
+        ConcurrentReferenceHashMap map = map5();
+	Set s = map.keySet();
+	assertEquals(5, s.size());
+	assertTrue(s.contains(one));
+	assertTrue(s.contains(two));
+	assertTrue(s.contains(three));
+	assertTrue(s.contains(four));
+	assertTrue(s.contains(five));
+    }
+
+    /**
+     *  keySet.toArray returns contains all keys
+     */
+    public void testKeySetToArray() {
+        ConcurrentReferenceHashMap map = map5();
+	Set s = map.keySet();
+        Object[] ar = s.toArray();
+        assertTrue(s.containsAll(Arrays.asList(ar)));
+	assertEquals(5, ar.length);
+        ar[0] = m10;
+        assertFalse(s.containsAll(Arrays.asList(ar)));
+    }
+
+    /**
+     *  Values.toArray contains all values
+     */
+    public void testValuesToArray() {
+        ConcurrentReferenceHashMap map = map5();
+	Collection v = map.values();
+        Object[] ar = v.toArray();
+        ArrayList s = new ArrayList(Arrays.asList(ar));
+	assertEquals(5, ar.length);
+	assertTrue(s.contains("A"));
+	assertTrue(s.contains("B"));
+	assertTrue(s.contains("C"));
+	assertTrue(s.contains("D"));
+	assertTrue(s.contains("E"));
+    }
+
+    /**
+     *  entrySet.toArray contains all entries
+     */
+    public void testEntrySetToArray() {
+        ConcurrentReferenceHashMap map = map5();
+	Set s = map.entrySet();
+        Object[] ar = s.toArray();
+        assertEquals(5, ar.length);
+        for (int i = 0; i < 5; ++i) {
+            assertTrue(map.containsKey(((Map.Entry)(ar[i])).getKey()));
+            assertTrue(map.containsValue(((Map.Entry)(ar[i])).getValue()));
+        }
+    }
+
+    /**
+     * values collection contains all values
+     */
+    public void testValues() {
+        ConcurrentReferenceHashMap map = map5();
+	Collection s = map.values();
+	assertEquals(5, s.size());
+	assertTrue(s.contains("A"));
+	assertTrue(s.contains("B"));
+	assertTrue(s.contains("C"));
+	assertTrue(s.contains("D"));
+	assertTrue(s.contains("E"));
+    }
+
+    /**
+     * entrySet contains all pairs
+     */
+    public void testEntrySet() {
+        ConcurrentReferenceHashMap map = map5();
+	Set s = map.entrySet();
+	assertEquals(5, s.size());
+        Iterator it = s.iterator();
+        while (it.hasNext()) {
+            Map.Entry e = (Map.Entry) it.next();
+            assertTrue( 
+                       (e.getKey().equals(one) && e.getValue().equals("A")) ||
+                       (e.getKey().equals(two) && e.getValue().equals("B")) ||
+                       (e.getKey().equals(three) && e.getValue().equals("C")) ||
+                       (e.getKey().equals(four) && e.getValue().equals("D")) ||
+                       (e.getKey().equals(five) && e.getValue().equals("E")));
+        }
+    }
+
+    /**
+     *   putAll  adds all key-value pairs from the given map
+     */
+    public void testPutAll() {
+        ConcurrentReferenceHashMap empty = new ConcurrentReferenceHashMap();
+        ConcurrentReferenceHashMap map = map5();
+	empty.putAll(map);
+	assertEquals(5, empty.size());
+	assertTrue(empty.containsKey(one));
+	assertTrue(empty.containsKey(two));
+	assertTrue(empty.containsKey(three));
+	assertTrue(empty.containsKey(four));
+	assertTrue(empty.containsKey(five));
+    }
+
+    /**
+     *   putIfAbsent works when the given key is not present
+     */
+    public void testPutIfAbsent() {
+        ConcurrentReferenceHashMap map = map5();
+	map.putIfAbsent(six, "Z");
+        assertTrue(map.containsKey(six));
+    }
+
+    /**
+     *   putIfAbsent does not add the pair if the key is already present
+     */
+    public void testPutIfAbsent2() {
+        ConcurrentReferenceHashMap map = map5();
+        assertEquals("A", map.putIfAbsent(one, "Z"));
+    }
+
+    /**
+     *   replace fails when the given key is not present
+     */
+    public void testReplace() {
+        ConcurrentReferenceHashMap map = map5();
+	assertNull(map.replace(six, "Z"));
+        assertFalse(map.containsKey(six));
+    }
+
+    /**
+     *   replace succeeds if the key is already present
+     */
+    public void testReplace2() {
+        ConcurrentReferenceHashMap map = map5();
+        assertNotNull(map.replace(one, "Z"));
+        assertEquals("Z", map.get(one));
+    }
+
+
+    /**
+     * replace value fails when the given key not mapped to expected value
+     */
+    public void testReplaceValue() {
+        ConcurrentReferenceHashMap map = map5();
+        assertEquals("A", map.get(one));
+	assertFalse(map.replace(one, "Z", "Z"));
+        assertEquals("A", map.get(one));
+    }
+
+    /**
+     * replace value succeeds when the given key mapped to expected value
+     */
+    public void testReplaceValue2() {
+        ConcurrentReferenceHashMap map = map5();
+        assertEquals("A", map.get(one));
+	assertTrue(map.replace(one, "A", "Z"));
+        assertEquals("Z", map.get(one));
+    }
+
+
+    /**
+     *   remove removes the correct key-value pair from the map
+     */
+    public void testRemove() {
+        ConcurrentReferenceHashMap map = map5();
+	map.remove(five);
+	assertEquals(4, map.size());
+	assertFalse(map.containsKey(five));
+    }
+
+    /**
+     * remove(key,value) removes only if pair present
+     */
+    public void testRemove2() {
+        ConcurrentReferenceHashMap map = map5();
+	map.remove(five, "E");
+	assertEquals(4, map.size());
+	assertFalse(map.containsKey(five));
+	map.remove(four, "A");
+	assertEquals(4, map.size());
+	assertTrue(map.containsKey(four));
+
+    }
+
+    /**
+     *   size returns the correct values
+     */
+    public void testSize() {
+        ConcurrentReferenceHashMap map = map5();
+        ConcurrentReferenceHashMap empty = new ConcurrentReferenceHashMap();
+	assertEquals(0, empty.size());
+	assertEquals(5, map.size());
+    }
+
+    /**
+     * toString contains toString of elements
+     */
+    public void testToString() {
+        ConcurrentReferenceHashMap map = map5();
+        String s = map.toString();
+        for (int i = 1; i <= 5; ++i) {
+            assertTrue(s.indexOf(String.valueOf(i)) >= 0);
+        }
+    }        
+
+    // Exception tests
+    
+    /**
+     * Cannot create with negative capacity 
+     */
+    public void testConstructor1() {
+        try {
+            new ConcurrentReferenceHashMap(-1,0,1);
+            shouldThrow();
+        } catch(IllegalArgumentException e){}
+    }
+
+    /**
+     * Cannot create with negative concurrency level
+     */
+    public void testConstructor2() {
+        try {
+            new ConcurrentReferenceHashMap(1,0,-1);
+            shouldThrow();
+        } catch(IllegalArgumentException e){}
+    }
+
+    /**
+     * Cannot create with only negative capacity
+     */
+    public void testConstructor3() {
+        try {
+            new ConcurrentReferenceHashMap(-1);
+            shouldThrow();
+        } catch(IllegalArgumentException e){}
+    }
+
+    /**
+     * get(null) throws NPE
+     */
+    public void testGet_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.get(null);
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * containsKey(null) throws NPE
+     */
+    public void testContainsKey_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.containsKey(null);
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * containsValue(null) throws NPE
+     */
+    public void testContainsValue_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.containsValue(null);
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * contains(null) throws NPE
+     */
+    public void testContains_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.contains(null);
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * put(null,x) throws NPE
+     */
+    public void testPut1_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.put(null, "whatever");
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * put(x, null) throws NPE
+     */
+    public void testPut2_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.put("whatever", null);
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * putIfAbsent(null, x) throws NPE
+     */
+    public void testPutIfAbsent1_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.putIfAbsent(null, "whatever");
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * replace(null, x) throws NPE
+     */
+    public void testReplace_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.replace(null, "whatever");
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * replace(null, x, y) throws NPE
+     */
+    public void testReplaceValue_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.replace(null, one, "whatever");
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * putIfAbsent(x, null) throws NPE
+     */
+    public void testPutIfAbsent2_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.putIfAbsent("whatever", null);
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+
+    /**
+     * replace(x, null) throws NPE
+     */
+    public void testReplace2_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.replace("whatever", null);
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * replace(x, null, y) throws NPE
+     */
+    public void testReplaceValue2_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.replace("whatever", null, "A");
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * replace(x, y, null) throws NPE
+     */
+    public void testReplaceValue3_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.replace("whatever", one, null);
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+
+    /**
+     * remove(null) throws NPE
+     */
+    public void testRemove1_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.put("sadsdf", "asdads");
+            c.remove(null);
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * remove(null, x) throws NPE
+     */
+    public void testRemove2_NullPointerException() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.put("sadsdf", "asdads");
+            c.remove(null, "whatever");
+            shouldThrow();
+        } catch(NullPointerException e){}
+    }
+
+    /**
+     * remove(x, null) returns false
+     */
+    public void testRemove3() {
+        try {
+            ConcurrentReferenceHashMap c = new ConcurrentReferenceHashMap(5);
+            c.put("sadsdf", "asdads");
+            assertFalse(c.remove("sadsdf", null));
+        } catch(NullPointerException e){
+            fail();
+        }
+    }
+
+    /**
+     * A deserialized map equals original
+     */
+    public void testSerialization() {
+        ConcurrentReferenceHashMap q = map5();
+
+        try {
+            ByteArrayOutputStream bout = new ByteArrayOutputStream(10000);
+            ObjectOutputStream out = new ObjectOutputStream(new BufferedOutputStream(bout));
+            out.writeObject(q);
+            out.close();
+
+            ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());
+            ObjectInputStream in = new ObjectInputStream(new BufferedInputStream(bin));
+            ConcurrentReferenceHashMap r = (ConcurrentReferenceHashMap)in.readObject();
+            assertEquals(q.size(), r.size());
+            assertTrue(q.equals(r));
+            assertTrue(r.equals(q));
+        } catch(Exception e){
+            e.printStackTrace();
+            unexpectedException();
+        }
+    }
+
+
+    /**
+     * SetValue of an EntrySet entry sets value in the map.
+     */
+    public void testSetValueWriteThrough() {
+        // Adapted from a bug report by Eric Zoerner 
+        ConcurrentReferenceHashMap map = new ConcurrentReferenceHashMap(2, 5.0f, 1);
+        assertTrue(map.isEmpty());
+        for (int i = 0; i < 20; i++)
+            map.put(new Integer(i), new Integer(i));
+        assertFalse(map.isEmpty());
+        Map.Entry entry1 = (Map.Entry)map.entrySet().iterator().next();
+        
+        // assert that entry1 is not 16
+        assertTrue("entry is 16, test not valid",
+                   !entry1.getKey().equals(new Integer(16)));
+        
+        // remove 16 (a different key) from map 
+        // which just happens to cause entry1 to be cloned in map
+        map.remove(new Integer(16));
+        entry1.setValue("XYZ");
+        assertTrue(map.containsValue("XYZ")); // fails
+    }
+    
+}

Added: experimental/jsr166/src/jsr166y/ConcurrentWeakHashMap.java
===================================================================
--- experimental/jsr166/src/jsr166y/ConcurrentWeakHashMap.java	                        (rev 0)
+++ experimental/jsr166/src/jsr166y/ConcurrentWeakHashMap.java	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,1450 @@
+/*
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ */
+
+package jsr166y;
+import java.io.IOException;
+import java.io.Serializable;
+import java.lang.ref.ReferenceQueue;
+import java.lang.ref.WeakReference;
+import java.util.AbstractCollection;
+import java.util.AbstractMap;
+import java.util.AbstractSet;
+import java.util.Collection;
+import java.util.ConcurrentModificationException;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Hashtable;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * A hash table with <em>weak keys</em>, full concurrency of retrievals, and
+ * adjustable expected concurrency for updates. Similar to
+ * {@link java.util.WeakHashMap}, entries of this table are periodically
+ * removed once their corresponding keys are no longer referenced outside of
+ * this table. In other words, this table will not prevent a key from being
+ * discarded by the garbage collector. Once a key has been discarded by the
+ * collector, the corresponding entry is no longer visible to this table;
+ * however, the entry may occupy space until a future table operation decides to
+ * reclaim it. For this reason, summary functions such as <tt>size</tt> and
+ * <tt>isEmpty</tt> might return a value greater than the observed number of
+ * entries. In order to support a high level of concurrency, stale entries are
+ * only reclaimed during blocking (usually mutating) operations.
+ * 
+ * While keys in this table are only held using a weak reference, values are
+ * held using a normal strong reference. This provides the guarantee that a
+ * value will always have at least the same life-span as its key. For this
+ * reason, care should be taken to ensure that a value never refers, either
+ * directly or indirectly, to its key, thereby preventing reclamation. If weak
+ * values are desired, one can simply use a {@link WeakReference} for the value
+ * type.
+ * 
+ * Just like {@link java.util.ConcurrentHashMap}, this class obeys the same
+ * functional specification as {@link java.util.Hashtable}, and includes
+ * versions of methods corresponding to each method of <tt>Hashtable</tt>.
+ * However, even though all operations are thread-safe, retrieval operations do
+ * <em>not</em> entail locking, and there is <em>not</em> any support for
+ * locking the entire table in a way that prevents all access. This class is
+ * fully interoperable with <tt>Hashtable</tt> in programs that rely on its
+ * thread safety but not on its synchronization details.
+ * 
+ * <p>
+ * Retrieval operations (including <tt>get</tt>) generally do not block, so
+ * may overlap with update operations (including <tt>put</tt> and
+ * <tt>remove</tt>). Retrievals reflect the results of the most recently
+ * <em>completed</em> update operations holding upon their onset. For
+ * aggregate operations such as <tt>putAll</tt> and <tt>clear</tt>,
+ * concurrent retrievals may reflect insertion or removal of only some entries.
+ * Similarly, Iterators and Enumerations return elements reflecting the state of
+ * the hash table at some point at or since the creation of the
+ * iterator/enumeration. They do <em>not</em> throw
+ * {@link ConcurrentModificationException}. However, iterators are designed to
+ * be used by only one thread at a time.
+ * 
+ * <p>
+ * The allowed concurrency among update operations is guided by the optional
+ * <tt>concurrencyLevel</tt> constructor argument (default <tt>16</tt>),
+ * which is used as a hint for internal sizing. The table is internally
+ * partitioned to try to permit the indicated number of concurrent updates
+ * without contention. Because placement in hash tables is essentially random,
+ * the actual concurrency will vary. Ideally, you should choose a value to
+ * accommodate as many threads as will ever concurrently modify the table. Using
+ * a significantly higher value than you need can waste space and time, and a
+ * significantly lower value can lead to thread contention. But overestimates
+ * and underestimates within an order of magnitude do not usually have much
+ * noticeable impact. A value of one is appropriate when it is known that only
+ * one thread will modify and all others will only read. Also, resizing this or
+ * any other kind of hash table is a relatively slow operation, so, when
+ * possible, it is a good idea to provide estimates of expected table sizes in
+ * constructors.
+ * 
+ * <p>
+ * This class and its views and iterators implement all of the <em>optional</em>
+ * methods of the {@link Map} and {@link Iterator} interfaces.
+ * 
+ * <p>
+ * Like {@link Hashtable} but unlike {@link HashMap}, this class does
+ * <em>not</em> allow <tt>null</tt> to be used as a key or value.
+ * 
+ * <p>
+ * This class is a member of the <a href="{@docRoot}/../technotes/guides/collections/index.html">
+ * Java Collections Framework</a>.
+ * 
+ * @author Doug Lea
+ * @author Jason T. Greene
+ * @param <K> the type of keys maintained by this map
+ * @param <V> the type of mapped values
+ */
+public class ConcurrentWeakHashMap<K, V> extends AbstractMap<K, V>
+        implements java.util.concurrent.ConcurrentMap<K, V>, Serializable {
+    private static final long serialVersionUID = 7249069246763182397L;
+
+    /*
+     * The basic strategy is to subdivide the table among Segments,
+     * each of which itself is a concurrently readable hash table.
+     */
+
+    /* ---------------- Constants -------------- */
+
+    /**
+     * The default initial capacity for this table,
+     * used when not otherwise specified in a constructor.
+     */
+    static final int DEFAULT_INITIAL_CAPACITY = 16;
+
+    /**
+     * The default load factor for this table, used when not
+     * otherwise specified in a constructor.
+     */
+    static final float DEFAULT_LOAD_FACTOR = 0.75f;
+
+    /**
+     * The default concurrency level for this table, used when not
+     * otherwise specified in a constructor.
+     */
+    static final int DEFAULT_CONCURRENCY_LEVEL = 16;
+
+    /**
+     * The maximum capacity, used if a higher value is implicitly
+     * specified by either of the constructors with arguments.  MUST
+     * be a power of two <= 1<<30 to ensure that entries are indexable
+     * using ints.
+     */
+    static final int MAXIMUM_CAPACITY = 1 << 30;
+
+    /**
+     * The maximum number of segments to allow; used to bound
+     * constructor arguments.
+     */
+    static final int MAX_SEGMENTS = 1 << 16; // slightly conservative
+
+    /**
+     * Number of unsynchronized retries in size and containsValue
+     * methods before resorting to locking. This is used to avoid
+     * unbounded retries if tables undergo continuous modification
+     * which would make it impossible to obtain an accurate result.
+     */
+    static final int RETRIES_BEFORE_LOCK = 2;
+
+    /* ---------------- Fields -------------- */
+
+    /**
+     * Mask value for indexing into segments. The upper bits of a
+     * key's hash code are used to choose the segment.
+     */
+    final int segmentMask;
+
+    /**
+     * Shift value for indexing within segments.
+     */
+    final int segmentShift;
+
+    /**
+     * The segments, each of which is a specialized hash table
+     */
+    final Segment<K,V>[] segments;
+
+    transient Set<K> keySet;
+    transient Set<Map.Entry<K,V>> entrySet;
+    transient Collection<V> values;
+
+    /* ---------------- Small Utilities -------------- */
+
+    /**
+     * Applies a supplemental hash function to a given hashCode, which
+     * defends against poor quality hash functions.  This is critical
+     * because ConcurrentWeakHashMap uses power-of-two length hash tables,
+     * that otherwise encounter collisions for hashCodes that do not
+     * differ in lower or upper bits.
+     */
+    private static int hash(int h) {
+        // Spread bits to regularize both segment and index locations,
+        // using variant of single-word Wang/Jenkins hash.
+        h += (h <<  15) ^ 0xffffcd7d;
+        h ^= (h >>> 10);
+        h += (h <<   3);
+        h ^= (h >>>  6);
+        h += (h <<   2) + (h << 14);
+        return h ^ (h >>> 16);
+    }
+
+    /**
+     * Returns the segment that should be used for key with given hash
+     * @param hash the hash code for the key
+     * @return the segment
+     */
+    final Segment<K,V> segmentFor(int hash) {
+        return segments[(hash >>> segmentShift) & segmentMask];
+    }
+
+    /* ---------------- Inner Classes -------------- */
+
+    /**
+     * A weak-key reference which stores the key hash needed for reclamation.
+     * The hash is captured eagerly at construction because once the GC
+     * clears the referent the key's hashCode() can no longer be computed,
+     * yet Segment.removeStale() still needs the hash to locate the bin
+     * holding the dead entry.
+     */
+    static final class WeakKeyReference<K> extends WeakReference<K> {
+        final int hash;  // spread hash of the key, retained after the key is collected
+        WeakKeyReference(K key, int hash, ReferenceQueue<K> refQueue) {
+            super(key, refQueue);
+            this.hash = hash;
+        }
+    }
+    
+    /**
+     * ConcurrentWeakHashMap list entry. Note that this is never exported
+     * out as a user-visible Map.Entry.
+     *
+     * Because the value field is volatile, not final, it is legal wrt
+     * the Java Memory Model for an unsynchronized reader to see null
+     * instead of initial value when read via a data race.  Although a
+     * reordering leading to this is not likely to ever actually
+     * occur, the Segment.readValueUnderLock method is used as a
+     * backup in case a null (pre-initialized) value is ever seen in
+     * an unsynchronized access method.
+     */
+    static final class HashEntry<K,V> {
+        final WeakReference<K> keyRef;  // weak reference to the key (a WeakKeyReference)
+        final int hash;                 // spread hash, fixed at creation
+        volatile V value;               // non-null for a live entry; a racy null is re-read under lock
+        final HashEntry<K,V> next;      // immutable chain link; bins grow at the head
+
+        HashEntry(K key, int hash, HashEntry<K,V> next, V value, ReferenceQueue<K> refQueue) {
+            this.keyRef = new WeakKeyReference<K>(key, hash, refQueue);
+            this.hash = hash;
+            this.next = next;
+            this.value = value;
+        }
+
+        /** Creates a HashEntry array of the given size, confining the unchecked cast here. */
+        @SuppressWarnings("unchecked")
+        static final <K,V> HashEntry<K,V>[] newArray(int i) {
+            return new HashEntry[i];
+        }
+    }
+
+    /**
+     * Segments are specialized versions of hash tables.  This
+     * subclasses from ReentrantLock opportunistically, just to
+     * simplify some locking and avoid separate construction.
+     */
+    static final class Segment<K,V> extends ReentrantLock implements Serializable {
+        /*
+         * Segments maintain a table of entry lists that are ALWAYS
+         * kept in a consistent state, so can be read without locking.
+         * Next fields of nodes are immutable (final).  All list
+         * additions are performed at the front of each bin. This
+         * makes it easy to check changes, and also fast to traverse.
+         * When nodes would otherwise be changed, new nodes are
+         * created to replace them. This works well for hash tables
+         * since the bin lists tend to be short. (The average length
+         * is less than two for the default load factor threshold.)
+         *
+         * Read operations can thus proceed without locking, but rely
+         * on selected uses of volatiles to ensure that completed
+         * write operations performed by other threads are
+         * noticed. For most purposes, the "count" field, tracking the
+         * number of elements, serves as that volatile variable
+         * ensuring visibility.  This is convenient because this field
+         * needs to be read in many read operations anyway:
+         *
+         *   - All (unsynchronized) read operations must first read the
+         *     "count" field, and should not look at table entries if
+         *     it is 0.
+         *
+         *   - All (synchronized) write operations should write to
+         *     the "count" field after structurally changing any bin.
+         *     The operations must not take any action that could even
+         *     momentarily cause a concurrent read operation to see
+         *     inconsistent data. This is made easier by the nature of
+         *     the read operations in Map. For example, no operation
+         *     can reveal that the table has grown but the threshold
+         *     has not yet been updated, so there are no atomicity
+         *     requirements for this with respect to reads.
+         *
+         * As a guide, all critical volatile reads and writes to the
+         * count field are marked in code comments.
+         */
+
+        private static final long serialVersionUID = 2249069246763182397L;
+
+        /**
+         * The number of elements in this segment's region.
+         */
+        transient volatile int count;
+
+        /**
+         * Number of updates that alter the size of the table. This is
+         * used during bulk-read methods to make sure they see a
+         * consistent snapshot: If modCounts change during a traversal
+         * of segments computing size or checking containsValue, then
+         * we might have an inconsistent view of state so (usually)
+         * must retry.
+         */
+        transient int modCount;
+
+        /**
+         * The table is rehashed when its size exceeds this threshold.
+         * (The value of this field is always <tt>(int)(capacity *
+         * loadFactor)</tt>.)
+         */
+        transient int threshold;
+
+        /**
+         * The per-segment table.
+         */
+        transient volatile HashEntry<K,V>[] table;
+
+        /**
+         * The load factor for the hash table.  Even though this value
+         * is same for all segments, it is replicated to avoid needing
+         * links to outer object.
+         * @serial
+         */
+        final float loadFactor;
+
+        /**
+         * The collected weak-key reference queue for this segment.
+         * This should be (re)initialized whenever table is assigned,
+         * so that references enqueued against a discarded table are
+         * never replayed on the new one (see clear()).
+         */
+        transient volatile ReferenceQueue<K> refQueue;
+
+        Segment(int initialCapacity, float lf) {
+            loadFactor = lf;
+            setTable(HashEntry.<K,V>newArray(initialCapacity));
+        }
+
+        /** Creates a Segment array of the given size, confining the unchecked cast here. */
+        @SuppressWarnings("unchecked")
+        static final <K,V> Segment<K,V>[] newArray(int i) {
+            return new Segment[i];
+        }
+
+        /**
+         * Sets table to new HashEntry array.
+         * Call only while holding lock or in constructor.
+         */
+        void setTable(HashEntry<K,V>[] newTable) {
+            threshold = (int)(newTable.length * loadFactor);
+            table = newTable;
+            refQueue = new ReferenceQueue<K>();
+        }
+
+        /**
+         * Returns properly casted first entry of bin for given hash.
+         */
+        HashEntry<K,V> getFirst(int hash) {
+            HashEntry<K,V>[] tab = table;
+            return tab[hash & (tab.length - 1)];
+        }
+
+        /**
+         * Reads value field of an entry under lock. Called if value
+         * field ever appears to be null. This is possible only if a
+         * compiler happens to reorder a HashEntry initialization with
+         * its table assignment, which is legal under memory model
+         * but is not known to ever occur.
+         */
+        V readValueUnderLock(HashEntry<K,V> e) {
+            lock();
+            try {
+                removeStale();
+                return e.value;
+            } finally {
+                unlock();
+            }
+        }
+
+        /* Specialized implementations of map methods */
+
+        V get(Object key, int hash) {
+            if (count != 0) { // read-volatile
+                HashEntry<K,V> e = getFirst(hash);
+                while (e != null) {
+                    // keyRef.get() may be null if the key was collected;
+                    // equals(null) is false so dead entries are skipped.
+                    if (e.hash == hash && key.equals(e.keyRef.get())) {
+                        V v = e.value;
+                        if (v != null)
+                            return v;
+                        return readValueUnderLock(e); // recheck
+                    }
+                    e = e.next;
+                }
+            }
+            return null;
+        }
+
+        boolean containsKey(Object key, int hash) {
+            if (count != 0) { // read-volatile
+                HashEntry<K,V> e = getFirst(hash);
+                while (e != null) {
+                    if (e.hash == hash && key.equals(e.keyRef.get()))
+                        return true;
+                    e = e.next;
+                }
+            }
+            return false;
+        }
+
+        boolean containsValue(Object value) {
+            if (count != 0) { // read-volatile
+                HashEntry<K,V>[] tab = table;
+                int len = tab.length;
+                for (int i = 0 ; i < len; i++) {
+                    for (HashEntry<K,V> e = tab[i]; e != null; e = e.next) {
+                        V v = e.value;
+                        if (v == null) // recheck
+                            v = readValueUnderLock(e);
+                        if (value.equals(v))
+                            return true;
+                    }
+                }
+            }
+            return false;
+        }
+
+        boolean replace(K key, int hash, V oldValue, V newValue) {
+            lock();
+            try {
+                removeStale();
+                HashEntry<K,V> e = getFirst(hash);
+                while (e != null && (e.hash != hash || !key.equals(e.keyRef.get())))
+                    e = e.next;
+
+                boolean replaced = false;
+                if (e != null && oldValue.equals(e.value)) {
+                    replaced = true;
+                    e.value = newValue;
+                }
+                return replaced;
+            } finally {
+                unlock();
+            }
+        }
+
+        V replace(K key, int hash, V newValue) {
+            lock();
+            try {
+                removeStale();
+                HashEntry<K,V> e = getFirst(hash);
+                while (e != null && (e.hash != hash || !key.equals(e.keyRef.get())))
+                    e = e.next;
+
+                V oldValue = null;
+                if (e != null) {
+                    oldValue = e.value;
+                    e.value = newValue;
+                }
+                return oldValue;
+            } finally {
+                unlock();
+            }
+        }
+
+
+        V put(K key, int hash, V value, boolean onlyIfAbsent) {
+            lock();
+            try {
+                // Drain GC'd keys first so they don't inflate count.
+                removeStale();
+                int c = count;
+                if (c++ > threshold) {// ensure capacity
+                    int reduced = rehash();
+                    if (reduced > 0)  // adjust from possible weak cleanups
+                        count = (c -= reduced) - 1; // write-volatile
+                }
+
+                HashEntry<K,V>[] tab = table;
+                int index = hash & (tab.length - 1);
+                HashEntry<K,V> first = tab[index];
+                HashEntry<K,V> e = first;
+                while (e != null && (e.hash != hash || !key.equals(e.keyRef.get())))
+                    e = e.next;
+
+                V oldValue;
+                if (e != null) {
+                    oldValue = e.value;
+                    if (!onlyIfAbsent)
+                        e.value = value;
+                }
+                else {
+                    oldValue = null;
+                    ++modCount;
+                    tab[index] = new HashEntry<K,V>(key, hash, first, value, refQueue);
+                    count = c; // write-volatile
+                }
+                return oldValue;
+            } finally {
+                unlock();
+            }
+        }
+
+        /**
+         * Doubles the table (up to MAXIMUM_CAPACITY) and returns the
+         * number of entries dropped because their weak keys were
+         * already collected.  Call only while holding lock.
+         */
+        int rehash() {
+            HashEntry<K,V>[] oldTable = table;
+            int oldCapacity = oldTable.length;
+            if (oldCapacity >= MAXIMUM_CAPACITY)
+                return 0;
+
+            /*
+             * Reclassify nodes in each list to new Map.  Because we are
+             * using power-of-two expansion, the elements from each bin
+             * must either stay at same index, or move with a power of two
+             * offset. We eliminate unnecessary node creation by catching
+             * cases where old nodes can be reused because their next
+             * fields won't change. Statistically, at the default
+             * threshold, only about one-sixth of them need cloning when
+             * a table doubles. The nodes they replace will be garbage
+             * collectable as soon as they are no longer referenced by any
+             * reader thread that may be in the midst of traversing table
+             * right now.
+             */
+
+            HashEntry<K,V>[] newTable = HashEntry.newArray(oldCapacity<<1);
+            threshold = (int)(newTable.length * loadFactor);
+            int sizeMask = newTable.length - 1;
+            int reduce = 0;
+            for (int i = 0; i < oldCapacity ; i++) {
+                // We need to guarantee that any existing reads of old Map can
+                //  proceed. So we cannot yet null out each bin.
+                HashEntry<K,V> e = oldTable[i];
+
+                if (e != null) {
+                    HashEntry<K,V> next = e.next;
+                    int idx = e.hash & sizeMask;
+
+                    //  Single node on list
+                    if (next == null)
+                        newTable[idx] = e;
+
+                    else {
+                        // Reuse trailing consecutive sequence at same slot
+                        HashEntry<K,V> lastRun = e;
+                        int lastIdx = idx;
+                        for (HashEntry<K,V> last = next;
+                             last != null;
+                             last = last.next) {
+                            int k = last.hash & sizeMask;
+                            if (k != lastIdx) {
+                                lastIdx = k;
+                                lastRun = last;
+                            }
+                        }
+                        newTable[lastIdx] = lastRun;
+                        // Clone all remaining nodes
+                        for (HashEntry<K,V> p = e; p != lastRun; p = p.next) {
+                            // Skip GC'd weak refs
+                            K key = p.keyRef.get();
+                            if (key == null) {
+                                reduce++;
+                                continue;
+                            }
+                            int k = p.hash & sizeMask;
+                            HashEntry<K,V> n = newTable[k];
+                            newTable[k] = new HashEntry<K,V>(key, p.hash, n, p.value, refQueue);
+                        }
+                    }
+                }
+            }
+            table = newTable;
+            return reduce;
+        }
+
+        /**
+         * Remove; match on key only if value null, else match both.
+         * When weakRemove is true, key is the WeakKeyReference itself
+         * and matching is by reference identity; this path is used by
+         * removeStale(), which therefore must not be re-entered here.
+         */
+        V remove(Object key, int hash, Object value, boolean weakRemove) {
+            lock();
+            try {
+                if (!weakRemove)
+                    removeStale();
+                int c = count - 1;
+                HashEntry<K,V>[] tab = table;
+                int index = hash & (tab.length - 1);
+                HashEntry<K,V> first = tab[index];
+                HashEntry<K,V> e = first;
+                // a weak remove operation compares the WeakReference instance
+                while (e != null && (!weakRemove || key != e.keyRef)
+                                 && (e.hash != hash || !key.equals(e.keyRef.get())))
+                    e = e.next;
+
+                V oldValue = null;
+                if (e != null) {
+                    V v = e.value;
+                    if (value == null || value.equals(v)) {
+                        oldValue = v;
+                        // All entries following removed node can stay
+                        // in list, but all preceding ones need to be
+                        // cloned.
+                        ++modCount;
+                        HashEntry<K,V> newFirst = e.next;
+                        for (HashEntry<K,V> p = first; p != e; p = p.next) {
+                            K pKey = p.keyRef.get();
+                            if (pKey == null) { // Skip GC'd keys
+                                c--;
+                                continue;
+                            }
+
+                            newFirst = new HashEntry<K,V>(pKey, p.hash,
+                                                          newFirst, p.value, refQueue);
+                        }
+                        tab[index] = newFirst;
+                        count = c; // write-volatile
+                    }
+                }
+                return oldValue;
+            } finally {
+                unlock();
+            }
+        }
+
+        /**
+         * Drains the reference queue, removing every entry whose weak
+         * key has been cleared by the GC.  Uses the hash captured in
+         * WeakKeyReference, since the key itself is gone.
+         */
+        @SuppressWarnings("unchecked")
+        void removeStale() {
+            WeakKeyReference<K> ref;
+            while ((ref = (WeakKeyReference<K>) refQueue.poll()) != null) {
+                remove(ref, ref.hash, null, true);
+            }
+        }
+
+        void clear() {
+            if (count != 0) {
+                lock();
+                try {
+                    HashEntry<K,V>[] tab = table;
+                    for (int i = 0; i < tab.length ; i++)
+                        tab[i] = null;
+                    ++modCount;
+                    // replace the reference queue to avoid unnecessary stale cleanups
+                    refQueue = new ReferenceQueue<K>();
+                    count = 0; // write-volatile
+                } finally {
+                    unlock();
+                }
+            }
+        }
+    }
+
+
+
+    /* ---------------- Public operations -------------- */
+
+    /**
+     * Creates a new, empty map with the specified initial capacity,
+     * load factor and concurrency level.
+     *
+     * @param initialCapacity the initial capacity; internal sizing
+     * accommodates this many elements.
+     * @param loadFactor the load factor threshold, used to control
+     * resizing; resizing may be performed when the average number of
+     * elements per bin exceeds it.
+     * @param concurrencyLevel the estimated number of concurrently
+     * updating threads; internal sizing tries to accommodate this
+     * many threads.
+     * @throws IllegalArgumentException if the initial capacity is
+     * negative or the load factor or concurrencyLevel are
+     * nonpositive.
+     */
+    public ConcurrentWeakHashMap(int initialCapacity,
+                             float loadFactor, int concurrencyLevel) {
+        // Note: !(loadFactor > 0) deliberately rejects NaN as well.
+        if (!(loadFactor > 0) || initialCapacity < 0 || concurrencyLevel <= 0)
+            throw new IllegalArgumentException();
+
+        int level = Math.min(concurrencyLevel, MAX_SEGMENTS);
+
+        // Round the segment count up to a power of two, recording the
+        // shift/mask used to select a segment from a hash's upper bits.
+        int shift = 0;
+        int segCount = 1;
+        while (segCount < level) {
+            ++shift;
+            segCount <<= 1;
+        }
+        segmentShift = 32 - shift;
+        segmentMask = segCount - 1;
+        this.segments = Segment.newArray(segCount);
+
+        // Spread the requested capacity across the segments, rounding
+        // each per-segment table size up to a power of two.
+        int capacity = Math.min(initialCapacity, MAXIMUM_CAPACITY);
+        int perSegment = capacity / segCount;
+        if (perSegment * segCount < capacity)
+            ++perSegment;
+        int segCapacity = 1;
+        while (segCapacity < perSegment)
+            segCapacity <<= 1;
+
+        for (int i = 0; i < this.segments.length; ++i)
+            this.segments[i] = new Segment<K,V>(segCapacity, loadFactor);
+    }
+
+    /**
+     * Creates a new, empty map with the specified initial capacity
+     * and load factor and with the default concurrencyLevel (16).
+     *
+     * @param initialCapacity The implementation performs internal
+     * sizing to accommodate this many elements.
+     * @param loadFactor  the load factor threshold, used to control resizing.
+     * Resizing may be performed when the average number of elements per
+     * bin exceeds this threshold.
+     * @throws IllegalArgumentException if the initial capacity of
+     * elements is negative or the load factor is nonpositive
+     *
+     * @since 1.6
+     */
+    public ConcurrentWeakHashMap(int initialCapacity, float loadFactor) {
+        // Delegates to the full constructor with the default concurrency level.
+        this(initialCapacity, loadFactor, DEFAULT_CONCURRENCY_LEVEL);
+    }
+
+    /**
+     * Creates a new, empty map with the specified initial capacity,
+     * and with default load factor (0.75) and concurrencyLevel (16).
+     *
+     * @param initialCapacity the initial capacity. The implementation
+     * performs internal sizing to accommodate this many elements.
+     * @throws IllegalArgumentException if the initial capacity of
+     * elements is negative.
+     */
+    public ConcurrentWeakHashMap(int initialCapacity) {
+        // Delegates to the full constructor with default load factor and concurrency.
+        this(initialCapacity, DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL);
+    }
+
+    /**
+     * Creates a new, empty map with a default initial capacity (16),
+     * load factor (0.75) and concurrencyLevel (16).
+     */
+    public ConcurrentWeakHashMap() {
+        // Delegates to the full constructor with all defaults.
+        this(DEFAULT_INITIAL_CAPACITY, DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL);
+    }
+
+    /**
+     * Creates a new map with the same mappings as the given map.
+     * The map is created with a capacity of 1.5 times the number
+     * of mappings in the given map or 16 (whichever is greater),
+     * and a default load factor (0.75) and concurrencyLevel (16).
+     *
+     * @param m the map
+     */
+    public ConcurrentWeakHashMap(Map<? extends K, ? extends V> m) {
+        // size/0.75 + 1 sizes the table so that copying m should not
+        // immediately trigger a rehash at the default load factor.
+        this(Math.max((int) (m.size() / DEFAULT_LOAD_FACTOR) + 1,
+                      DEFAULT_INITIAL_CAPACITY),
+             DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL);
+        putAll(m);
+    }
+
+    /**
+     * Returns <tt>true</tt> if this map contains no key-value mappings.
+     *
+     * @return <tt>true</tt> if this map contains no key-value mappings
+     */
+    public boolean isEmpty() {
+        final Segment<K,V>[] segments = this.segments;
+        /*
+         * We keep track of per-segment modCounts to avoid ABA
+         * problems in which an element in one segment was added and
+         * in another removed during traversal, in which case the
+         * table was never actually empty at any point. Note the
+         * similar use of modCounts in the size() and containsValue()
+         * methods, which are the only other methods also susceptible
+         * to ABA problems.
+         */
+        int[] mc = new int[segments.length];
+        int mcsum = 0;
+        for (int i = 0; i < segments.length; ++i) {
+            if (segments[i].count != 0) // read-volatile
+                return false;
+            else
+                mcsum += mc[i] = segments[i].modCount;
+        }
+        // If mcsum happens to be zero, then we know we got a snapshot
+        // before any modifications at all were made.  This is
+        // probably common enough to bother tracking.
+        if (mcsum != 0) {
+            // Re-check: every segment must still be empty AND unmodified
+            // since the first pass, or the "empty" observation is stale.
+            for (int i = 0; i < segments.length; ++i) {
+                if (segments[i].count != 0 ||
+                    mc[i] != segments[i].modCount)
+                    return false;
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Returns the number of key-value mappings in this map.  If the
+     * map contains more than <tt>Integer.MAX_VALUE</tt> elements, returns
+     * <tt>Integer.MAX_VALUE</tt>.
+     *
+     * @return the number of key-value mappings in this map
+     */
+    public int size() {
+        final Segment<K,V>[] segments = this.segments;
+        long sum = 0;
+        long check = 0;
+        int[] mc = new int[segments.length];
+        // Try a few times to get accurate count. On failure due to
+        // continuous async changes in table, resort to locking.
+        for (int k = 0; k < RETRIES_BEFORE_LOCK; ++k) {
+            check = 0;
+            sum = 0;
+            int mcsum = 0;
+            for (int i = 0; i < segments.length; ++i) {
+                sum += segments[i].count; // read-volatile
+                mcsum += mc[i] = segments[i].modCount;
+            }
+            if (mcsum != 0) {
+                // Second pass: the snapshot is valid only if no segment's
+                // modCount changed and the counts re-sum to the same total.
+                for (int i = 0; i < segments.length; ++i) {
+                    check += segments[i].count;
+                    if (mc[i] != segments[i].modCount) {
+                        check = -1; // force retry
+                        break;
+                    }
+                }
+            }
+            if (check == sum)
+                break;
+        }
+        if (check != sum) { // Resort to locking all segments
+            sum = 0;
+            for (int i = 0; i < segments.length; ++i)
+                segments[i].lock();
+            for (int i = 0; i < segments.length; ++i)
+                sum += segments[i].count;
+            // No try/finally needed: reading count cannot throw.
+            for (int i = 0; i < segments.length; ++i)
+                segments[i].unlock();
+        }
+        if (sum > Integer.MAX_VALUE)
+            return Integer.MAX_VALUE;
+        else
+            return (int)sum;
+    }
+
+    /**
+     * Returns the value to which the specified key is mapped,
+     * or {@code null} if this map contains no mapping for the key.
+     *
+     * <p>More formally, if this map contains a mapping from a key
+     * {@code k} to a value {@code v} such that {@code key.equals(k)},
+     * then this method returns {@code v}; otherwise it returns
+     * {@code null}.  (There can be at most one such mapping.)
+     *
+     * @throws NullPointerException if the specified key is null
+     */
+    public V get(Object key) {
+        int hash = hash(key.hashCode());
+        return segmentFor(hash).get(key, hash);
+    }
+
+    /**
+     * Tests if the specified object is a key in this table.
+     *
+     * @param  key   possible key
+     * @return <tt>true</tt> if and only if the specified object
+     *         is a key in this table, as determined by the
+     *         <tt>equals</tt> method; <tt>false</tt> otherwise.
+     * @throws NullPointerException if the specified key is null
+     */
+    public boolean containsKey(Object key) {
+        int hash = hash(key.hashCode());
+        return segmentFor(hash).containsKey(key, hash);
+    }
+
+    /**
+     * Returns <tt>true</tt> if this map maps one or more keys to the
+     * specified value. Note: This method requires a full internal
+     * traversal of the hash table, and so is much slower than
+     * method <tt>containsKey</tt>.
+     *
+     * @param value value whose presence in this map is to be tested
+     * @return <tt>true</tt> if this map maps one or more keys to the
+     *         specified value
+     * @throws NullPointerException if the specified value is null
+     */
+    public boolean containsValue(Object value) {
+        if (value == null)
+            throw new NullPointerException();
+
+        // See explanation of modCount use above
+
+        final Segment<K,V>[] segments = this.segments;
+        int[] mc = new int[segments.length];
+
+        // Try a few times without locking
+        for (int k = 0; k < RETRIES_BEFORE_LOCK; ++k) {
+            int sum = 0;
+            int mcsum = 0;
+            for (int i = 0; i < segments.length; ++i) {
+                int c = segments[i].count; // read-volatile; value itself unused
+                mcsum += mc[i] = segments[i].modCount;
+                if (segments[i].containsValue(value))
+                    return true;
+            }
+            boolean cleanSweep = true;
+            if (mcsum != 0) {
+                for (int i = 0; i < segments.length; ++i) {
+                    int c = segments[i].count; // read-volatile; value itself unused
+                    if (mc[i] != segments[i].modCount) {
+                        cleanSweep = false;
+                        break;
+                    }
+                }
+            }
+            if (cleanSweep)
+                return false;
+        }
+        // Resort to locking all segments
+        for (int i = 0; i < segments.length; ++i)
+            segments[i].lock();
+        boolean found = false;
+        try {
+            for (int i = 0; i < segments.length; ++i) {
+                if (segments[i].containsValue(value)) {
+                    found = true;
+                    break;
+                }
+            }
+        } finally {
+            for (int i = 0; i < segments.length; ++i)
+                segments[i].unlock();
+        }
+        return found;
+    }
+
+    /**
+     * Legacy method testing if some key maps into the specified value
+     * in this table.  This method is identical in functionality to
+     * {@link #containsValue}, and exists solely to ensure
+     * full compatibility with class {@link java.util.Hashtable},
+     * which supported this method prior to introduction of the
+     * Java Collections framework.
+     *
+     * @param  value a value to search for
+     * @return <tt>true</tt> if and only if some key maps to the
+     *         <tt>value</tt> argument in this table as
+     *         determined by the <tt>equals</tt> method;
+     *         <tt>false</tt> otherwise
+     * @throws NullPointerException if the specified value is null
+     */
+    public boolean contains(Object value) {
+        return containsValue(value);
+    }
+
+    /**
+     * Maps the specified key to the specified value in this table.
+     * Neither the key nor the value can be null.
+     *
+     * <p> The value can be retrieved by calling the <tt>get</tt> method
+     * with a key that is equal to the original key.
+     *
+     * @param key key with which the specified value is to be associated
+     * @param value value to be associated with the specified key
+     * @return the previous value associated with <tt>key</tt>, or
+     *         <tt>null</tt> if there was no mapping for <tt>key</tt>
+     * @throws NullPointerException if the specified key or value is null
+     */
+    public V put(K key, V value) {
+        if (value == null)
+            throw new NullPointerException();
+        int hash = hash(key.hashCode());
+        return segmentFor(hash).put(key, hash, value, false);
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * @return the previous value associated with the specified key,
+     *         or <tt>null</tt> if there was no mapping for the key
+     * @throws NullPointerException if the specified key or value is null
+     */
+    public V putIfAbsent(K key, V value) {
+        if (value == null)
+            throw new NullPointerException();
+        int hash = hash(key.hashCode());
+        return segmentFor(hash).put(key, hash, value, true);
+    }
+
+    /**
+     * Copies all of the mappings from the specified map to this one.
+     * These mappings replace any mappings that this map had for any of the
+     * keys currently in the specified map.
+     *
+     * @param m mappings to be stored in this map
+     */
+    public void putAll(Map<? extends K, ? extends V> m) {
+        for (Map.Entry<? extends K, ? extends V> e : m.entrySet())
+            put(e.getKey(), e.getValue());
+    }
+
+    /**
+     * Removes the key (and its corresponding value) from this map.
+     * This method does nothing if the key is not in the map.
+     *
+     * @param  key the key that needs to be removed
+     * @return the previous value associated with <tt>key</tt>, or
+     *         <tt>null</tt> if there was no mapping for <tt>key</tt>
+     * @throws NullPointerException if the specified key is null
+     */
+    public V remove(Object key) {
+        int hash = hash(key.hashCode());
+        return segmentFor(hash).remove(key, hash, null, false);
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * @throws NullPointerException if the specified key is null
+     */
+    public boolean remove(Object key, Object value) {
+        int hash = hash(key.hashCode());
+        if (value == null)
+            return false;
+        return segmentFor(hash).remove(key, hash, value, false) != null;
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * @throws NullPointerException if any of the arguments are null
+     */
+    public boolean replace(K key, V oldValue, V newValue) {
+        if (oldValue == null || newValue == null)
+            throw new NullPointerException();
+        // A null key throws NPE via key.hashCode().
+        int hash = hash(key.hashCode());
+        return segmentFor(hash).replace(key, hash, oldValue, newValue);
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * @return the previous value associated with the specified key,
+     *         or <tt>null</tt> if there was no mapping for the key
+     * @throws NullPointerException if the specified key or value is null
+     */
+    public V replace(K key, V value) {
+        if (value == null)
+            throw new NullPointerException();
+        // A null key throws NPE via key.hashCode().
+        int hash = hash(key.hashCode());
+        return segmentFor(hash).replace(key, hash, value);
+    }
+
+    /**
+     * Removes all of the mappings from this map by clearing every segment.
+     */
+    public void clear() {
+        for (Segment<K,V> segment : segments)
+            segment.clear();
+    }
+
+    /**
+     * Returns a {@link Set} view of the keys contained in this map.
+     * The set is backed by the map, so changes to the map are
+     * reflected in the set, and vice-versa.  The set supports element
+     * removal, which removes the corresponding mapping from this map,
+     * via the <tt>Iterator.remove</tt>, <tt>Set.remove</tt>,
+     * <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt>
+     * operations.  It does not support the <tt>add</tt> or
+     * <tt>addAll</tt> operations.
+     *
+     * <p>The view's <tt>iterator</tt> is a "weakly consistent" iterator
+     * that will never throw {@link ConcurrentModificationException},
+     * and guarantees to traverse elements as they existed upon
+     * construction of the iterator, and may (but is not guaranteed to)
+     * reflect any modifications subsequent to construction.
+     */
+    public Set<K> keySet() {
+        Set<K> ks = keySet;
+        // Lazily created; the unsynchronized read/write of the keySet field is
+        // a benign race — at worst a second (stateless) view instance is built.
+        return (ks != null) ? ks : (keySet = new KeySet());
+    }
+
+    /**
+     * Returns a {@link Collection} view of the values contained in this map.
+     * The collection is backed by the map, so changes to the map are
+     * reflected in the collection, and vice-versa.  The collection
+     * supports element removal, which removes the corresponding
+     * mapping from this map, via the <tt>Iterator.remove</tt>,
+     * <tt>Collection.remove</tt>, <tt>removeAll</tt>,
+     * <tt>retainAll</tt>, and <tt>clear</tt> operations.  It does not
+     * support the <tt>add</tt> or <tt>addAll</tt> operations.
+     *
+     * <p>The view's <tt>iterator</tt> is a "weakly consistent" iterator
+     * that will never throw {@link ConcurrentModificationException},
+     * and guarantees to traverse elements as they existed upon
+     * construction of the iterator, and may (but is not guaranteed to)
+     * reflect any modifications subsequent to construction.
+     */
+    public Collection<V> values() {
+        Collection<V> vs = values;
+        // Lazily created; benign race on the values field (views are stateless).
+        return (vs != null) ? vs : (values = new Values());
+    }
+
+    /**
+     * Returns a {@link Set} view of the mappings contained in this map.
+     * The set is backed by the map, so changes to the map are
+     * reflected in the set, and vice-versa.  The set supports element
+     * removal, which removes the corresponding mapping from the map,
+     * via the <tt>Iterator.remove</tt>, <tt>Set.remove</tt>,
+     * <tt>removeAll</tt>, <tt>retainAll</tt>, and <tt>clear</tt>
+     * operations.  It does not support the <tt>add</tt> or
+     * <tt>addAll</tt> operations.
+     *
+     * <p>The view's <tt>iterator</tt> is a "weakly consistent" iterator
+     * that will never throw {@link ConcurrentModificationException},
+     * and guarantees to traverse elements as they existed upon
+     * construction of the iterator, and may (but is not guaranteed to)
+     * reflect any modifications subsequent to construction.
+     */
+    public Set<Map.Entry<K,V>> entrySet() {
+        Set<Map.Entry<K,V>> es = entrySet;
+        // Lazily created; benign race on the entrySet field (views are stateless).
+        return (es != null) ? es : (entrySet = new EntrySet());
+    }
+
+    /**
+     * Returns an enumeration of the keys in this table.
+     *
+     * @return an enumeration of the keys in this table
+     * @see #keySet()
+     */
+    public Enumeration<K> keys() {
+        // Legacy Hashtable-style view, backed by the same iterator as keySet().
+        return new KeyIterator();
+    }
+
+    /**
+     * Returns an enumeration of the values in this table.
+     *
+     * @return an enumeration of the values in this table
+     * @see #values()
+     */
+    public Enumeration<V> elements() {
+        // Legacy Hashtable-style view, backed by the same iterator as values().
+        return new ValueIterator();
+    }
+
+    /* ---------------- Iterator Support -------------- */
+
+    /**
+     * Base class for the key, value and entry iterators/enumerations.
+     * Walks the segment array and each segment's table from the highest
+     * index down to zero, skipping entries whose weakly-referenced key
+     * has already been cleared by the garbage collector.
+     */
+    abstract class HashIterator {
+        int nextSegmentIndex;
+        int nextTableIndex;
+        HashEntry<K,V>[] currentTable;
+        HashEntry<K, V> nextEntry;
+        HashEntry<K, V> lastReturned;
+        K currentKey; // Strong reference to weak key (prevents gc)
+
+        HashIterator() {
+            nextSegmentIndex = segments.length - 1;
+            nextTableIndex = -1;
+            advance();
+        }
+
+        // Enumeration compatibility.
+        public boolean hasMoreElements() { return hasNext(); }
+
+        /**
+         * Moves nextEntry forward: first along the current hash chain,
+         * then down the current table, then into the next non-empty segment.
+         */
+        final void advance() {
+            if (nextEntry != null && (nextEntry = nextEntry.next) != null)
+                return;
+
+            while (nextTableIndex >= 0) {
+                if ( (nextEntry = currentTable[nextTableIndex--]) != null)
+                    return;
+            }
+
+            while (nextSegmentIndex >= 0) {
+                Segment<K,V> seg = segments[nextSegmentIndex--];
+                if (seg.count != 0) {
+                    currentTable = seg.table;
+                    for (int j = currentTable.length - 1; j >= 0; --j) {
+                        if ( (nextEntry = currentTable[j]) != null) {
+                            nextTableIndex = j - 1;
+                            return;
+                        }
+                    }
+                }
+            }
+        }
+
+        public boolean hasNext() { 
+            // Skip entries whose weak key has been collected.
+            while (nextEntry != null) {
+                if (nextEntry.keyRef.get() != null) 
+                    return true;
+                advance();
+            }
+            
+            return false;
+        }
+
+        /**
+         * Returns the next entry with a live key, pinning that key in
+         * currentKey so it cannot be collected before the caller uses it.
+         */
+        HashEntry<K,V> nextEntry() {
+            do {
+                if (nextEntry == null)
+                    throw new NoSuchElementException();
+                
+                lastReturned = nextEntry;
+                currentKey = lastReturned.keyRef.get();
+                advance();
+            } while (currentKey == null); // Skip GC'd keys
+            
+            return lastReturned;
+        }
+
+        public void remove() {
+            if (lastReturned == null)
+                throw new IllegalStateException();
+            // Remove by the pinned key of the entry most recently returned.
+            ConcurrentWeakHashMap.this.remove(currentKey);
+            lastReturned = null;
+        }
+    }
+
+    /** Iterator/Enumeration over the map's live keys. */
+    final class KeyIterator
+        extends HashIterator
+        implements Iterator<K>, Enumeration<K>
+    {
+        // keyRef.get() is non-null here: nextEntry() pinned the key in currentKey.
+        public K next()        { return super.nextEntry().keyRef.get(); }
+        public K nextElement() { return super.nextEntry().keyRef.get(); }
+    }
+
+    /** Iterator/Enumeration over the values of entries with live keys. */
+    final class ValueIterator
+        extends HashIterator
+        implements Iterator<V>, Enumeration<V>
+    {
+        public V next()        { return super.nextEntry().value; }
+        public V nextElement() { return super.nextEntry().value; }
+    }
+
+    /**
+     * Simple map entry, needed for JDK5 compatibility
+     * (java.util.AbstractMap.SimpleEntry is only public from JDK6 on).
+     */
+    static class SimpleEntry<K, V> implements Entry<K, V>,
+            java.io.Serializable {
+        private static final long serialVersionUID = -8499721149061103585L;
+
+        private final K key;   // fixed at construction
+        private V value;       // mutable via setValue
+
+        /** Creates an entry representing the mapping {@code key -> value}. */
+        public SimpleEntry(K key, V value) {
+            this.key = key;
+            this.value = value;
+        }
+
+        /** Creates an entry with the same key and value as {@code entry}. */
+        public SimpleEntry(Entry<? extends K, ? extends V> entry) {
+            this.key = entry.getKey();
+            this.value = entry.getValue();
+        }
+
+        public K getKey() {
+            return key;
+        }
+
+        public V getValue() {
+            return value;
+        }
+
+        /** Replaces this entry's value and returns the previous value. */
+        public V setValue(V value) {
+            V oldValue = this.value;
+            this.value = value;
+            return oldValue;
+        }
+
+        /** Per the Map.Entry contract: equal iff both key and value are equal. */
+        public boolean equals(Object o) {
+            if (!(o instanceof Map.Entry))
+                return false;
+            // Wildcard cast is checked by the instanceof above; no raw type and
+            // no @SuppressWarnings("unchecked") needed (matches EntrySet.contains).
+            Map.Entry<?,?> e = (Map.Entry<?,?>) o;
+            return eq(key, e.getKey()) && eq(value, e.getValue());
+        }
+
+        public int hashCode() {
+            return (key == null ? 0 : key.hashCode())
+                    ^ (value == null ? 0 : value.hashCode());
+        }
+
+        public String toString() {
+            return key + "=" + value;
+        }
+
+        /** Null-safe equality test. */
+        private static boolean eq(Object o1, Object o2) {
+            return o1 == null ? o2 == null : o1.equals(o2);
+        }
+    }
+
+
+    /**
+     * Custom Entry class used by EntryIterator.next(), that relays setValue
+     * changes to the underlying map.
+     */
+    final class WriteThroughEntry extends SimpleEntry<K,V>
+    {
+        private static final long serialVersionUID = -7900634345345313646L;
+
+        WriteThroughEntry(K k, V v) {
+            super(k,v);
+        }
+
+        /**
+         * Set our entry's value and write through to the map. The
+         * value to return is somewhat arbitrary here. Since a
+         * WriteThroughEntry does not necessarily track asynchronous
+         * changes, the most recent "previous" value could be
+         * different from what we return (or could even have been
+         * removed in which case the put will re-establish). We do not
+         * and cannot guarantee more.
+         */
+        public V setValue(V value) {
+            if (value == null) throw new NullPointerException();
+            V v = super.setValue(value);
+            // Write-through: update the backing map as well as this entry.
+            ConcurrentWeakHashMap.this.put(getKey(), value);
+            return v;
+        }
+    }
+
+    /** Iterator over write-through Map.Entry views of live entries. */
+    final class EntryIterator
+        extends HashIterator
+        implements Iterator<Entry<K,V>>
+    {
+        public Map.Entry<K,V> next() {
+            HashEntry<K,V> e = super.nextEntry();
+            // keyRef.get() is non-null: nextEntry() pinned the key in currentKey.
+            return new WriteThroughEntry(e.keyRef.get(), e.value);
+        }
+    }
+
+    /** Set view backing keySet(); every operation delegates to the map. */
+    final class KeySet extends AbstractSet<K> {
+        public Iterator<K> iterator() {
+            return new KeyIterator();
+        }
+        public int size() {
+            return ConcurrentWeakHashMap.this.size();
+        }
+        public boolean isEmpty() {
+            return ConcurrentWeakHashMap.this.isEmpty();
+        }
+        public boolean contains(Object o) {
+            return ConcurrentWeakHashMap.this.containsKey(o);
+        }
+        public boolean remove(Object o) {
+            return ConcurrentWeakHashMap.this.remove(o) != null;
+        }
+        public void clear() {
+            ConcurrentWeakHashMap.this.clear();
+        }
+    }
+
+    /** Collection view backing values(); every operation delegates to the map. */
+    final class Values extends AbstractCollection<V> {
+        public Iterator<V> iterator() {
+            return new ValueIterator();
+        }
+        public int size() {
+            return ConcurrentWeakHashMap.this.size();
+        }
+        public boolean isEmpty() {
+            return ConcurrentWeakHashMap.this.isEmpty();
+        }
+        public boolean contains(Object o) {
+            return ConcurrentWeakHashMap.this.containsValue(o);
+        }
+        public void clear() {
+            ConcurrentWeakHashMap.this.clear();
+        }
+    }
+
+    /** Set view backing entrySet(); every operation delegates to the map. */
+    final class EntrySet extends AbstractSet<Map.Entry<K,V>> {
+        public Iterator<Map.Entry<K,V>> iterator() {
+            return new EntryIterator();
+        }
+        public boolean contains(Object o) {
+            if (!(o instanceof Map.Entry))
+                return false;
+            Map.Entry<?,?> e = (Map.Entry<?,?>)o;
+            // get() returns null for an absent key, so a null v means "not present"
+            // (values are never null in this map).
+            V v = ConcurrentWeakHashMap.this.get(e.getKey());
+            return v != null && v.equals(e.getValue());
+        }
+        public boolean remove(Object o) {
+            if (!(o instanceof Map.Entry))
+                return false;
+            Map.Entry<?,?> e = (Map.Entry<?,?>)o;
+            return ConcurrentWeakHashMap.this.remove(e.getKey(), e.getValue());
+        }
+        public int size() {
+            return ConcurrentWeakHashMap.this.size();
+        }
+        public boolean isEmpty() {
+            return ConcurrentWeakHashMap.this.isEmpty();
+        }
+        public void clear() {
+            ConcurrentWeakHashMap.this.clear();
+        }
+    }
+
+    /* ---------------- Serialization Support -------------- */
+
+    /**
+     * Save the state of the <tt>ConcurrentWeakHashMap</tt> instance to a
+     * stream (i.e., serialize it).
+     * @param s the stream
+     * @serialData
+     * the key (Object) and value (Object)
+     * for each key-value mapping, followed by a null pair.
+     * The key-value mappings are emitted in no particular order.
+     */
+    private void writeObject(java.io.ObjectOutputStream s) throws IOException  {
+        s.defaultWriteObject();
+
+        // Each segment is locked individually while its table is walked, so the
+        // snapshot is consistent per segment but not atomic across the map.
+        for (int k = 0; k < segments.length; ++k) {
+            Segment<K,V> seg = segments[k];
+            seg.lock();
+            try {
+                HashEntry<K,V>[] tab = seg.table;
+                for (int i = 0; i < tab.length; ++i) {
+                    for (HashEntry<K,V> e = tab[i]; e != null; e = e.next) {
+                        K key = e.keyRef.get();
+                        if (key == null) // Skip GC'd keys
+                            continue;
+                        
+                        s.writeObject(key);
+                        s.writeObject(e.value);
+                    }
+                }
+            } finally {
+                seg.unlock();
+            }
+        }
+        // A null key/value pair terminates the entry stream (see readObject).
+        s.writeObject(null);
+        s.writeObject(null);
+    }
+
+    /**
+     * Reconstitute the <tt>ConcurrentWeakHashMap</tt> instance from a
+     * stream (i.e., deserialize it).
+     * @param s the stream
+     */
+    @SuppressWarnings("unchecked")
+    private void readObject(java.io.ObjectInputStream s)
+        throws IOException, ClassNotFoundException  {
+        s.defaultReadObject();
+
+        // Initialize each segment to be minimally sized, and let grow.
+        for (int i = 0; i < segments.length; ++i) {
+            segments[i].setTable(new HashEntry[1]);
+        }
+
+        // Read the keys and values, and put the mappings in the table
+        for (;;) {
+            K key = (K) s.readObject();
+            V value = (V) s.readObject();
+            if (key == null)
+                break; // null key marks the end of the serialized entries
+            put(key, value);
+        }
+    }
+}

Added: experimental/jsr166/src/jsr166y/ConcurrentWeakHashMapGCTestCase.java
===================================================================
--- experimental/jsr166/src/jsr166y/ConcurrentWeakHashMapGCTestCase.java	                        (rev 0)
+++ experimental/jsr166/src/jsr166y/ConcurrentWeakHashMapGCTestCase.java	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,122 @@
+package jsr166y;
+
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+
+import junit.framework.TestCase;
+
+/**
+ * GC-interaction tests for ConcurrentWeakHashMap: entries whose keys have
+ * become unreachable must be skipped by iterators and purged by subsequent
+ * map operations, while strongly-held keys must survive.
+ *
+ * NOTE(review): System.gc() plus a sleep is only a best-effort way to force
+ * weak references to clear; these assertions could be flaky on JVMs that
+ * ignore the GC hint — confirm on the target JVMs.
+ */
+public class ConcurrentWeakHashMapGCTestCase extends TestCase {
+
+    /** Stale (collected-key) entries are purged by later removal attempts. */
+    public void testBasicCleanup() throws Exception {
+        ConcurrentWeakHashMap<BinClump, Integer> map = new ConcurrentWeakHashMap<BinClump, Integer>();
+        BinClump[] hold = new BinClump[100];
+        generateClumps(map, hold, 10000);
+        System.gc();
+        Thread.sleep(1000);
+
+        // trigger a cleanup without matching any key
+        for (int i = 0; i < 100; i++)
+            map.remove(new BinClump(i));
+
+        // Only the 100 keys still strongly referenced via 'hold' should remain.
+        assertEquals(100, map.size());
+    }
+
+    /** Iterators skip stale entries and pin the keys they return. */
+    public void testIterators() throws Exception {
+        ConcurrentWeakHashMap<BinClump, Integer> map = new ConcurrentWeakHashMap<BinClump, Integer>();
+        BinClump[] hold = new BinClump[100];
+        generateClumps(map, hold, 10000);
+        System.gc();
+        Thread.sleep(500);
+
+        // Stale entries are not yet cleared
+        assertEquals(map.size(), 10000);
+
+        // Key iteration must see only the 100 live keys (hash codes 0..99).
+        HashSet<Integer> keys = new HashSet<Integer>();
+        for (BinClump clump : map.keySet()) {
+            assertTrue(clump.hashCode() < 100);
+            keys.add(clump.hashCode());
+        }
+        assertEquals(100, keys.size());
+
+        HashSet<Integer> values = new HashSet<Integer>(map.values());
+        assertEquals(100, values.size());
+
+        // Still not clear...
+        assertEquals(map.size(), 10000);
+
+        int count = 0;
+        for (Iterator<Map.Entry<BinClump, Integer>> iter = map.entrySet()
+                .iterator(); iter.hasNext();) {
+            Map.Entry<BinClump, Integer> entry = iter.next();
+            assertTrue(keys.contains(entry.getKey().hashCode()));
+            assertTrue(values.contains(entry.getValue()));
+            // Trigger cleanup
+            entry.setValue(entry.getValue());
+            count++;
+        }
+
+        assertEquals(100, count);
+
+        // Should be stale free now
+        assertEquals(100, map.size());
+        // Leave this iterator positioned on hold[0] so it pins that one key.
+        Iterator<BinClump> i = map.keySet().iterator();
+        while (i.hasNext() && i.next() != hold[0])
+            ;
+
+        hold = null;
+        System.gc();
+        Thread.sleep(500);
+
+        // trigger a cleanup without matching any key
+        for (int c = 0; c < 100; c++)
+            map.remove(new BinClump(c));
+
+        // iterator should hold a strong ref
+        assertEquals(1, map.size());
+
+        // Free iterator
+        i = null;
+        System.gc();
+        Thread.sleep(500);
+
+        // trigger a cleanup without matching any key
+        for (int c = 0; c < 100; c++)
+            map.remove(new BinClump(c));
+
+        assertTrue(map.isEmpty());
+
+    }
+
+    /**
+     * Fills 'map' with 'size' distinct keys whose hash codes collide in
+     * bunches of size/hold.length; every hold.length-th key is also stored
+     * in 'hold' so it stays strongly reachable across GC.
+     */
+    private void generateClumps(ConcurrentWeakHashMap<BinClump, Integer> map,
+            BinClump[] hold, int size) {
+        BinClump[] tmp = new BinClump[10000];
+
+        int holdSize = hold.length;
+        for (int c = 0, hc = 0; c < size; c++) {
+
+            BinClump clump = new BinClump(c / holdSize);
+            tmp[c] = clump;
+            if (c % holdSize == 0)
+                hold[hc++] = clump;
+            map.put(clump, c);
+        }
+        assertEquals(size, map.size());
+    }
+
+    /**
+     * Key type with a chosen hash code but inherited identity equals():
+     * distinct instances land in the same hash bin yet never compare equal,
+     * so lookups with freshly built instances can never match an entry.
+     */
+    public static class BinClump {
+        private int code;
+
+        public BinClump(int code) {
+            this.code = code;
+        }
+
+        public int hashCode() {
+            return code;
+        };
+
+        public String toString() {
+            return "BC(" + code + ")";
+        }
+    }
+}

Added: experimental/jsr166/src/jsr166y/ConcurrentWeakHashMapTest.java
===================================================================
--- experimental/jsr166/src/jsr166y/ConcurrentWeakHashMapTest.java	                        (rev 0)
+++ experimental/jsr166/src/jsr166y/ConcurrentWeakHashMapTest.java	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,620 @@
+package jsr166y;
+
+/*
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ * Other contributors include Andrew Wright, Jeffrey Hayes, 
+ * Pat Fisher, Mike Judd. 
+ */
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Enumeration;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+public class ConcurrentWeakHashMapTest extends JSR166TestCase{
+    /** Command-line entry point: runs the whole suite with the text runner. */
+    public static void main(String[] args) {
+        junit.textui.TestRunner.run(suite());
+    }
+    /** Builds the suite containing every test in this class. */
+    public static Test suite() {
+        return new TestSuite(ConcurrentWeakHashMapTest.class);
+    }
+
+    /**
+     * Builds a map holding Integers one..five mapped to "A".."E",
+     * asserting emptiness before and size 5 after population.
+     */
+    private static ConcurrentWeakHashMap map5() {
+        ConcurrentWeakHashMap map = new ConcurrentWeakHashMap(5);
+        assertTrue(map.isEmpty());
+        map.put(one, "A");
+        map.put(two, "B");
+        map.put(three, "C");
+        map.put(four, "D");
+        map.put(five, "E");
+        assertFalse(map.isEmpty());
+        assertEquals(5, map.size());
+        return map;
+    }
+
+    /**
+     *  clear removes all pairs
+     */
+    public void testClear() {
+        ConcurrentWeakHashMap map = map5();
+        map.clear();
+        // Expected value first, matching the JUnit convention used elsewhere.
+        assertEquals(0, map.size());
+    }
+
+    /**
+     * Maps with identical contents are equal; clearing one breaks equality
+     * in both directions.
+     */
+    public void testEquals() {
+        ConcurrentWeakHashMap map1 = map5();
+        ConcurrentWeakHashMap map2 = map5();
+        assertEquals(map1, map2);
+        assertEquals(map2, map1);
+        map1.clear();
+        assertFalse(map1.equals(map2));
+        assertFalse(map2.equals(map1));
+    }
+
+    /**
+     * contains(value) is true only for values present in the map.
+     */
+    public void testContains() {
+        ConcurrentWeakHashMap map = map5();
+        assertTrue(map.contains("A"));
+        assertFalse(map.contains("Z"));
+    }
+    
+    /**
+     * containsKey is true only for keys present in the map.
+     */
+    public void testContainsKey() {
+        ConcurrentWeakHashMap map = map5();
+        assertTrue(map.containsKey(one));
+        assertFalse(map.containsKey(zero));
+    }
+
+    /**
+     * containsValue is true only for values held by some mapping.
+     */
+    public void testContainsValue() {
+        ConcurrentWeakHashMap map = map5();
+        assertTrue(map.containsValue("A"));
+        assertFalse(map.containsValue("Z"));
+    }
+
+    /**
+     * elements() enumerates exactly as many values as the map holds.
+     */
+    public void testEnumeration() {
+        ConcurrentWeakHashMap map = map5();
+        int count = 0;
+        for (Enumeration e = map.elements(); e.hasMoreElements(); e.nextElement())
+            count++;
+        assertEquals(5, count);
+    }
+
+    /**
+     *  get returns the correct element at the given key,
+     *  or null if not present
+     */
+    public void testGet() {
+        ConcurrentWeakHashMap map = map5();
+        assertEquals("A", (String)map.get(one));
+        ConcurrentWeakHashMap empty = new ConcurrentWeakHashMap();
+        // Exercise the empty map (it was previously created but never used).
+        assertNull(empty.get("anything"));
+        assertNull(map.get("anything"));
+    }
+
+    /**
+     * isEmpty is true for a fresh map and false for a populated one.
+     */
+    public void testIsEmpty() {
+        assertTrue(new ConcurrentWeakHashMap().isEmpty());
+        assertFalse(map5().isEmpty());
+    }
+
+    /**
+     * keys() enumerates exactly as many keys as the map holds.
+     */
+    public void testKeys() {
+        ConcurrentWeakHashMap map = map5();
+        int count = 0;
+        for (Enumeration e = map.keys(); e.hasMoreElements(); e.nextElement())
+            count++;
+        assertEquals(5, count);
+    }
+
+    /**
+     * keySet returns a Set containing all five keys.
+     */
+    public void testKeySet() {
+        Set s = map5().keySet();
+        assertEquals(5, s.size());
+        assertTrue(s.contains(one));
+        assertTrue(s.contains(two));
+        assertTrue(s.contains(three));
+        assertTrue(s.contains(four));
+        assertTrue(s.contains(five));
+    }
+
+    /**
+     * keySet().toArray() yields all keys; mutating the array copy does
+     * not affect the set, so a foreign element breaks containsAll.
+     */
+    public void testKeySetToArray() {
+        Set s = map5().keySet();
+        Object[] ar = s.toArray();
+        assertEquals(5, ar.length);
+        assertTrue(s.containsAll(Arrays.asList(ar)));
+        ar[0] = m10;
+        assertFalse(s.containsAll(Arrays.asList(ar)));
+    }
+
+    /**
+     * values().toArray() contains every value exactly once.
+     */
+    public void testValuesToArray() {
+        Object[] ar = map5().values().toArray();
+        assertEquals(5, ar.length);
+        ArrayList s = new ArrayList(Arrays.asList(ar));
+        assertTrue(s.contains("A"));
+        assertTrue(s.contains("B"));
+        assertTrue(s.contains("C"));
+        assertTrue(s.contains("D"));
+        assertTrue(s.contains("E"));
+    }
+
+    /**
+     * entrySet().toArray() yields entries whose keys and values are all
+     * present in the map.
+     */
+    public void testEntrySetToArray() {
+        ConcurrentWeakHashMap map = map5();
+        Object[] ar = map.entrySet().toArray();
+        assertEquals(5, ar.length);
+        for (Object o : ar) {
+            Map.Entry e = (Map.Entry) o;
+            assertTrue(map.containsKey(e.getKey()));
+            assertTrue(map.containsValue(e.getValue()));
+        }
+    }
+
+    /**
+     * values() returns a collection containing all five values.
+     */
+    public void testValues() {
+        Collection s = map5().values();
+        assertEquals(5, s.size());
+        assertTrue(s.contains("A"));
+        assertTrue(s.contains("B"));
+        assertTrue(s.contains("C"));
+        assertTrue(s.contains("D"));
+        assertTrue(s.contains("E"));
+    }
+
+    /**
+     * entrySet() contains exactly the five expected key/value pairs.
+     */
+    public void testEntrySet() {
+        Set s = map5().entrySet();
+        assertEquals(5, s.size());
+        for (Object o : s) {
+            Map.Entry e = (Map.Entry) o;
+            assertTrue(
+                (e.getKey().equals(one) && e.getValue().equals("A")) ||
+                (e.getKey().equals(two) && e.getValue().equals("B")) ||
+                (e.getKey().equals(three) && e.getValue().equals("C")) ||
+                (e.getKey().equals(four) && e.getValue().equals("D")) ||
+                (e.getKey().equals(five) && e.getValue().equals("E")));
+        }
+    }
+
+    /**
+     * putAll copies every key-value pair from the given map.
+     */
+    public void testPutAll() {
+        ConcurrentWeakHashMap empty = new ConcurrentWeakHashMap();
+        empty.putAll(map5());
+        assertEquals(5, empty.size());
+        assertTrue(empty.containsKey(one));
+        assertTrue(empty.containsKey(two));
+        assertTrue(empty.containsKey(three));
+        assertTrue(empty.containsKey(four));
+        assertTrue(empty.containsKey(five));
+    }
+
+    /**
+     * putIfAbsent inserts when the key is absent.
+     */
+    public void testPutIfAbsent() {
+        ConcurrentWeakHashMap map = map5();
+        map.putIfAbsent(six, "Z");
+        assertTrue(map.containsKey(six));
+    }
+
+    /**
+     * putIfAbsent leaves an existing mapping alone and returns its value.
+     */
+    public void testPutIfAbsent2() {
+        assertEquals("A", map5().putIfAbsent(one, "Z"));
+    }
+
+    /**
+     * replace(k, v) is a no-op (returning null) when the key is absent.
+     */
+    public void testReplace() {
+        ConcurrentWeakHashMap map = map5();
+        assertNull(map.replace(six, "Z"));
+        assertFalse(map.containsKey(six));
+    }
+
+    /**
+     * replace(k, v) overwrites an existing mapping and reports the old value.
+     */
+    public void testReplace2() {
+        ConcurrentWeakHashMap map = map5();
+        assertNotNull(map.replace(one, "Z"));
+        assertEquals("Z", map.get(one));
+    }
+
+
+    /**
+     * replace(k, old, new) fails when the key is not mapped to 'old'.
+     */
+    public void testReplaceValue() {
+        ConcurrentWeakHashMap map = map5();
+        assertEquals("A", map.get(one));
+        assertFalse(map.replace(one, "Z", "Z"));
+        assertEquals("A", map.get(one));
+    }
+
+    /**
+     * replace(k, old, new) succeeds when the key is mapped to 'old'.
+     */
+    public void testReplaceValue2() {
+        ConcurrentWeakHashMap map = map5();
+        assertEquals("A", map.get(one));
+        assertTrue(map.replace(one, "A", "Z"));
+        assertEquals("Z", map.get(one));
+    }
+
+
+    /**
+     * remove(key) deletes exactly that mapping.
+     */
+    public void testRemove() {
+        ConcurrentWeakHashMap map = map5();
+        map.remove(five);
+        assertEquals(4, map.size());
+        assertFalse(map.containsKey(five));
+    }
+
+    /**
+     * remove(key, value) removes only when the pair matches exactly.
+     */
+    public void testRemove2() {
+        ConcurrentWeakHashMap map = map5();
+        map.remove(five, "E");
+        assertEquals(4, map.size());
+        assertFalse(map.containsKey(five));
+        map.remove(four, "A");      // value mismatch: must be a no-op
+        assertEquals(4, map.size());
+        assertTrue(map.containsKey(four));
+    }
+
+    /**
+     * size reports 0 for an empty map and 5 for map5().
+     */
+    public void testSize() {
+        assertEquals(0, new ConcurrentWeakHashMap().size());
+        assertEquals(5, map5().size());
+    }
+
+    /**
+     * toString mentions each of the integer keys 1..5.
+     */
+    public void testToString() {
+        String s = map5().toString();
+        for (int i = 1; i <= 5; ++i)
+            assertTrue(s.indexOf(String.valueOf(i)) >= 0);
+    }
+
+    // Exception tests
+    
+    /**
+     * Cannot create with negative capacity
+     */
+    public void testConstructor1() {
+        try {
+            new ConcurrentWeakHashMap(-1, 0, 1);
+            shouldThrow();
+        } catch (IllegalArgumentException success) {}
+    }
+
+    /**
+     * Cannot create with negative concurrency level
+     */
+    public void testConstructor2() {
+        try {
+            new ConcurrentWeakHashMap(1, 0, -1);
+            shouldThrow();
+        } catch (IllegalArgumentException success) {}
+    }
+
+    /**
+     * Cannot create with only negative capacity
+     */
+    public void testConstructor3() {
+        try {
+            new ConcurrentWeakHashMap(-1);
+            shouldThrow();
+        } catch (IllegalArgumentException success) {}
+    }
+
+    /**
+     * get(null) throws NPE
+     */
+    public void testGet_NullPointerException() {
+        try {
+            new ConcurrentWeakHashMap(5).get(null);
+            shouldThrow();
+        } catch (NullPointerException success) {}
+    }
+
+    /**
+     * containsKey(null) throws NPE
+     */
+    public void testContainsKey_NullPointerException() {
+        try {
+            new ConcurrentWeakHashMap(5).containsKey(null);
+            shouldThrow();
+        } catch (NullPointerException success) {}
+    }
+
+    /**
+     * containsValue(null) throws NPE
+     */
+    public void testContainsValue_NullPointerException() {
+        try {
+            new ConcurrentWeakHashMap(5).containsValue(null);
+            shouldThrow();
+        } catch (NullPointerException success) {}
+    }
+
+    /**
+     * contains(null) throws NPE
+     */
+    public void testContains_NullPointerException() {
+        try {
+            new ConcurrentWeakHashMap(5).contains(null);
+            shouldThrow();
+        } catch (NullPointerException success) {}
+    }
+
+    /**
+     * put(null,x) throws NPE
+     */
+    public void testPut1_NullPointerException() {
+        try {
+            new ConcurrentWeakHashMap(5).put(null, "whatever");
+            shouldThrow();
+        } catch (NullPointerException success) {}
+    }
+
+    /**
+     * put(x, null) throws NPE
+     */
+    public void testPut2_NullPointerException() {
+        try {
+            new ConcurrentWeakHashMap(5).put("whatever", null);
+            shouldThrow();
+        } catch (NullPointerException success) {}
+    }
+
+    /**
+     * putIfAbsent(null, x) throws NPE
+     */
+    public void testPutIfAbsent1_NullPointerException() {
+        try {
+            new ConcurrentWeakHashMap(5).putIfAbsent(null, "whatever");
+            shouldThrow();
+        } catch (NullPointerException success) {}
+    }
+
+    /**
+     * replace(null, x) throws NPE
+     */
+    public void testReplace_NullPointerException() {
+        try {
+            new ConcurrentWeakHashMap(5).replace(null, "whatever");
+            shouldThrow();
+        } catch (NullPointerException success) {}
+    }
+
+    /**
+     * Verifies that three-arg replace with a null key throws
+     * NullPointerException.
+     */
+    public void testReplaceValue_NullPointerException() {
+        ConcurrentWeakHashMap map = new ConcurrentWeakHashMap(5);
+        try {
+            map.replace(null, one, "whatever");
+            shouldThrow();
+        } catch (NullPointerException expected) {
+            // success: null keys are not permitted
+        }
+    }
+
+    /**
+     * Verifies that putIfAbsent with a null value throws
+     * NullPointerException.
+     */
+    public void testPutIfAbsent2_NullPointerException() {
+        ConcurrentWeakHashMap map = new ConcurrentWeakHashMap(5);
+        try {
+            map.putIfAbsent("whatever", null);
+            shouldThrow();
+        } catch (NullPointerException expected) {
+            // success: null values are not permitted
+        }
+    }
+
+
+    /**
+     * Verifies that two-arg replace with a null value throws
+     * NullPointerException.
+     */
+    public void testReplace2_NullPointerException() {
+        ConcurrentWeakHashMap map = new ConcurrentWeakHashMap(5);
+        try {
+            map.replace("whatever", null);
+            shouldThrow();
+        } catch (NullPointerException expected) {
+            // success: null values are not permitted
+        }
+    }
+
+    /**
+     * Verifies that three-arg replace with a null expected value throws
+     * NullPointerException.
+     */
+    public void testReplaceValue2_NullPointerException() {
+        ConcurrentWeakHashMap map = new ConcurrentWeakHashMap(5);
+        try {
+            map.replace("whatever", null, "A");
+            shouldThrow();
+        } catch (NullPointerException expected) {
+            // success: null values are not permitted
+        }
+    }
+
+    /**
+     * Verifies that three-arg replace with a null replacement value throws
+     * NullPointerException.
+     */
+    public void testReplaceValue3_NullPointerException() {
+        ConcurrentWeakHashMap map = new ConcurrentWeakHashMap(5);
+        try {
+            map.replace("whatever", one, null);
+            shouldThrow();
+        } catch (NullPointerException expected) {
+            // success: null values are not permitted
+        }
+    }
+
+
+    /**
+     * Verifies that remove with a null key throws NullPointerException,
+     * even on a non-empty map.
+     */
+    public void testRemove1_NullPointerException() {
+        ConcurrentWeakHashMap map = new ConcurrentWeakHashMap(5);
+        map.put("sadsdf", "asdads");
+        try {
+            map.remove(null);
+            shouldThrow();
+        } catch (NullPointerException expected) {
+            // success: null keys are not permitted
+        }
+    }
+
+    /**
+     * Verifies that two-arg remove with a null key throws
+     * NullPointerException, even on a non-empty map.
+     */
+    public void testRemove2_NullPointerException() {
+        ConcurrentWeakHashMap map = new ConcurrentWeakHashMap(5);
+        map.put("sadsdf", "asdads");
+        try {
+            map.remove(null, "whatever");
+            shouldThrow();
+        } catch (NullPointerException expected) {
+            // success: null keys are not permitted
+        }
+    }
+
+    /**
+     * Verifies that two-arg remove with a null expected value simply
+     * returns false rather than throwing.
+     */
+    public void testRemove3() {
+        ConcurrentWeakHashMap map = new ConcurrentWeakHashMap(5);
+        map.put("sadsdf", "asdads");
+        try {
+            // a null expected value can never match, but must not throw
+            assertFalse(map.remove("sadsdf", null));
+        } catch (NullPointerException e) {
+            fail();
+        }
+    }
+
+    /**
+     * A map round-tripped through Java serialization is equal (in both
+     * directions) to the original and preserves its size.
+     */
+    public void testSerialization() {
+        ConcurrentWeakHashMap q = map5();
+
+        try {
+            ByteArrayOutputStream bout = new ByteArrayOutputStream(10000);
+            ObjectOutputStream out = new ObjectOutputStream(new BufferedOutputStream(bout));
+            out.writeObject(q);
+            out.close();
+
+            ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());
+            ObjectInputStream in = new ObjectInputStream(new BufferedInputStream(bin));
+            ConcurrentWeakHashMap r = (ConcurrentWeakHashMap)in.readObject();
+            // FIX: the input stream was previously never closed
+            in.close();
+            assertEquals(q.size(), r.size());
+            // equals must hold symmetrically
+            assertTrue(q.equals(r));
+            assertTrue(r.equals(q));
+        } catch(Exception e){
+            e.printStackTrace();
+            unexpectedException();
+        }
+    }
+
+
+    /**
+     * setValue on an entry obtained from entrySet() must write through to
+     * the backing map, even after an unrelated removal has caused the map
+     * to internally clone the entry's chain.
+     */
+    public void testSetValueWriteThrough() {
+        // Adapted from a bug report by Eric Zoerner 
+        ConcurrentWeakHashMap map = new ConcurrentWeakHashMap(2, 5.0f, 1);
+        assertTrue(map.isEmpty());
+        for (int i = 0; i < 20; i++)
+            map.put(new Integer(i), new Integer(i));
+        assertFalse(map.isEmpty());
+        // grab an arbitrary live entry from the entry-set view
+        Map.Entry entry1 = (Map.Entry)map.entrySet().iterator().next();
+        
+        // assert that entry1 is not 16
+        assertTrue("entry is 16, test not valid",
+                   !entry1.getKey().equals(new Integer(16)));
+        
+        // remove 16 (a different key) from map 
+        // which just happens to cause entry1 to be cloned in map
+        map.remove(new Integer(16));
+        // write-through: the mutation must be visible via the map itself
+        entry1.setValue("XYZ");
+        assertTrue(map.containsValue("XYZ")); // fails
+    }
+    
+}


Property changes on: experimental/jsr166/src/jsr166y/ConcurrentWeakHashMapTest.java
___________________________________________________________________
Name: svn:executable
   + *

Added: experimental/jsr166/src/jsr166y/JSR166TestCase.java
===================================================================
--- experimental/jsr166/src/jsr166y/JSR166TestCase.java	                        (rev 0)
+++ experimental/jsr166/src/jsr166y/JSR166TestCase.java	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,525 @@
+package jsr166y;
+
+/*
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ * Other contributors include Andrew Wright, Jeffrey Hayes,
+ * Pat Fisher, Mike Judd.
+ */
+
+import junit.framework.*;
+import java.util.*;
+import java.util.concurrent.*;
+import java.io.*;
+import java.security.*;
+
+/**
+ * Base class for JSR166 Junit TCK tests.  Defines some constants,
+ * utility methods and classes, as well as a simple framework for
+ * helping to make sure that assertions failing in generated threads
+ * cause the associated test that generated them to itself fail (which
+ * JUnit does not otherwise arrange).  The rules for creating such
+ * tests are:
+ *
+ * <ol>
+ *
+ * <li> All assertions in code running in generated threads must use
+ * the forms {@link #threadFail}, {@link #threadAssertTrue}, {@link
+ * #threadAssertEquals}, or {@link #threadAssertNull}, (not
+ * <tt>fail</tt>, <tt>assertTrue</tt>, etc.) It is OK (but not
+ * particularly recommended) for other code to use these forms too.
+ * Only the most typically used JUnit assertion methods are defined
+ * this way, but enough to live with.</li>
+ *
+ * <li> If you override {@link #setUp} or {@link #tearDown}, make sure
+ * to invoke <tt>super.setUp</tt> and <tt>super.tearDown</tt> within
+ * them. These methods are used to clear and check for thread
+ * assertion failures.</li>
+ *
+ * <li>All delays and timeouts must use one of the constants <tt>
+ * SHORT_DELAY_MS</tt>, <tt> SMALL_DELAY_MS</tt>, <tt> MEDIUM_DELAY_MS</tt>,
+ * <tt> LONG_DELAY_MS</tt>. The idea here is that a SHORT is always
+ * discriminable from zero time, and always allows enough time for the
+ * small amounts of computation (creating a thread, calling a few
+ * methods, etc) needed to reach a timeout point. Similarly, a SMALL
+ * is always discriminable as larger than SHORT and smaller than
+ * MEDIUM.  And so on. These constants are set to conservative values,
+ * but even so, if there is ever any doubt, they can all be increased
+ * in one spot to rerun tests on slower platforms.</li>
+ *
+ * <li> All threads generated must be joined inside each test case
+ * method (or <tt>fail</tt> to do so) before returning from the
+ * method. The <tt> joinPool</tt> method can be used to do this when
+ * using Executors.</li>
+ *
+ * </ol>
+ *
+ * <p> <b>Other notes</b>
+ * <ul>
+ *
+ * <li> Usually, there is one testcase method per JSR166 method
+ * covering "normal" operation, and then as many exception-testing
+ * methods as there are exceptions the method can throw. Sometimes
+ * there are multiple tests per JSR166 method when the different
+ * "normal" behaviors differ significantly. And sometimes testcases
+ * cover multiple methods when they cannot be tested in
+ * isolation.</li>
+ *
+ * <li> The documentation style for testcases is to provide as javadoc
+ * a simple sentence or two describing the property that the testcase
+ * method purports to test. The javadocs do not say anything about how
+ * the property is tested. To find out, read the code.</li>
+ *
+ * <li> These tests are "conformance tests", and do not attempt to
+ * test throughput, latency, scalability or other performance factors
+ * (see the separate "jtreg" tests for a set intended to check these
+ * for the most central aspects of functionality.) So, most tests use
+ * the smallest sensible numbers of threads, collection sizes, etc
+ * needed to check basic conformance.</li>
+ *
+ * <li>The test classes currently do not declare inclusion in
+ * any particular package to simplify things for people integrating
+ * them in TCK test suites.</li>
+ *
+ * <li> As a convenience, the <tt>main</tt> of this class (JSR166TestCase)
+ * runs all JSR166 unit tests.</li>
+ *
+ * </ul>
+ */
+public class JSR166TestCase extends TestCase {
+    /**
+     * Runs all JSR166 unit tests using junit.textui.TestRunner.
+     * An optional first argument gives the number of iterations.
+     */
+    public static void main(String[] args) {
+        int iters = 1;
+        if (args.length > 0)
+            iters = Integer.parseInt(args[0]);
+        Test s = suite();
+        for (int i = 0; i < iters; ++i) {
+            junit.textui.TestRunner.run(s);
+            // encourage collection of weak references between iterations
+            System.gc();
+            System.runFinalization();
+        }
+        System.exit(0);
+    }
+
+    /**
+     * Collects all JSR166 unit tests as one suite
+     */
+    public static Test suite() {
+        TestSuite suite = new TestSuite("JSR166 Unit Tests");
+
+        suite.addTest(new TestSuite(ConcurrentWeakHashMapTest.class));
+
+        return suite;
+    }
+
+
+    // Delay constants, initialized by setDelays() from setUp()
+    public static long SHORT_DELAY_MS;
+    public static long SMALL_DELAY_MS;
+    public static long MEDIUM_DELAY_MS;
+    public static long LONG_DELAY_MS;
+
+
+    /**
+     * Returns the shortest timed delay. This could
+     * be reimplemented to use for example a Property.
+     */
+    protected long getShortDelay() {
+        return 50;
+    }
+
+
+    /**
+     * Sets delays as multiples of SHORT_DELAY.
+     */
+    protected void setDelays() {
+        SHORT_DELAY_MS = getShortDelay();
+        SMALL_DELAY_MS = SHORT_DELAY_MS * 5;
+        MEDIUM_DELAY_MS = SHORT_DELAY_MS * 10;
+        LONG_DELAY_MS = SHORT_DELAY_MS * 50;
+    }
+
+    /**
+     * Flag set true if any threadAssert methods fail
+     */
+    volatile boolean threadFailed;
+
+    /**
+     * Initializes test to indicate that no thread assertions have failed
+     */
+    public void setUp() {
+        setDelays();
+        threadFailed = false;
+    }
+
+    /**
+     * Triggers test case failure if any thread assertions have failed
+     */
+    public void tearDown() {
+        assertFalse(threadFailed);
+    }
+
+    /**
+     * Fail, also setting status to indicate current testcase should fail
+     */
+    public void threadFail(String reason) {
+        threadFailed = true;
+        fail(reason);
+    }
+
+    /**
+     * If expression not true, set status to indicate current testcase
+     * should fail
+     */
+    public void threadAssertTrue(boolean b) {
+        if (!b) {
+            threadFailed = true;
+            assertTrue(b);
+        }
+    }
+
+    /**
+     * If expression not false, set status to indicate current testcase
+     * should fail
+     */
+    public void threadAssertFalse(boolean b) {
+        if (b) {
+            threadFailed = true;
+            assertFalse(b);
+        }
+    }
+
+    /**
+     * If argument not null, set status to indicate current testcase
+     * should fail
+     */
+    public void threadAssertNull(Object x) {
+        if (x != null) {
+            threadFailed = true;
+            assertNull(x);
+        }
+    }
+
+    /**
+     * If arguments not equal, set status to indicate current testcase
+     * should fail
+     */
+    public void threadAssertEquals(long x, long y) {
+        if (x != y) {
+            threadFailed = true;
+            assertEquals(x, y);
+        }
+    }
+
+    /**
+     * If arguments not equal, set status to indicate current testcase
+     * should fail
+     */
+    public void threadAssertEquals(Object x, Object y) {
+        if (x != y && (x == null || !x.equals(y))) {
+            threadFailed = true;
+            assertEquals(x, y);
+        }
+    }
+
+    /**
+     * threadFail with message "should throw exception"
+     */
+    public void threadShouldThrow() {
+        threadFailed = true;
+        fail("should throw exception");
+    }
+
+    /**
+     * threadFail with message "Unexpected exception"
+     */
+    public void threadUnexpectedException() {
+        threadFailed = true;
+        fail("Unexpected exception");
+    }
+
+
+    /**
+     * Wait out termination of a thread pool or fail doing so
+     */
+    public void joinPool(ExecutorService exec) {
+        try {
+            exec.shutdown();
+            assertTrue(exec.awaitTermination(LONG_DELAY_MS, TimeUnit.MILLISECONDS));
+        } catch(SecurityException ok) {
+            // Allowed in case test doesn't have privs
+        } catch(InterruptedException ie) {
+            // FIX: restore the interrupt status before failing, per the
+            // standard contract for handlers of InterruptedException
+            Thread.currentThread().interrupt();
+            fail("Unexpected exception");
+        }
+    }
+
+
+    /**
+     * fail with message "should throw exception"
+     */
+    public void shouldThrow() {
+        fail("Should throw exception");
+    }
+
+    /**
+     * fail with message "Unexpected exception"
+     */
+    public void unexpectedException() {
+        fail("Unexpected exception");
+    }
+
+
+    /**
+     * The number of elements to place in collections, arrays, etc.
+     */
+    static final int SIZE = 20;
+
+    // Some convenient Integer constants
+
+    static final Integer zero = new Integer(0);
+    static final Integer one = new Integer(1);
+    static final Integer two = new Integer(2);
+    static final Integer three  = new Integer(3);
+    static final Integer four  = new Integer(4);
+    static final Integer five  = new Integer(5);
+    static final Integer six = new Integer(6);
+    static final Integer seven = new Integer(7);
+    static final Integer eight = new Integer(8);
+    static final Integer nine = new Integer(9);
+    static final Integer m1  = new Integer(-1);
+    static final Integer m2  = new Integer(-2);
+    static final Integer m3  = new Integer(-3);
+    static final Integer m4 = new Integer(-4);
+    static final Integer m5 = new Integer(-5);
+    static final Integer m6 = new Integer(-6);
+    static final Integer m10 = new Integer(-10);
+
+
+    /**
+     * A security policy where new permissions can be dynamically added
+     * or all cleared.
+     */
+    static class AdjustablePolicy extends java.security.Policy {
+        Permissions perms = new Permissions();
+        AdjustablePolicy() { }
+        void addPermission(Permission perm) { perms.add(perm); }
+        void clearPermissions() { perms = new Permissions(); }
+        public PermissionCollection getPermissions(CodeSource cs) {
+            return perms;
+        }
+        public PermissionCollection getPermissions(ProtectionDomain pd) {
+            return perms;
+        }
+        public boolean implies(ProtectionDomain pd, Permission p) {
+            return perms.implies(p);
+        }
+        public void refresh() {}
+    }
+
+
+    // Some convenient Runnable classes
+
+    static class NoOpRunnable implements Runnable {
+        public void run() {}
+    }
+
+    static class NoOpCallable implements Callable {
+        public Object call() { return Boolean.TRUE; }
+    }
+
+    static final String TEST_STRING = "a test string";
+
+    static class StringTask implements Callable<String> {
+        public String call() { return TEST_STRING; }
+    }
+
+    static class NPETask implements Callable<String> {
+        public String call() { throw new NullPointerException(); }
+    }
+
+    static class CallableOne implements Callable<Integer> {
+        public Integer call() { return one; }
+    }
+
+    class ShortRunnable implements Runnable {
+        public void run() {
+            try {
+                Thread.sleep(SHORT_DELAY_MS);
+            }
+            catch(Exception e) {
+                threadUnexpectedException();
+            }
+        }
+    }
+
+    class ShortInterruptedRunnable implements Runnable {
+        public void run() {
+            try {
+                Thread.sleep(SHORT_DELAY_MS);
+                threadShouldThrow();
+            }
+            catch(InterruptedException success) {
+            }
+        }
+    }
+
+    class SmallRunnable implements Runnable {
+        public void run() {
+            try {
+                Thread.sleep(SMALL_DELAY_MS);
+            }
+            catch(Exception e) {
+                threadUnexpectedException();
+            }
+        }
+    }
+
+    class SmallPossiblyInterruptedRunnable implements Runnable {
+        public void run() {
+            try {
+                Thread.sleep(SMALL_DELAY_MS);
+            }
+            catch(Exception e) {
+                // interruption is acceptable here by design
+            }
+        }
+    }
+
+    class SmallCallable implements Callable {
+        public Object call() {
+            try {
+                Thread.sleep(SMALL_DELAY_MS);
+            }
+            catch(Exception e) {
+                threadUnexpectedException();
+            }
+            return Boolean.TRUE;
+        }
+    }
+
+    class SmallInterruptedRunnable implements Runnable {
+        public void run() {
+            try {
+                Thread.sleep(SMALL_DELAY_MS);
+                threadShouldThrow();
+            }
+            catch(InterruptedException success) {
+            }
+        }
+    }
+
+
+    class MediumRunnable implements Runnable {
+        public void run() {
+            try {
+                Thread.sleep(MEDIUM_DELAY_MS);
+            }
+            catch(Exception e) {
+                threadUnexpectedException();
+            }
+        }
+    }
+
+    class MediumInterruptedRunnable implements Runnable {
+        public void run() {
+            try {
+                Thread.sleep(MEDIUM_DELAY_MS);
+                threadShouldThrow();
+            }
+            catch(InterruptedException success) {
+            }
+        }
+    }
+
+    class MediumPossiblyInterruptedRunnable implements Runnable {
+        public void run() {
+            try {
+                Thread.sleep(MEDIUM_DELAY_MS);
+            }
+            catch(InterruptedException success) {
+            }
+        }
+    }
+
+    class LongPossiblyInterruptedRunnable implements Runnable {
+        public void run() {
+            try {
+                Thread.sleep(LONG_DELAY_MS);
+            }
+            catch(InterruptedException success) {
+            }
+        }
+    }
+
+    /**
+     * For use as ThreadFactory in constructors
+     */
+    static class SimpleThreadFactory implements ThreadFactory{
+        public Thread newThread(Runnable r){
+            return new Thread(r);
+        }
+    }
+
+    static class TrackedShortRunnable implements Runnable {
+        volatile boolean done = false;
+        public void run() {
+            try {
+                Thread.sleep(SMALL_DELAY_MS);
+                done = true;
+            } catch(Exception e){
+            }
+        }
+    }
+
+    static class TrackedMediumRunnable implements Runnable {
+        volatile boolean done = false;
+        public void run() {
+            try {
+                Thread.sleep(MEDIUM_DELAY_MS);
+                done = true;
+            } catch(Exception e){
+            }
+        }
+    }
+
+    static class TrackedLongRunnable implements Runnable {
+        volatile boolean done = false;
+        public void run() {
+            try {
+                Thread.sleep(LONG_DELAY_MS);
+                done = true;
+            } catch(Exception e){
+            }
+        }
+    }
+
+    static class TrackedNoOpRunnable implements Runnable {
+        volatile boolean done = false;
+        public void run() {
+            done = true;
+        }
+    }
+
+    static class TrackedCallable implements Callable {
+        volatile boolean done = false;
+        public Object call() {
+            try {
+                Thread.sleep(SMALL_DELAY_MS);
+                done = true;
+            } catch(Exception e){
+            }
+            return Boolean.TRUE;
+        }
+    }
+
+
+    /**
+     * For use as RejectedExecutionHandler in constructors
+     */
+    static class NoOpREHandler implements RejectedExecutionHandler{
+        public void rejectedExecution(Runnable r, ThreadPoolExecutor executor){}
+    }
+
+
+}

Added: experimental/jsr166/src/jsr166y/LoopHelpers.java
===================================================================
--- experimental/jsr166/src/jsr166y/LoopHelpers.java	                        (rev 0)
+++ experimental/jsr166/src/jsr166y/LoopHelpers.java	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,104 @@
+package jsr166y;
+
+/*
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ */
+
+/**
+ * Misc utilities in JSR166 performance tests
+ */
+
+import java.util.concurrent.*;
+import java.util.concurrent.atomic.*;
+
+class LoopHelpers {
+
+    // Some mindless computation to do between synchronizations...
+
+    /**
+     * generates 32 bit pseudo-random numbers.
+     * Adapted from http://www.snippets.org
+     * Appears to be a Lehmer-style generator with multiplier 16807;
+     * the result is always forced into the positive non-zero range.
+     */
+    public static int compute1(int x) {
+        int lo = 16807 * (x & 0xFFFF);   // low 16 bits of the product
+        int hi = 16807 * (x >>> 16);     // high 16 bits of the product
+        lo += (hi & 0x7FFF) << 16;
+        if ((lo & 0x80000000) != 0) {
+            // fold the sign bit back in to stay within 31 bits
+            lo &= 0x7fffffff;
+            ++lo;
+        }
+        lo += hi >>> 15;
+        if (lo == 0 || (lo & 0x80000000) != 0) {
+            lo &= 0x7fffffff;
+            ++lo;
+        }
+        return lo;
+    }
+
+    /**
+     *  Computes a linear congruential random number a random number
+     *  of times.
+     */
+    public static int compute2(int x) {
+        // low bits of x choose how many (0-7) LCG steps to take
+        int loops = (x >>> 4) & 7;
+        while (loops-- > 0) {
+            x = (x * 2147483647) % 16807;
+        }
+        return x;
+    }
+
+    /**
+     * An actually useful random number generator, but unsynchronized.
+     * Basically same as java.util.Random.
+     */
+    public static class SimpleRandom {
+        // same 48-bit LCG constants as java.util.Random
+        private final static long multiplier = 0x5DEECE66DL;
+        private final static long addend = 0xBL;
+        private final static long mask = (1L << 48) - 1;
+        // per-instance sequence number decorrelates default seeds
+        static final AtomicLong seq = new AtomicLong(1);
+        private long seed = System.nanoTime() + seq.getAndIncrement();
+
+        public void setSeed(long s) {
+            seed = s;
+        }
+
+        public int next() {
+            long nextseed = (seed * multiplier + addend) & mask;
+            seed = nextseed;
+            return ((int)(nextseed >>> 17)) & 0x7FFFFFFF;
+        }
+    }
+
+    /**
+     * Barrier action that records the first and second times it runs,
+     * giving the elapsed nanoseconds between two barrier trips.
+     */
+    public static class BarrierTimer implements Runnable {
+        public volatile long startTime;
+        public volatile long endTime;
+        public void run() {
+            long t = System.nanoTime();
+            if (startTime == 0)
+                startTime = t;
+            else
+                endTime = t;
+        }
+        public void clear() {
+            startTime = 0;
+            endTime = 0;
+        }
+        public long getTime() {
+            return endTime - startTime;
+        }
+    }
+
+    /**
+     * Right-justifies n in a 9-character-wide field (wider numbers are
+     * returned unchanged).
+     */
+    public static String rightJustify(long n) {
+        // There's probably a better way to do this...
+        String field = "         ";
+        String num = Long.toString(n);
+        if (num.length() >= field.length())
+            return num;
+        StringBuffer b = new StringBuffer(field);
+        b.replace(b.length()-num.length(), b.length(), num);
+        return b.toString();
+    }
+
+}

Added: experimental/jsr166/src/jsr166y/MapCheck.java
===================================================================
--- experimental/jsr166/src/jsr166y/MapCheck.java	                        (rev 0)
+++ experimental/jsr166/src/jsr166y/MapCheck.java	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,607 @@
+package jsr166y;
+
+/*
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ */
+
+/*
+ * @test %I% %E%
+ * @bug 4486658
+ * @compile -source 1.5 MapCheck.java
+ * @run main/timeout=240 MapCheck
+ * @summary Times and checks basic map operations
+ */
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.Serializable;
+import java.util.Enumeration;
+import java.util.Hashtable;
+import java.util.IdentityHashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class MapCheck {
+
+    static final int absentSize = 1 << 17;          // pool of objects used as values / missing keys
+    static final int absentMask = absentSize - 1;   // absentSize is a power of two, so this masks an index
+    static Object[] absent = new Object[absentSize];
+
+    static final Object MISSING = new Object();     // sentinel distinct from every key and value
+
+    static TestTimer timer = new TestTimer();       // shared timer used by all t* phases
+
+    /** Assertion that, unlike the assert keyword, cannot be disabled at run time. */
+    static void reallyAssert(boolean b) {
+        if (!b) {
+            throw new Error("Failed Assertion");
+        }
+    }
+
+    /**
+     * Entry point. Optional arguments: [0] map class name to test,
+     * [1] number of trials, [2] map size; a fourth argument of any
+     * value additionally runs the serialization test.
+     */
+    public static void main(String[] args) throws Exception {
+        Class mapClass = ConcurrentReferenceHashMap.class;
+        int numTests = 100;
+        int size = 50000;
+
+        if (args.length > 0) {
+            try {
+                mapClass = Class.forName(args[0]);
+            } catch(ClassNotFoundException e) {
+                // FIX: chain the original exception instead of discarding it
+                throw new RuntimeException("Class " + args[0] + " not found.", e);
+            }
+        }
+
+
+        if (args.length > 1)
+            numTests = Integer.parseInt(args[1]);
+
+        if (args.length > 2)
+            size = Integer.parseInt(args[2]);
+
+        boolean doSerializeTest = args.length > 3;
+
+        System.out.println("Testing " + mapClass.getName() + " trials: " + numTests + " size: " + size);
+
+        // populate the pool of never-inserted objects and the key set
+        for (int i = 0; i < absentSize; ++i) absent[i] = new Object();
+
+        Object[] key = new Object[size];
+        for (int i = 0; i < size; ++i) key[i] = new Object();
+
+        forceMem(size * 8);
+
+        for (int rep = 0; rep < numTests; ++rep) {
+            runTest(newMap(mapClass), key);
+        }
+
+        TestTimer.printStats();
+
+
+        if (doSerializeTest)
+            stest(newMap(mapClass), size);
+    }
+
+    /**
+     * Instantiates the map class under test via its no-arg constructor.
+     *
+     * @throws RuntimeException (with the original failure as its cause)
+     *         if the class cannot be instantiated
+     */
+    static Map newMap(Class cl) {
+        try {
+            Map m = (Map)cl.newInstance();
+            return m;
+        } catch(Exception e) {
+            // FIX: preserve the underlying cause for diagnosis
+            throw new RuntimeException("Can't instantiate " + cl + ": " + e, e);
+        }
+    }
+
+
+    /**
+     * Shuffles the key array and runs one full test pass over the map.
+     */
+    static void runTest(Map s, Object[] key) {
+        shuffle(key);
+        int size = key.length;
+        long startTime = System.currentTimeMillis();
+        test(s, key);
+        long time = System.currentTimeMillis() - startTime;
+        // NOTE(review): 'size' and 'time' are computed but never used;
+        // per-phase timing is reported through the shared TestTimer.
+    }
+
+    /**
+     * Allocates and touches n boxed longs to put pressure on the heap
+     * before timing begins.
+     */
+    static void forceMem(int n) {
+        // force enough memory
+        Long[] junk = new Long[n];
+        for (int i = 0; i < junk.length; ++i) junk[i] = new Long(i);
+        int sum = 0;
+        // consume the array, presumably so the allocation cannot be
+        // optimized away -- TODO confirm intent
+        for (int i = 0; i < junk.length; ++i)
+            sum += (int)(junk[i].longValue() + i);
+        if (sum == 0) System.out.println("Useless number = " + sum);
+        junk = null;
+        //        System.gc();
+    }
+
+
+    static void t1(String nm, int n, Map s, Object[] key, int expect) {
+        int sum = 0;
+        int iters = 4;
+        timer.start(nm, n * iters);
+        for (int j = 0; j < iters; ++j) {
+            for (int i = 0; i < n; i++) {
+                if (s.get(key[i]) != null) ++sum;
+            }
+        }
+        timer.finish();
+        reallyAssert (sum == expect * iters);
+    }
+
+    /** Timed removal of the first n keys; counts those actually present. */
+    static void t2(String nm, int n, Map s, Object[] key, int expect) {
+        int removed = 0;
+        timer.start(nm, n);
+        for (int i = 0; i < n; ++i) {
+            if (s.remove(key[i]) != null) {
+                ++removed;
+            }
+        }
+        timer.finish();
+        reallyAssert(removed == expect);
+    }
+
+    /** Timed insertion of n keys; counts keys that were not already mapped. */
+    static void t3(String nm, int n, Map s, Object[] key, int expect) {
+        int added = 0;
+        timer.start(nm, n);
+        for (int i = 0; i < n; ++i) {
+            if (s.put(key[i], absent[i & absentMask]) == null) {
+                ++added;
+            }
+        }
+        timer.finish();
+        reallyAssert(added == expect);
+    }
+
+    /** Timed containsKey over the first n keys; counts the hits. */
+    static void t4(String nm, int n, Map s, Object[] key, int expect) {
+        int hits = 0;
+        timer.start(nm, n);
+        for (int i = 0; i < n; ++i) {
+            if (s.containsKey(key[i])) {
+                ++hits;
+            }
+        }
+        timer.finish();
+        reallyAssert(hits == expect);
+    }
+
+    /** Timed removal of every other key, iterating from the top down. */
+    static void t5(String nm, int n, Map s, Object[] key, int expect) {
+        int removed = 0;
+        timer.start(nm, n / 2);
+        for (int i = n - 2; i >= 0; i -= 2) {
+            if (s.remove(key[i]) != null) {
+                ++removed;
+            }
+        }
+        timer.finish();
+        reallyAssert(removed == expect);
+    }
+
+    /** Timed mixed get() over two key pools; expects exactly n hits overall. */
+    static void t6(String nm, int n, Map s, Object[] k1, Object[] k2) {
+        int hits = 0;
+        timer.start(nm, n * 2);
+        for (int i = 0; i < n; ++i) {
+            if (s.get(k1[i]) != null) {
+                ++hits;
+            }
+            if (s.get(k2[i & absentMask]) != null) {
+                ++hits;
+            }
+        }
+        timer.finish();
+        reallyAssert(hits == n);
+    }
+
+    /** Timed mixed containsKey over two key pools; expects exactly n hits. */
+    static void t7(String nm, int n, Map s, Object[] k1, Object[] k2) {
+        int hits = 0;
+        timer.start(nm, n * 2);
+        for (int i = 0; i < n; ++i) {
+            if (s.containsKey(k1[i])) {
+                ++hits;
+            }
+            if (s.containsKey(k2[i & absentMask])) {
+                ++hits;
+            }
+        }
+        timer.finish();
+        reallyAssert(hits == n);
+    }
+
+    /** Single timed pass of get() over the first n keys; counts the hits. */
+    static void t8(String nm, int n, Map s, Object[] key, int expect) {
+        int hits = 0;
+        timer.start(nm, n);
+        for (int i = 0; i < n; ++i) {
+            if (s.get(key[i]) != null) {
+                ++hits;
+            }
+        }
+        timer.finish();
+        reallyAssert(hits == expect);
+    }
+
+
+    static void t9(Map s) {
+        int sum = 0;
+        int iters = 20;
+        timer.start("ContainsValue (/n)     ", iters * s.size());
+        int step = absentSize / iters;
+        for (int i = 0; i < absentSize; i += step)
+            if (s.containsValue(absent[i])) ++sum;
+        timer.finish();
+        reallyAssert (sum != 0);
+    }
+
+
+    /** Timed membership checks via the keySet() view rather than the map itself. */
+    static void ktest(Map s, int size, Object[] key) {
+        timer.start("ContainsKey            ", size);
+        Set ks = s.keySet();
+        int hits = 0;
+        for (int i = 0; i < size; ++i) {
+            if (ks.contains(key[i])) {
+                ++hits;
+            }
+        }
+        timer.finish();
+        reallyAssert(hits == size);
+    }
+
+
+    /** Timed traversal of the key view; every element should be present. */
+    static void ittest1(Map s, int size) {
+        int seen = 0;
+        timer.start("Iter Key               ", size);
+        Iterator it = s.keySet().iterator();
+        while (it.hasNext()) {
+            if (it.next() != MISSING) {
+                ++seen;
+            }
+        }
+        timer.finish();
+        reallyAssert(seen == size);
+    }
+
+    /** Timed traversal of the value view; every element should be present. */
+    static void ittest2(Map s, int size) {
+        int seen = 0;
+        timer.start("Iter Value             ", size);
+        Iterator it = s.values().iterator();
+        while (it.hasNext()) {
+            if (it.next() != MISSING) {
+                ++seen;
+            }
+        }
+        timer.finish();
+        reallyAssert(seen == size);
+    }
+    /** Timed traversal of the entry view; every element should be present. */
+    static void ittest3(Map s, int size) {
+        int seen = 0;
+        timer.start("Iter Entry             ", size);
+        Iterator it = s.entrySet().iterator();
+        while (it.hasNext()) {
+            if (it.next() != MISSING) {
+                ++seen;
+            }
+        }
+        timer.finish();
+        reallyAssert(seen == size);
+    }
+
+    // Stress-tests the entry-set iterator: walks size-pos entries, removes
+    // the current one via Iterator.remove(), verifies the removal is visible
+    // through the map, completes the traversal, restores the removed
+    // mapping, and checks (via an IdentityHashMap) that every key was seen
+    // exactly once.  Only reachable from the commented-out loop in ittest().
+    static void ittest4(Map s, int size, int pos) {
+        IdentityHashMap seen = new IdentityHashMap(size);
+        reallyAssert (s.size() == size);
+        int sum = 0;
+        timer.start("Iter XEntry            ", size);
+        Iterator it = s.entrySet().iterator();
+        Object k = null;
+        Object v = null;
+        for (int i = 0; i < size-pos; ++i) {
+            Map.Entry x = (Map.Entry)(it.next());
+            k = x.getKey();
+            v = x.getValue();
+            seen.put(k, k);
+            if (x != MISSING)
+                ++sum;
+        }
+        // Remove the entry last returned by the iterator and confirm the
+        // change is reflected by the map itself.
+        reallyAssert (s.containsKey(k));
+        it.remove();
+        reallyAssert (!s.containsKey(k));
+        while (it.hasNext()) {
+            Map.Entry x = (Map.Entry)(it.next());
+            Object k2 = x.getKey();
+            seen.put(k2, k2);
+            if (x != MISSING)
+                ++sum;
+        }
+
+        // Restore the removed mapping so the map is unchanged on exit.
+        reallyAssert (s.size() == size-1);
+        s.put(k, v);
+        reallyAssert (seen.size() == size);
+        timer.finish();
+        reallyAssert (sum == size);
+        reallyAssert (s.size() == size);
+    }
+
+
+    // Driver for the iterator benchmarks; the ittest4 sweep is disabled
+    // (it is far more expensive and mutates the map during timing).
+    static void ittest(Map s, int size) {
+        ittest1(s, size);
+        ittest2(s, size);
+        ittest3(s, size);
+        //        for (int i = 0; i < size-1; ++i)
+        //            ittest4(s, size, i);
+    }
+
+    // Timed key traversal via the legacy Hashtable Enumeration API.
+    static void entest1(Hashtable ht, int size) {
+        int sum = 0;
+
+        timer.start("Iter Enumeration Key   ", size);
+        for (Enumeration en = ht.keys(); en.hasMoreElements(); ) {
+            if (en.nextElement() != MISSING)
+                ++sum;
+        }
+        timer.finish();
+        reallyAssert (sum == size);
+    }
+
+    // Timed value traversal via the legacy Hashtable Enumeration API.
+    static void entest2(Hashtable ht, int size) {
+        int sum = 0;
+        timer.start("Iter Enumeration Value ", size);
+        for (Enumeration en = ht.elements(); en.hasMoreElements(); ) {
+            if (en.nextElement() != MISSING)
+                ++sum;
+        }
+        timer.finish();
+        reallyAssert (sum == size);
+    }
+
+
+    // Like entest1 but drives the Enumeration with a counted for-loop
+    // instead of hasMoreElements() ("Iterf" = fixed-count iteration),
+    // isolating the cost of nextElement() alone.
+    static void entest3(Hashtable ht, int size) {
+        int sum = 0;
+
+        timer.start("Iterf Enumeration Key  ", size);
+        Enumeration en = ht.keys();
+        for (int i = 0; i < size; ++i) {
+            if (en.nextElement() != MISSING)
+                ++sum;
+        }
+        timer.finish();
+        reallyAssert (sum == size);
+    }
+
+    // Fixed-count counterpart of entest2 (values via Enumeration).
+    static void entest4(Hashtable ht, int size) {
+        int sum = 0;
+        timer.start("Iterf Enumeration Value", size);
+        Enumeration en = ht.elements();
+        for (int i = 0; i < size; ++i) {
+            if (en.nextElement() != MISSING)
+                ++sum;
+        }
+        timer.finish();
+        reallyAssert (sum == size);
+    }
+
+    // Runs the Enumeration benchmarks (three rounds each) — but only when
+    // the map under test actually is a Hashtable; a no-op otherwise.
+    static void entest(Map s, int size) {
+        if (s instanceof Hashtable) {
+            Hashtable ht = (Hashtable)s;
+            //            entest3(ht, size);
+            //            entest4(ht, size);
+            entest1(ht, size);
+            entest2(ht, size);
+            entest1(ht, size);
+            entest2(ht, size);
+            entest1(ht, size);
+            entest2(ht, size);
+        }
+    }
+
+    // Empties the map by removing every key through the keySet() iterator.
+    static void rtest(Map s, int size) {
+        timer.start("Remove (iterator)      ", size);
+        for (Iterator it = s.keySet().iterator(); it.hasNext(); ) {
+            it.next();
+            it.remove();
+        }
+        timer.finish();
+    }
+
+    // Empties the map by removing through the values() iterator.
+    // NOTE(review): timer label duplicates rtest's "Remove (iterator)", so
+    // both benchmarks accumulate under a single entry in TestTimer.accum.
+    static void rvtest(Map s, int size) {
+        timer.start("Remove (iterator)      ", size);
+        for (Iterator it = s.values().iterator(); it.hasNext(); ) {
+            it.next();
+            it.remove();
+        }
+        timer.finish();
+    }
+
+
+    // Round-trips `s` into a freshly constructed map of the same class and
+    // exercises putAll/equals/hashCode/entrySet-contains consistency, then
+    // drains `s` through its entry-set view and clears the copy.
+    // Requires the map class to expose a public no-arg constructor
+    // (s.getClass().newInstance()); bails out with a stack trace otherwise.
+    static void dtest(Map s, int size, Object[] key) {
+        timer.start("Put (putAll)           ", size * 2);
+        Map s2 = null;
+        try {
+            s2 = (Map) (s.getClass().newInstance());
+            s2.putAll(s);
+        }
+        catch (Exception e) { e.printStackTrace(); return; }
+        timer.finish();
+
+        timer.start("Iter Equals            ", size * 2);
+        boolean eqt = s2.equals(s) && s.equals(s2);
+        reallyAssert (eqt);
+        timer.finish();
+
+        timer.start("Iter HashCode          ", size * 2);
+        int shc = s.hashCode();
+        int s2hc = s2.hashCode();
+        reallyAssert (shc == s2hc);
+        timer.finish();
+
+        // Re-adding all present mappings measures the overwrite path.
+        timer.start("Put (present)          ", size);
+        s2.putAll(s);
+        timer.finish();
+
+        timer.start("Iter EntrySet contains ", size * 2);
+        Set es2 = s2.entrySet();
+        int sum = 0;
+        for (Iterator i1 = s.entrySet().iterator(); i1.hasNext(); ) {
+            Object entry = i1.next();
+            if (es2.contains(entry)) ++sum;
+        }
+        timer.finish();
+        reallyAssert (sum == size);
+
+        t6("Get                    ", size, s2, key, absent);
+
+        // Perturb one mapping in the copy and confirm equals/hashCode now
+        // disagree.  NOTE(review): the s1h != s2h assertion can in principle
+        // fail on a hash collision between the two maps.
+        Object hold = s2.get(key[size-1]);
+        s2.put(key[size-1], absent[0]);
+        timer.start("Iter Equals            ", size * 2);
+        eqt = s2.equals(s) && s.equals(s2);
+        reallyAssert (!eqt);
+        timer.finish();
+
+        timer.start("Iter HashCode          ", size * 2);
+        int s1h = s.hashCode();
+        int s2h = s2.hashCode();
+        reallyAssert (s1h != s2h);
+        timer.finish();
+
+        // Restore the perturbed entry, then drain `s` by removing each of
+        // s2's entries through s's entry-set view.
+        s2.put(key[size-1], hold);
+        timer.start("Remove (iterator)      ", size * 2);
+        Iterator s2i = s2.entrySet().iterator();
+        Set es = s.entrySet();
+        while (s2i.hasNext())
+            es.remove(s2i.next());
+        timer.finish();
+
+        reallyAssert (s.isEmpty());
+
+        timer.start("Clear                  ", size);
+        s2.clear();
+        timer.finish();
+        reallyAssert (s2.isEmpty() && s.isEmpty());
+    }
+
+    // Serialization round-trip: fills the map with Integer->Boolean entries,
+    // writes it to MapCheck.dat, reads it back, and checks equality.  A
+    // no-op for non-Serializable maps; the equality check is skipped for
+    // IdentityHashMap, whose identity-based equality means a deserialized
+    // copy never compares equal to the original.
+    // NOTE(review): the streams are not closed on failure paths and `in` is
+    // never closed at all — acceptable for a throwaway benchmark only.
+    static void stest(Map s, int size) throws Exception {
+        if (!(s instanceof Serializable))
+            return;
+        System.out.print("Serialize              : ");
+
+        for (int i = 0; i < size; i++) {
+            s.put(new Integer(i), Boolean.TRUE);
+        }
+
+        long startTime = System.currentTimeMillis();
+
+        FileOutputStream fs = new FileOutputStream("MapCheck.dat");
+        ObjectOutputStream out = new ObjectOutputStream(new BufferedOutputStream(fs));
+        out.writeObject(s);
+        out.close();
+
+        FileInputStream is = new FileInputStream("MapCheck.dat");
+        ObjectInputStream in = new ObjectInputStream(new BufferedInputStream(is));
+        Map m = (Map)in.readObject();
+
+        long endTime = System.currentTimeMillis();
+        long time = endTime - startTime;
+
+        System.out.print(time + "ms");
+
+        if (s instanceof IdentityHashMap) return;
+        reallyAssert (s.equals(m));
+    }
+
+
+    // Runs the full single-threaded benchmark suite against `s`, using `key`
+    // as the present-key set and the static `absent` pool as the absent set.
+    // Repopulates between phases; dtest() leaves both maps empty at the end.
+    static void test(Map s, Object[] key) {
+        int size = key.length;
+
+        t3("Put (absent)           ", size, s, key, size);
+        t3("Put (present)          ", size, s, key, 0);
+        t7("ContainsKey            ", size, s, key, absent);
+        t4("ContainsKey            ", size, s, key, size);
+        ktest(s, size, key);
+        t4("ContainsKey            ", absentSize, s, absent, 0);
+        t6("Get                    ", size, s, key, absent);
+        t1("Get (present)          ", size, s, key, size);
+        t1("Get (absent)           ", absentSize, s, absent, 0);
+        t2("Remove (absent)        ", absentSize, s, absent, 0);
+        t5("Remove (present)       ", size, s, key, size / 2);
+        t3("Put (half present)     ", size, s, key, size / 2);
+
+        ittest(s, size);
+        entest(s, size);
+        t9(s);
+        rtest(s, size);
+
+        t4("ContainsKey            ", size, s, key, 0);
+        t2("Remove (absent)        ", size, s, key, 0);
+        t3("Put (presized)         ", size, s, key, size);
+        dtest(s, size, key);
+    }
+
+    // Wall-clock timer that accumulates per-operation costs in the static
+    // `accum` TreeMap, keyed both by the exact test name and by a coarse
+    // category (Get/Put/Remove/Iter) derived from the name's prefix.
+    static class TestTimer {
+        private String name;
+        private long numOps;
+        private long startTime;
+        private String cname;
+
+        static final java.util.TreeMap accum = new java.util.TreeMap();
+
+        // Prints the mean ns/op per accumulated entry; falls back to the
+        // single recorded sample (`least`) when number == 0 (see Stats).
+        static void printStats() {
+            for (Iterator it = accum.entrySet().iterator(); it.hasNext(); ) {
+                Map.Entry e = (Map.Entry)(it.next());
+                Stats stats = ((Stats)(e.getValue()));
+                int n = stats.number;
+                double t;
+                if (n > 0)
+                    t = stats.sum / n;
+                else
+                    t = stats.least;
+                long nano = Math.round(1000000.0 * t);
+                System.out.println(e.getKey() + ": " + nano);
+            }
+        }
+
+        // Begins timing `numOps` operations under the given test name.
+        void start(String name, long numOps) {
+            this.name = name;
+            this.cname = classify();
+            this.numOps = numOps;
+            startTime = System.currentTimeMillis();
+        }
+
+
+        // Maps the test name onto one of four aggregate buckets, or null
+        // when the name has no recognized prefix (no category accumulation).
+        String classify() {
+            if (name.startsWith("Get"))
+                return "Get                    ";
+            else if (name.startsWith("Put"))
+                return "Put                    ";
+            else if (name.startsWith("Remove"))
+                return "Remove                 ";
+            else if (name.startsWith("Iter"))
+                return "Iter                   ";
+            else
+                return null;
+        }
+
+        // Stops the clock and folds time-per-op into the exact-name entry
+        // and, when classify() matched, into the category entry as well.
+        void finish() {
+            long endTime = System.currentTimeMillis();
+            long time = endTime - startTime;
+            double timePerOp = ((double)time)/numOps;
+
+            Object st = accum.get(name);
+            if (st == null)
+                accum.put(name, new Stats(timePerOp));
+            else {
+                Stats stats = (Stats) st;
+                stats.sum += timePerOp;
+                stats.number++;
+                if (timePerOp < stats.least) stats.least = timePerOp;
+            }
+
+            if (cname != null) {
+                st = accum.get(cname);
+                if (st == null)
+                    accum.put(cname, new Stats(timePerOp));
+                else {
+                    Stats stats = (Stats) st;
+                    stats.sum += timePerOp;
+                    stats.number++;
+                    if (timePerOp < stats.least) stats.least = timePerOp;
+                }
+            }
+
+        }
+
+    }
+
+    // Accumulator for TestTimer.  The first sample only seeds `least`;
+    // `number` counts the samples subsequently folded into `sum`, which is
+    // why printStats() falls back to `least` when number == 0.
+    static class Stats {
+        double sum = 0;
+        double least;
+        int number = 0;
+        Stats(double t) { least = t; }
+    }
+
+    static Random rng = new Random();
+
+    // In-place Fisher-Yates shuffle of `keys`.
+    static void shuffle(Object[] keys) {
+        int size = keys.length;
+        for (int i=size; i>1; i--) {
+            int r = rng.nextInt(i);
+            Object t = keys[i-1];
+            keys[i-1] = keys[r];
+            keys[r] = t;
+        }
+    }
+
+}
+

Added: experimental/jsr166/src/jsr166y/MapLoops.java
===================================================================
--- experimental/jsr166/src/jsr166y/MapLoops.java	                        (rev 0)
+++ experimental/jsr166/src/jsr166y/MapLoops.java	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,211 @@
+package jsr166y;
+
+/*
+ * Written by Doug Lea with assistance from members of JCP JSR-166
+ * Expert Group and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ */
+
+/*
+ * @test %I% %E%
+ * @bug 4486658
+ * @compile -source 1.5 MapLoops.java
+ * @run main/timeout=1600 MapLoops
+ * @summary Exercise multithreaded maps, by default ConcurrentHashMap.
+ * Multithreaded hash table test.  Each thread does a random walk
+ * though elements of "key" array. On each iteration, it checks if
+ * table includes key.  If absent, with probability pinsert it
+ * inserts it, and if present, with probability premove it removes
+ * it.  (pinsert and premove are expressed as percentages to simplify
+ * parsing from command line.)
+ */
+
+import java.util.*;
+import java.util.concurrent.*;
+
+public class MapLoops {
+    // Defaults; each may be overridden by a positional command-line
+    // argument (see main).  pinsert/premove are percentages.
+    static int nkeys       = 10000;
+    static int pinsert     = 60;
+    static int premove     = 2;
+    static int maxThreads  = 100;
+    static int nops        = 100000;
+    // The percentages rescaled to the [0, Integer.MAX_VALUE] range of the
+    // random generator, so step() can compare raw draws directly.
+    static int removesPerMaxRandom;
+    static int insertsPerMaxRandom;
+
+    static final ExecutorService pool = Executors.newCachedThreadPool();
+
+    // Failures observed in worker threads; main() rethrows at the end.
+    static final List<Throwable> throwables
+	= new CopyOnWriteArrayList<Throwable>();
+
+    // args: [mapClass [maxThreads [nkeys [pinsert [premove [nops]]]]]]
+    public static void main(String[] args) throws Exception {
+
+        Class mapClass = null;
+        if (args.length > 0) {
+            try {
+                mapClass = Class.forName(args[0]);
+            } catch (ClassNotFoundException e) {
+                throw new RuntimeException("Class " + args[0] + " not found.");
+            }
+        }
+        else
+            mapClass = ConcurrentWeakHashMap.class;
+
+        if (args.length > 1)
+            maxThreads = Integer.parseInt(args[1]);
+
+        if (args.length > 2)
+            nkeys = Integer.parseInt(args[2]);
+
+        if (args.length > 3)
+            pinsert = Integer.parseInt(args[3]);
+
+        if (args.length > 4)
+            premove = Integer.parseInt(args[4]);
+
+        if (args.length > 5)
+            nops = Integer.parseInt(args[5]);
+
+        // normalize probabilities wrt random number generator
+        removesPerMaxRandom = (int)(((double)premove/100.0 * 0x7FFFFFFFL));
+        insertsPerMaxRandom = (int)(((double)pinsert/100.0 * 0x7FFFFFFFL));
+
+        System.out.print("Class: " + mapClass.getName());
+        System.out.print(" threads: " + maxThreads);
+        System.out.print(" size: " + nkeys);
+        System.out.print(" ins: " + pinsert);
+        System.out.print(" rem: " + premove);
+        System.out.print(" ops: " + nops);
+        System.out.println();
+
+        // Ramp the thread count up to maxThreads, re-running small counts as
+        // warm-ups; `k` tracks the next power-of-two milestone and `i` also
+        // visits the intermediate 1.5x points.
+        int k = 1;
+        int warmups = 2;
+        for (int i = 1; i <= maxThreads;) {
+            Thread.sleep(100);
+            test(i, nkeys, mapClass);
+            if (warmups > 0)
+                --warmups;
+            else if (i == k) {
+                k = i << 1;
+                i = i + (i >>> 1);
+            }
+            else if (i == 1 && k == 2) {
+                i = k;
+                warmups = 1;
+            }
+            else
+                i = k;
+        }
+        pool.shutdown();
+	if (! pool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS))
+	    throw new Error();
+
+	if (! throwables.isEmpty())
+	    throw new Error
+		(throwables.size() + " thread(s) terminated abruptly.");
+    }
+
+    // Builds n Integer keys from the shared pseudo-random sequence.
+    static Integer[] makeKeys(int n) {
+        LoopHelpers.SimpleRandom rng = new LoopHelpers.SimpleRandom();
+        Integer[] key = new Integer[n];
+        for (int i = 0; i < key.length; ++i)
+            key[i] = new Integer(rng.next());
+        return key;
+    }
+
+    // In-place Fisher-Yates shuffle (not referenced in this file's visible
+    // code; presumably kept for ad-hoc experiments).
+    static void shuffleKeys(Integer[] key) {
+        Random rng = new Random();
+        for (int i = key.length; i > 1; --i) {
+            int j = rng.nextInt(i);
+            Integer tmp = key[j];
+            key[j] = key[i-1];
+            key[i-1] = tmp;
+        }
+    }
+
+    // Runs one timed round with `i` worker threads.  The barrier has i+1
+    // parties (workers plus this thread) and a BarrierTimer action, so the
+    // two await() calls below bracket exactly the workers' measured run.
+    static void test(int i, int nkeys, Class mapClass) throws Exception {
+        System.out.print("Threads: " + i + "\t:");
+        Map<Integer, Integer> map = (Map<Integer,Integer>)mapClass.newInstance();
+        Integer[] key = makeKeys(nkeys);
+        // Uncomment to start with a non-empty table
+        //        for (int j = 0; j < nkeys; j += 4) // start 1/4 occupied
+        //            map.put(key[j], key[j]);
+        LoopHelpers.BarrierTimer timer = new LoopHelpers.BarrierTimer();
+        CyclicBarrier barrier = new CyclicBarrier(i+1, timer);
+        for (int t = 0; t < i; ++t)
+            pool.execute(new Runner(map, key, barrier));
+        barrier.await();
+        barrier.await();
+        long time = timer.getTime();
+        long tpo = time / (i * (long)nops);
+        System.out.print(LoopHelpers.rightJustify(tpo) + " ns per op");
+        double secs = (double)(time) / 1000000000.0;
+        System.out.println("\t " + secs + "s run time");
+        map.clear();
+    }
+
+    // One worker thread: random-walks over `key`, probabilistically
+    // inserting or removing mappings, until its `nops` budget is spent.
+    static class Runner implements Runnable {
+        final Map<Integer,Integer> map;
+        final Integer[] key;
+        final LoopHelpers.SimpleRandom rng = new LoopHelpers.SimpleRandom();
+        final CyclicBarrier barrier;
+        int position;
+        int total;
+
+        Runner(Map<Integer,Integer> map, Integer[] key,  CyclicBarrier barrier) {
+            this.map = map;
+            this.key = key;
+            this.barrier = barrier;
+            position = key.length / 2;
+        }
+
+        // Performs one probe and returns its cost in operation credits:
+        // 1 for a read-only probe, 2 when the map was also mutated.
+        int step() {
+            // random-walk around key positions,  bunching accesses
+            int r = rng.next();
+            position += (r & 7) - 3;
+            while (position >= key.length) position -= key.length;
+            while (position < 0) position += key.length;
+
+            Integer k = key[position];
+            Integer x = map.get(k);
+
+            if (x != null) {
+                if (x.intValue() != k.intValue())
+                    throw new Error("bad mapping: " + x + " to " + k);
+
+                if (r < removesPerMaxRandom) {
+                    if (map.remove(k) != null) {
+                        position = total % key.length; // move from position
+                        return 2;
+                    }
+                }
+            }
+            else if (r < insertsPerMaxRandom) {
+                ++position;
+                map.put(k, k);
+                return 2;
+            }
+
+            // Uncomment to add a little computation between accesses
+            //            total += LoopHelpers.compute1(k.intValue());
+            total += r;
+            return 1;
+        }
+
+        // Awaits the start barrier, burns through the op budget, then awaits
+        // the end barrier; any throwable is logged and collected for main().
+        public void run() {
+            try {
+                barrier.await();
+                int ops = nops;
+                while (ops > 0)
+                    ops -= step();
+                barrier.await();
+            }
+            catch (Throwable throwable) {
+		synchronized(System.err) {
+		    System.err.println("--------------------------------");
+		    throwable.printStackTrace();
+		}
+		throwables.add(throwable);
+            }
+        }
+    }
+}

Added: experimental/jsr166/src/jsr166y/SynchronizedWeakHashMap.java
===================================================================
--- experimental/jsr166/src/jsr166y/SynchronizedWeakHashMap.java	                        (rev 0)
+++ experimental/jsr166/src/jsr166y/SynchronizedWeakHashMap.java	2008-03-27 17:09:21 UTC (rev 5470)
@@ -0,0 +1,73 @@
+package jsr166y;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import java.util.WeakHashMap;
+
+// for comparison
+// Benchmark baseline: a WeakHashMap wrapped by Collections.synchronizedMap,
+// with every Map operation (including equals/hashCode) delegating to the
+// wrapper.  NOTE(review): the synchronized wrapper is private, so callers
+// cannot take the lock that Collections.synchronizedMap requires for safe
+// iteration over entrySet()/keySet()/values().
+public class SynchronizedWeakHashMap<K, V> implements Map<K, V> {
+	private Map<K, V> map;
+	
+	public SynchronizedWeakHashMap()
+	{
+		map = Collections.synchronizedMap(new WeakHashMap<K,V>());
+	}
+
+	public void clear() {
+		map.clear();
+	}
+
+	public boolean containsKey(Object key) {
+		return map.containsKey(key);
+	}
+
+	public boolean containsValue(Object value) {
+		return map.containsValue(value);
+	}
+
+	public Set<Entry<K, V>> entrySet() {
+		return map.entrySet();
+	}
+
+	public boolean equals(Object o) {
+		return map.equals(o);
+	}
+
+	public V get(Object key) {
+		return map.get(key);
+	}
+
+	public int hashCode() {
+		return map.hashCode();
+	}
+
+	public boolean isEmpty() {
+		return map.isEmpty();
+	}
+
+	public Set<K> keySet() {
+		return map.keySet();
+	}
+
+	public V put(K key, V value) {
+		return map.put(key, value);
+	}
+
+	public void putAll(Map<? extends K, ? extends V> t) {
+		map.putAll(t);
+	}
+
+	public V remove(Object key) {
+		return map.remove(key);
+	}
+
+	public int size() {
+		return map.size();
+	}
+
+	public Collection<V> values() {
+		return map.values();
+	}
+}




More information about the jbosscache-commits mailing list