[jboss-cvs] JBossCache/src/org/jboss/cache ...

Elias Ross <genman at noderunner.net>
Sun Nov 19 22:53:54 EST 2006


  User: genman  
  Date: 06/11/19 22:53:54

  Modified:    src/org/jboss/cache               AbstractNode.java
                        CacheSPI.java DataNode.java Fqn.java Node.java
                        NodeImpl.java NodeSPI.java OptimisticTreeNode.java
                        RegionManager.java TransactionEntry.java
                        TransactionTable.java TreeCache.java
                        TreeCacheProxyImpl.java TreeNode.java
  Log:
  JBCACHE-867: move methods from TreeCacheProxyImpl to Node, and refactor the Node classes as well
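  
  As a rough illustration of the call pattern this change moves callers towards (a minimal sketch assuming only the interfaces shown in the diffs below; the wrapper class and method names are hypothetical):
  
     import org.jboss.cache.Node;
     import org.jboss.cache.NodeSPI;
     import org.jboss.cache.lock.LockingException;
     import org.jboss.cache.lock.NodeLock;
     import org.jboss.cache.lock.TimeoutException;
  
     class LockAcquisitionSketch
     {
        // Acquire a write lock on a node through the new SPI path.
        void lockForWrite(Node node, Object owner, long timeout)
              throws LockingException, TimeoutException, InterruptedException
        {
           NodeSPI spi = node.getNodeSPI();    // accessor added to Node in this commit
           NodeLock lock = spi.getLock();      // NodeLock replaces IdentityLock in the public API
           lock.acquire(owner, timeout, NodeLock.LockType.WRITE);
        }
     }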
  
  Revision  Changes    Path
  1.22      +1 -197    JBossCache/src/org/jboss/cache/AbstractNode.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: AbstractNode.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/AbstractNode.java,v
  retrieving revision 1.21
  retrieving revision 1.22
  diff -u -b -r1.21 -r1.22
  --- AbstractNode.java	15 Sep 2006 18:01:01 -0000	1.21
  +++ AbstractNode.java	20 Nov 2006 03:53:53 -0000	1.22
  @@ -3,207 +3,11 @@
    */
   package org.jboss.cache;
   
  -import org.apache.commons.logging.Log;
  -import org.apache.commons.logging.LogFactory;
  -
  -import java.util.Collection;
  -import java.util.Collections;
  -import java.util.HashMap;
  -import java.util.Iterator;
  -import java.util.Map;
  -
   /**
    * Base class for {@link NodeImpl}.
    *
    * @author manik
    */
  -public abstract class AbstractNode implements DataNode
  +public abstract class AbstractNode implements DataNode, NodeSPI
   {
  -
  -   private static Log log = LogFactory.getLog(AbstractNode.class);
  -
  -   /**
  -    * Default output indent for printing.
  -    */
  -   protected static final int INDENT = 4;
  -
  -   /**
  -    * Name of the node.
  -    */
  -   protected Fqn fqn;
  -
  -   /**
  -    * Map of children names to children.
  -    */
  -   protected Map children;
  -
  -   /**
  -    * Map of general data keys to values.
  -    */
  -   protected Map data;
  -
  -   /**
  -    * Returns the name of this node.
  -    */
  -   public Object getName()
  -   {
  -      return fqn.getLast();
  -   }
  -
  -   /**
  -    * Returns the name of this node.
  -    */
  -   public Fqn getFqn()
  -   {
  -      return fqn;
  -   }
  -
  -   public void setFqn(Fqn f)
  -   {
  -      fqn = f;
  -   }
  -
  -   public TreeNode getChild(Object child_name)
  -   {
  -      if (child_name == null) return null;
  -      return children == null ? null : (DataNode) children.get(child_name);
  -   }
  -
  -   /**
  -    * Returns null, children may override this method.
  -    */
  -   public TreeNode getParent()
  -   {
  -      return null;
  -   }
  -
  -   public boolean childExists(Object child_name)
  -   {
  -      if (child_name == null) return false;
  -      return children != null && children.containsKey(child_name);
  -   }
  -
  -   public Map getChildren()
  -   {
  -      return children == null ? Collections.emptyMap() : children;
  -   }
  -
  -   public void setChildren(Map children)
  -   {
  -      this.children = children;
  -   }
  -
  -   public boolean hasChildren()
  -   {
  -      return children != null && children.size() > 0;
  -   }
  -
  -   public void put(Map data)
  -   {
  -      put(data, false);
  -   }
  -
  -   public void removeChild(Object child_name)
  -   {
  -      if (children != null)
  -      {
  -         children.remove(child_name);
  -         if (log.isTraceEnabled())
  -         {
  -            log.trace("removed child " + child_name);
  -         }
  -      }
  -   }
  -
  -   public void removeAllChildren()
  -   {
  -      if (children != null) children.clear();
  -   }
  -
  -   public void print(StringBuffer sb, int indent)
  -   {
  -      printIndent(sb, indent);
  -      sb.append(Fqn.SEPARATOR).append(getName());
  -      if (children != null && children.size() > 0)
  -      {
  -         Collection values = children.values();
  -         for (Iterator it = values.iterator(); it.hasNext();)
  -         {
  -            sb.append("\n");
  -            ((DataNode) it.next()).print(sb, indent + INDENT);
  -         }
  -      }
  -   }
  -
  -   public void printIndent(StringBuffer sb, int indent)
  -   {
  -      if (sb != null)
  -      {
  -         for (int i = 0; i < indent; i++)
  -         {
  -            sb.append(" ");
  -         }
  -      }
  -   }
  -
  -   public void addChild(Object child_name, TreeNode n)
  -   {
  -      if (child_name != null)
  -      {
  -         children().put(child_name, n);
  -      }
  -   }
  -
  -   /**
  -    * Returns null or the Map in use with the data. This needs to be called with a lock if concurrency is a concern.
  -    */
  -   protected final Map data()
  -   {
  -      if (data == null)
  -      {
  -         data = new HashMap();
  -      }
  -      return data;
  -   }
  -
  -   /**
  -    * Override this if concurrent thread access may occur, in which case return a concurrently modifiable Map.
  -    */
  -   protected Map children()
  -   {
  -      if (children == null) children = new HashMap();
  -      return children;
  -   }
  -
  -   /**
  -    * Adds details of the node into a map as strings.
  -    */
  -   protected void printDetailsInMap(StringBuffer sb, int indent, Map map)
  -   {
  -      Map.Entry entry;
  -      printIndent(sb, indent);
  -      indent += 2;   // increse it
  -      sb.append(Fqn.SEPARATOR).append(getName());
  -      sb.append("\n");
  -      if (map != null)
  -      {
  -         for (Iterator it = map.entrySet().iterator(); it.hasNext();)
  -         {
  -            entry = (Map.Entry) it.next();
  -            sb.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
  -         }
  -      }
  -      if (children != null && children.size() > 0)
  -      {
  -         Collection values = children.values();
  -         for (Iterator it = values.iterator(); it.hasNext();)
  -         {
  -            sb.append("\n");
  -            ((DataNode) it.next()).printDetails(sb, indent);
  -         }
  -      }
  -   }
  -
  -   public abstract DataNode clone() throws CloneNotSupportedException;
  -
   }
  
  
  
  1.21      +23 -3     JBossCache/src/org/jboss/cache/CacheSPI.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: CacheSPI.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/CacheSPI.java,v
  retrieving revision 1.20
  retrieving revision 1.21
  diff -u -b -r1.20 -r1.21
  --- CacheSPI.java	14 Nov 2006 14:17:11 -0000	1.20
  +++ CacheSPI.java	20 Nov 2006 03:53:54 -0000	1.21
  @@ -7,6 +7,7 @@
   package org.jboss.cache;
   
   import org.jboss.cache.buddyreplication.BuddyManager;
  +import org.jboss.cache.buddyreplication.GravitateResult;
   import org.jboss.cache.interceptors.Interceptor;
   import org.jboss.cache.loader.CacheLoader;
   import org.jboss.cache.loader.CacheLoaderManager;
  @@ -17,6 +18,8 @@
   import java.util.List;
   import java.util.Map;
   
  +import javax.transaction.Transaction;
  +
   /**
    * A more detailed interface to {@link Cache}, which is used when writing plugins for or extending JBoss Cache.
    *
  @@ -41,7 +44,6 @@
       */
      void addInterceptor(Interceptor i, int position);
   
  -
      /**
       * Removes the interceptor at a specified position, where the first interceptor in the chain
       * is at position 0 and the last one at getInterceptorChain().size() - 1.
  @@ -50,7 +52,6 @@
       */
      void removeInterceptor(int position);
   
  -
      /**
       * Retrieves the configured {@link CacheLoader}.  If more than one {@link CacheLoader} is configured, this method
       * returns an instance of {@link org.jboss.cache.loader.ChainingCacheLoader}.
  @@ -113,9 +114,28 @@
   
      RegionManager getRegionManager();
   
  -   /*
  +   /**
  +    * Returns the global transaction for this local transaction.
  +    * Optionally creates a new global transaction if it does not exist.
  +    * @param createIfNotExists if true creates a new transaction if none exists
  +    */
  +   GlobalTransaction getCurrentTransaction(Transaction tx, boolean createIfNotExists);
  +
  +   /**
       * Retrieves the notifier attached with this instance of the cache.  See {@link Notifier}, a class
       * that is responsible for emitting notifications to registered {@link CacheListener}s.
       */
      Notifier getNotifier();
  +   
  +   /**
  +    * Returns a node without accessing the interceptor chain.
  +    * @return a node if one exists or null
  +    */
  +   Node peek(Fqn fqn);
  +   
  +   /**
   +    * Used with buddy replication's data gravitation interceptor.
   +    * @return the result of the data gravitation lookup
  +    */
  +   GravitateResult gravitateData(Fqn fqn, boolean searchSubtrees, boolean marshal);
   }
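  
  A minimal usage sketch for the methods added to CacheSPI above, assuming a CacheSPI reference and a JTA Transaction obtained elsewhere (the wrapper class and method names are hypothetical):
  
     import javax.transaction.Transaction;
     import org.jboss.cache.CacheSPI;
     import org.jboss.cache.Fqn;
     import org.jboss.cache.GlobalTransaction;
     import org.jboss.cache.Node;
  
     class CacheSpiUsageSketch
     {
        // Map a local JTA transaction onto the cache's GlobalTransaction, creating one if needed.
        GlobalTransaction currentGtx(CacheSPI cache, Transaction tx)
        {
           return cache.getCurrentTransaction(tx, true);
        }
  
        // Check whether a node exists without going through the interceptor chain.
        boolean nodeExists(CacheSPI cache, String path)
        {
           Node n = cache.peek(Fqn.fromString(path));
           return n != null;
        }
     }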
  
  
  
  1.19      +1 -25     JBossCache/src/org/jboss/cache/DataNode.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: DataNode.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/DataNode.java,v
  retrieving revision 1.18
  retrieving revision 1.19
  diff -u -b -r1.18 -r1.19
  --- DataNode.java	29 Aug 2006 11:54:10 -0000	1.18
  +++ DataNode.java	20 Nov 2006 03:53:54 -0000	1.19
  @@ -7,6 +7,7 @@
   package org.jboss.cache;
   
   import org.jboss.cache.lock.LockingException;
  +import org.jboss.cache.lock.NodeLock;
   import org.jboss.cache.lock.TimeoutException;
   
   /**
  @@ -16,29 +17,4 @@
    */
   public interface DataNode extends TreeNode
   {
  -
  -   public enum LockType
  -   {
  -      NONE, READ, WRITE}
  -
  -   /**
  -    * Initialized property for debugging "print_lock_details"
  -    */
  -   boolean PRINT_LOCK_DETAILS = Boolean.getBoolean("print_lock_details");
  -
  -   /**
  -    * Returns true if a lock is acquired.
  -    *
  -    * @param lock_acquisition_timeout milliseconds to wait
  -    * @param lockTypeWrite            lock type to use
  -    */
  -   boolean acquire(Object caller, long lock_acquisition_timeout, LockType lockType)
  -           throws InterruptedException, LockingException, TimeoutException;
  -
  -
  -   /**
  -    * Returns a copy of this node.
  -    */
  -   DataNode clone() throws CloneNotSupportedException;
  -
   }
  
  
  
  1.41      +3 -3      JBossCache/src/org/jboss/cache/Fqn.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: Fqn.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/Fqn.java,v
  retrieving revision 1.40
  retrieving revision 1.41
  diff -u -b -r1.40 -r1.41
  --- Fqn.java	12 Oct 2006 23:03:57 -0000	1.40
  +++ Fqn.java	20 Nov 2006 03:53:54 -0000	1.41
  @@ -41,7 +41,7 @@
    * <p/>
     * Another way to look at it is that the "/" separator is only parsed when it forms a part of a String passed in to Fqn.fromString() and not otherwise.
    *
  - * @version $Revision: 1.40 $
  + * @version $Revision: 1.41 $
    */
   public class Fqn implements Cloneable, Externalizable, Comparable
   {
  @@ -126,7 +126,7 @@
      public Fqn(List names)
      {
         if (names != null)
  -         elements = new ArrayList(names);
  +         elements = Arrays.asList(names.toArray());
         else
            elements = Collections.EMPTY_LIST;
      }
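  
  As the class javadoc above notes, the "/" separator is only parsed by Fqn.fromString(); a short hypothetical sketch of that difference (the expected sizes in the comments follow from the javadoc, not from this diff):
  
     import java.util.Collections;
     import org.jboss.cache.Fqn;
  
     class FqnSeparatorSketch
     {
        void demo()
        {
           Fqn parsed = Fqn.fromString("/a/b");                      // parsed into two elements: "a", "b"
           Fqn literal = new Fqn(Collections.singletonList("a/b"));  // one element whose name is literally "a/b"
           System.out.println(parsed.size() + " vs " + literal.size()); // expected: 2 vs 1
        }
     }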
  
  
  
  1.50      +5 -0      JBossCache/src/org/jboss/cache/Node.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: Node.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/Node.java,v
  retrieving revision 1.49
  retrieving revision 1.50
  diff -u -b -r1.49 -r1.50
  --- Node.java	15 Nov 2006 06:49:25 -0000	1.49
  +++ Node.java	20 Nov 2006 03:53:54 -0000	1.50
  @@ -195,4 +195,9 @@
       * @return Returns true if the child node denoted by the {@link Fqn} passed in exists.
       */
      boolean hasChild(Fqn f);
  +   
  +   /**
  +    * Returns the service provider interface for this node.
  +    */
  +   NodeSPI getNodeSPI();
   }
  
  
  
  1.15      +412 -237  JBossCache/src/org/jboss/cache/NodeImpl.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: NodeImpl.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/NodeImpl.java,v
  retrieving revision 1.14
  retrieving revision 1.15
  diff -u -b -r1.14 -r1.15
  --- NodeImpl.java	15 Sep 2006 18:01:01 -0000	1.14
  +++ NodeImpl.java	20 Nov 2006 03:53:54 -0000	1.15
  @@ -6,12 +6,11 @@
    */
   package org.jboss.cache;
   
  -
  -import EDU.oswego.cs.dl.util.concurrent.ConcurrentReaderHashMap;
   import org.apache.commons.logging.Log;
   import org.apache.commons.logging.LogFactory;
   import org.jboss.cache.factories.NodeFactory;
   import org.jboss.cache.lock.IdentityLock;
  +import org.jboss.cache.lock.NodeLock;
   import org.jboss.cache.lock.LockingException;
   import org.jboss.cache.lock.TimeoutException;
   import org.jboss.cache.lock.UpgradeException;
  @@ -19,6 +18,9 @@
   import org.jboss.cache.marshall.MethodCallFactory;
   import org.jboss.cache.marshall.MethodDeclarations;
   
  +import java.io.ObjectStreamException;
  +import java.io.Serializable;
  +import java.util.AbstractSet;
   import java.util.Collection;
   import java.util.Collections;
   import java.util.HashMap;
  @@ -26,19 +28,27 @@
   import java.util.Iterator;
   import java.util.Map;
   import java.util.Set;
  +import java.util.concurrent.ConcurrentHashMap;
  +
  +import javax.naming.OperationNotSupportedException;
   
   /**
    * Basic data node class.
    */
  -public class NodeImpl extends AbstractNode
  +public class NodeImpl implements DataNode, Node, NodeSPI, Serializable
   {
   
  -   private static Log log = LogFactory.getLog(NodeImpl.class);
  +   private static final long serialVersionUID = 1L;
  +   
  +   /**
  +    * Default output indent for printing.
  +    */
  +   private static final int INDENT = 4;
   
      /**
  -    * Cached for performance.
  +    * Debug log.
       */
  -   private static boolean trace = log.isTraceEnabled();
  +   private static Log log = LogFactory.getLog(NodeImpl.class);
   
      /**
       * True if all children have been loaded. This is set when TreeCache.getChildrenNames() is called.
  @@ -49,24 +59,32 @@
       * Lock manager that manages locks to be acquired when accessing the node inside a transaction. Lazy set just in case
       * locking is not needed.
       */
  -   private IdentityLock lock_ = null;
  +   private transient IdentityLock lock_ = null;
   
      /**
       * A reference of the TreeCache instance.
       */
  -   private TreeCache cache;
  +   private transient CacheSPI cache;
   
      /**
  -    * Construct an empty node; used by serialization.
  +    * Name of the node.
       */
  -   public NodeImpl()
  -   {
  -   }
  +   private Fqn fqn;
  +
  +   /**
  +    * Map of children names to children.
  +    */
  +   private Map<Object, Node> children;
  +
  +   /**
  +    * Map of general data keys to values.
  +    */
  +   private Map data;
   
      /**
       * Constructs a new node with a name, etc.
       */
  -   public NodeImpl(Object child_name, Fqn fqn, NodeImpl parent, Map data, TreeCache cache)
  +   public NodeImpl(Object child_name, Fqn fqn, Map data, CacheSPI cache)
      {
         init(child_name, fqn, cache);
         if (data != null)
  @@ -82,36 +100,28 @@
       *                {@link #data} field; <code>false</code> if param <code>data</code>'s contents should be copied into
       *                this object's {@link #data} field.
       */
  -   public NodeImpl(Object child_name, Fqn fqn, NodeImpl parent, Map data, boolean mapSafe,
  -                   TreeCache cache)
  +   public NodeImpl(Object child_name, Fqn fqn, Map data, boolean mapSafe, CacheSPI cache)
      {
         init(child_name, fqn, cache);
         if (data != null)
         {
            if (mapSafe)
            {
  -            this.data = data;
  +            if (data.getClass().getName().startsWith("java.util.Collections"))
   +               throw new IllegalArgumentException("Collections wrapper maps are not supported: " + data);
   +            this.data = data; // mapSafe: the caller guarantees this reference can be stored directly
            }
            else
            {
  -            this.data().putAll(data);
  +            this.data = new HashMap(data);
            }
         }
      }
   
      /**
  -    * Constructs a new node with a single key and value.
  -    */
  -   public NodeImpl(Object child_name, Fqn fqn, NodeImpl parent, Object key, Object value, TreeCache cache)
  -   {
  -      init(child_name, fqn, cache);
  -      data().put(key, value);
  -   }
  -
  -   /**
       * Initializes with a name and FQN and cache.
       */
  -   protected final void init(Object child_name, Fqn fqn, TreeCache cache)
  +   protected final void init(Object child_name, Fqn fqn, CacheSPI cache)
      {
         if (cache == null)
         {
  @@ -128,7 +138,7 @@
      /**
       * Returns a parent by checking the TreeMap by name.
       */
  -   public TreeNode getParent()
  +   public Node getParent()
      {
         if (fqn.isRoot())
         {
  @@ -141,7 +151,7 @@
      {
         if (lock_ == null)
         {
  -         lock_ = new IdentityLock(cache.getConfiguration().getIsolationLevel(), fqn);
  +         lock_ = new IdentityLock(cache.getConfiguration().getIsolationLevel(), this);
         }
      }
   
  @@ -151,39 +161,33 @@
         {
            if (getFqn().isRoot())
            {
  -            children = new ConcurrentReaderHashMap(64);
  +            children = new ConcurrentHashMap<Object, Node>(64, .5f, 16);
            }
            else
            {
  -            children = new ConcurrentReaderHashMap(4);
   +            // Fewer segments to save memory
  +            children = new ConcurrentHashMap<Object, Node>(4, .75f, 4);
            }
         }
         return children;
      }
   
  -   private void setTreeCacheInstance(TreeCache cache)
  +   public void setCache(CacheSPI cache)
      {
         this.cache = cache;
         this.lock_ = null;
  -   }
  -
  -   /**
  -    * Set the tree cache instance recursively down to the children as well. Note that this method is not currently
  -    * thread safe.
  -    */
  -   public void setRecursiveTreeCacheInstance(TreeCache cache)
  -   {
  -
  -      setTreeCacheInstance(cache);
  -
         if (children != null)
         {
  -         for (Iterator it = children.keySet().iterator(); it.hasNext();)
  +         for (Node n : children.values())
            {
  -            DataNode nd = (DataNode) children.get(it.next());
  -            nd.setRecursiveTreeCacheInstance(cache);
  +            n.getNodeSPI().setCache(cache);
  +         }
            }
         }
  +   
  +   public CacheSPI getCache()
  +   {
  +      return cache;
      }
   
      public boolean getChildrenLoaded()
  @@ -196,20 +200,18 @@
         children_loaded = flag;
      }
   
  -   public Object get(Object key)
  -   {
  -      synchronized (this)
  +   public synchronized Object get(Object key)
         {
  -         return data == null ? null : data.get(key);
  -      }
  +      if (data == null)
  +         return null;
  +      return data.get(key);
      }
   
  -   public boolean containsKey(Object key)
  -   {
  -      synchronized (this)
  +   public synchronized boolean containsKey(Object key)
         {
  -         return data != null && data.containsKey(key);
  -      }
  +      if (data == null)
  +         return false;
  +      return data.containsKey(key);
      }
   
      /**
  @@ -217,11 +219,7 @@
       */
      public Set getDataKeys()
      {
  -      synchronized (this)
  -      {
  -         if (data == null) return Collections.emptySet();
  -         return data.keySet();
  -      }
  +      return getKeys();
      }
   
      boolean isReadLocked()
  @@ -242,7 +240,7 @@
      /**
       * @deprecated Use getLock() instead
       */
  -   public IdentityLock getImmutableLock()
  +   public NodeLock getImmutableLock()
      {
         initLock();
         return lock_;
  @@ -254,29 +252,21 @@
         return lock_;
      }
   
  -   public Map getData()
  -   {
  -      synchronized (this)
  +   public synchronized Map getData()
         {
            if (data == null)
            {
  -            return new HashMap(0);
  -         }
  -         return new HashMap(data);
  +         return Collections.EMPTY_MAP;
         }
  +      return Collections.unmodifiableMap(data);
      }
   
  -   public int numAttributes()
  -   {
  -      synchronized (this)
  +   public synchronized int numAttributes()
         {
            return data != null ? data.size() : 0;
         }
  -   }
   
  -   public void put(Map data, boolean erase)
  -   {
  -      synchronized (this)
  +   public synchronized void put(Map data, boolean erase)
         {
            if (erase)
            {
  @@ -285,20 +275,39 @@
                  this.data.clear();
               }
            }
  -         if (data == null) return;
  -         this.data().putAll(data);
  -      }
  +      if (data == null)
  +         return;
  +      data().putAll(data);
      }
   
      public Object put(Object key, Object value)
      {
  +      if (cache.getInvocationContext().getOptionOverrides().isBypassInterceptorChain())
  +      {
  +         Object result;
         synchronized (this)
         {
  -         return this.data().put(key, value);
  +            try {
  +               result = data().put(key, value);
  +            } catch (UnsupportedOperationException e) {
  +               throw new IllegalStateException("data is of type " + data.getClass());
  +            }
  +         }
  +         cache.getInvocationContext().getOptionOverrides().setBypassInterceptorChain(false);
  +         return result;
  +      }
  +      else
  +      {
  +         return cache.put(getFqn(), key, value);
  +      }
         }
  +
  +   public Node getOrCreateChild(Object child_name, GlobalTransaction gtx)
  +   {
  +      return getOrCreateChild(child_name, gtx, true);
      }
   
  -   public TreeNode getOrCreateChild(Object child_name, GlobalTransaction gtx, boolean createIfNotExists)
  +   private Node getOrCreateChild(Object child_name, GlobalTransaction gtx, boolean createIfNotExists)
      {
         DataNode child;
         if (child_name == null)
  @@ -329,7 +338,8 @@
                  children.put(child_name, child);
                  if (gtx != null)
                  {
  -                  MethodCall undo_op = MethodCallFactory.create(MethodDeclarations.removeNodeMethodLocal, gtx, child_fqn, false);
  +                  MethodCall undo_op = MethodCallFactory.create(MethodDeclarations.removeNodeMethodLocal, gtx,
  +                        child_fqn, false);
                     cache.addUndoOperation(gtx, undo_op);
                     // add the node name to the list maintained for the current tx
                     // (needed for abort/rollback of transaction)
  @@ -341,7 +351,7 @@
            // notify if we actually created a new child
            if (newChild == child)
            {
  -            if (trace)
  +            if (log.isTraceEnabled())
               {
                  log.trace("created child: fqn=" + child_fqn);
               }
  @@ -352,255 +362,420 @@
   
      }
   
  -   public TreeNode createChild(Object child_name, Fqn fqn, TreeNode parent)
  +   public Node createChild(Object child_name, Fqn fqn, Node parent)
      {
         return getOrCreateChild(child_name, null, true);
      }
   
  -   public TreeNode createChild(Object child_name, Fqn fqn, TreeNode parent, Object key, Object value)
  +   public Node createChild(Object child_name, Fqn fqn, Node parent, Object key, Object value)
      {
  -      TreeNode n = getOrCreateChild(child_name, null, true);
  +      Node n = getOrCreateChild(child_name, null, true);
         n.put(key, value);
         return n;
      }
   
  -   public Object remove(Object key)
  +   public synchronized Object remove(Object key)
  +   {
  +      return data != null ? data.remove(key) : null;
  +   }
  +
  +   public void printDetails(StringBuffer sb, int indent)
      {
  +      Map tmp;
         synchronized (this)
         {
  -         return data != null ? data.remove(key) : null;
  +         tmp = data != null ? new HashMap(data) : null;
         }
  +      printDetailsInMap(sb, indent, tmp);
      }
   
  -   public void clear()
  +   /**
  +    * Returns a debug string.
  +    */
  +   public String toString()
      {
  +      StringBuffer sb = new StringBuffer();
  +      sb.append("Node ").append(fqn);
         synchronized (this)
         {
            if (data != null)
            {
  -            data.clear();
  -            data = null;
  +            sb.append(" data=").append(data.keySet());
  +         }
  +      }
  +      if (children != null && !children.isEmpty())
  +      {
  +         sb.append(" child=").append(children.keySet());
            }
  +      if (lock_ != null)
  +      {
  +         if (isReadLocked())
  +            sb.append(" RL");
  +         if (isWriteLocked())
  +            sb.append(" WL");
         }
  +      return sb.toString();
      }
   
  -   public void printDetails(StringBuffer sb, int indent)
  +   public void release(Object caller)
      {
  -      Map tmp;
  -      synchronized (this)
  +      if (lock_ != null)
         {
  -         tmp = data != null ? new HashMap(data) : null;
  +         lock_.release(caller);
         }
  -      printDetailsInMap(sb, indent, tmp);
      }
   
  -   public void printLockInfo(StringBuffer sb, int indent)
  +   public void releaseForce()
  +   {
  +      if (lock_ != null)
      {
  -      boolean locked = lock_ != null && lock_.isLocked();
  +         lock_.releaseForce();
  +      }
  +   }
   
  -      printIndent(sb, indent);
  -      sb.append(Fqn.SEPARATOR).append(getName());
  -      if (locked)
  +   public void releaseAllForce()
         {
  -         sb.append("\t(");
  -         lock_.toString(sb);
  -         sb.append(")");
  +      for (Node n: children.values()) {
  +         ((NodeImpl)n).releaseAllForce();
  +      }
  +      releaseForce();
         }
   
  -      if (children != null && children.size() > 0)
  +   public Node addChild(Fqn f)
         {
  -         Collection values = children.values();
  -         for (Iterator it = values.iterator(); it.hasNext();)
  +      if (log.isTraceEnabled())
  +         log.trace("adding child " + f + " to " + getFqn());
  +      if (cache.getInvocationContext().getOptionOverrides().isBypassInterceptorChain())
            {
  -            sb.append("\n");
  -            ((DataNode) it.next()).printLockInfo(sb, indent + INDENT);
  +         Node newNode = this;
  +         GlobalTransaction gtx = cache.getInvocationContext().getGlobalTransaction();
  +
  +         if (f.size() == 1)
  +         {
  +            newNode = getOrCreateChild(f.getLast(), gtx);
            }
  +         else
  +         {
  +            // recursively create children
  +            Node currentParent = this;
  +            for (Object o : f.peekElements())
  +            {
  +               newNode = currentParent.getNodeSPI().getOrCreateChild(o, gtx);
  +               currentParent = newNode;
         }
      }
   
  -   /**
  -    * Returns a debug string.
  -    */
  -   public String toString()
  +         cache.getInvocationContext().getOptionOverrides().setBypassInterceptorChain(false);
  +         return newNode;
  +      }
  +      else
      {
  -      StringBuffer sb = new StringBuffer();
  -      sb.append("\nfqn=" + fqn);
  -      synchronized (this)
  +         Fqn nf = new Fqn(getFqn(), f);
  +         cache.put(nf, Collections.emptyMap());
  +         return getChild(f);
  +      }
  +   }
  +
  +   public synchronized void clearData()
         {
            if (data != null)
            {
  -            sb.append("\ndata=" + data);
  +         data.clear();
            }
         }
  -      if (lock_ != null)
  +
  +   public Node getChild(Fqn fqn)
         {
  -         sb.append("\n read locked=").append(isReadLocked());
  -         sb.append("\n write locked=").append(isWriteLocked());
  +      if (cache.getInvocationContext().getOptionOverrides().isBypassInterceptorChain())
  +      {
  +         Node child = getChild(fqn.getLast());
  +         cache.getInvocationContext().getOptionOverrides().setBypassInterceptorChain(false);
  +         return child;
  +      }
  +      else
  +      {
  +         return cache.getChild(new Fqn(getFqn(), fqn));
         }
  -      return sb.toString();
      }
   
  -   public DataNode clone() throws CloneNotSupportedException
  +   public Set<Object> getChildrenNames()
      {
  -      DataNode parent = (DataNode) getParent();
  -      DataNode n = (DataNode) NodeFactory.getInstance().createNode(getName(), fqn, parent != null ? (DataNode) parent.clone() : null, data, cache);
  -      n.setChildren(children == null ? null : (HashMap) ((HashMap) children).clone());
  -      return n;
  +      return new ChildrenNames();
      }
   
  -   public boolean acquire(Object caller, long timeout, DataNode.LockType lock_type) throws LockingException, TimeoutException, InterruptedException
  +   public synchronized Set getKeys()
      {
  -      // Note that we rely on IdentityLock for synchronization
  -      try
  +      if (data == null)
  +         return Collections.emptySet();
  +      return Collections.unmodifiableSet(data.keySet());
  +   }
  +
  +   public boolean hasChild(Fqn f)
         {
  -         if (lock_type == DataNode.LockType.NONE)
  +      return cache.hasChild(new Fqn(getFqn(), f));
  +   }
  +
  +   public void move(Node newParent) throws NodeNotExistsException
            {
  -            return true;
  +      // TODO
  +      // move must be added to Cache
  +      ((TreeCacheProxyImpl) cache).treeCache.move(newParent.getFqn(), getFqn());
            }
  -         else if (lock_type == DataNode.LockType.READ)
  +
  +   public void putIfNull(Object k, Object v)
            {
  -            return acquireReadLock(caller, timeout);
  +      if (cache.get(getFqn(), k) == null)
  +         put(k, v);
            }
  -         else
  +
  +   public void putIfNull(Map m)
            {
  -            return acquireWriteLock(caller, timeout);
  +      if (getData().isEmpty())
  +         put(m);
            }
  +
  +   public void removeChild(Fqn fqn)
  +   {
  +      cache.removeChild(new Fqn(getFqn(), fqn));
         }
  -      catch (UpgradeException e)
  +
  +   public Map<Object, Node> getChildrenMap()
         {
  -         StringBuffer buf = new StringBuffer("failure upgrading lock: fqn=").append(fqn).append(", caller=").append(caller).
  -                 append(", lock=").append(lock_.toString(true));
  -         if (trace)
  +      return children();
  +   }
  +
  +   public boolean acquire(Object caller, long lock_acquisition_timeout, NodeLock.LockType lockType) throws InterruptedException,
  +         LockingException, TimeoutException
            {
  -            log.trace(buf.toString());
  +      return getLock().acquire(caller, lock_acquisition_timeout, lockType);
            }
  -         throw new UpgradeException(buf.toString(), e);
  +
  +   public Set acquireAll(Object caller, long timeout, NodeLock.LockType lockType) throws LockingException, TimeoutException,
  +         InterruptedException
  +   {
  +      return getLock().acquireAll(caller, timeout, lockType);
         }
  -      catch (LockingException e)
  +
  +   public void setChildrenMap(Map children)
         {
  -         StringBuffer buf = new StringBuffer("failure acquiring lock: fqn=").append(fqn).append(", caller=").append(caller).
  -                 append(", lock=").append(lock_.toString(true));
  -         if (trace)
  +      this.children = children;
  +   }
  +
  +   public boolean hasChildren()
            {
  -            log.trace(buf.toString());
  +      return children != null && children.size() > 0;
            }
  -         throw new LockingException(buf.toString(), e);
  +
  +   public void put(Map data)
  +   {
  +      put(data, false);
         }
  -      catch (TimeoutException e)
  +
  +   public void removeChild(Object child_name)
  +   {
  +      if (children != null)
         {
  -         StringBuffer buf = new StringBuffer("failure acquiring lock: fqn=").append(fqn).append(", caller=").append(caller).
  -                 append(", lock=").append(lock_.toString(true));
  -         if (trace)
  +         children.remove(child_name);
  +         if (log.isTraceEnabled())
            {
  -            log.trace(buf.toString());
  +            log.trace("removed child " + child_name);
            }
  -         throw new TimeoutException(buf.toString(), e);
         }
      }
   
  -   protected boolean acquireReadLock(Object caller, long timeout) throws LockingException, TimeoutException, InterruptedException
  -   {
  -      initLock();
  -      if (trace)
  +   public void removeChildren()
         {
  -         log.trace(new StringBuffer("acquiring RL: fqn=").append(fqn).append(", caller=").append(caller).
  -                 append(", lock=").append(lock_.toString(DataNode.PRINT_LOCK_DETAILS)));
  +      if (children != null)
  +         children.clear();
  +      children = null;
         }
  -      boolean flag = lock_.acquireReadLock(caller, timeout);
  -      if (trace)
  +
  +   public void print(StringBuffer sb, int indent)
  +   {
  +      printIndent(sb, indent);
  +      sb.append(Fqn.SEPARATOR).append(getName());
  +      if (children != null && children.size() > 0)
  +      {
  +         Collection values = children.values();
  +         for (Iterator it = values.iterator(); it.hasNext();)
         {
  -         log.trace(new StringBuffer("acquired RL: fqn=").append(fqn).append(", caller=").append(caller).
  -                 append(", lock=").append(lock_.toString(DataNode.PRINT_LOCK_DETAILS)));
  +            sb.append("\n");
  +            ((DataNode) it.next()).print(sb, indent + INDENT);
  +         }
         }
  -      return flag;
      }
   
  -   protected boolean acquireWriteLock(Object caller, long timeout) throws LockingException, TimeoutException, InterruptedException
  +   public void printIndent(StringBuffer sb, int indent)
      {
  -      initLock();
  -      if (trace)
  +      if (sb != null)
  +      {
  +         for (int i = 0; i < indent; i++)
         {
  -         log.trace(new StringBuffer("acquiring WL: fqn=").append(fqn).append(", caller=").append(caller).
  -                 append(", lock=").append(lock_.toString(DataNode.PRINT_LOCK_DETAILS)));
  +            sb.append(" ");
  +         }
  +      }
         }
  -      boolean flag = lock_.acquireWriteLock(caller, timeout);
  -      if (trace)
  +
  +   public void addChild(Object child_name, Node n)
  +   {
  +      if (child_name != null)
         {
  -         log.trace(new StringBuffer("acquired WL: fqn=").append(fqn).append(", caller=").append(caller).
  -                 append(", lock=").append(lock_.toString(DataNode.PRINT_LOCK_DETAILS)));
  +         children().put(child_name, n);
         }
  -      return flag;
      }
   
  -   public Set acquireAll(Object caller, long timeout, DataNode.LockType lock_type) throws LockingException, TimeoutException, InterruptedException
  +   /**
  +    * Returns the name of this node.
  +    */
  +   public Object getName()
      {
  -      DataNode tmp;
  -      boolean acquired;
  -      Set retval = new HashSet();
  +      return fqn.getLast();
  +   }
   
  -      if (lock_type == DataNode.LockType.NONE)
  +   /**
   +    * Returns the Fqn of this node.
  +    */
  +   public Fqn getFqn()
         {
  -         return retval;
  +      return fqn;
         }
  -      acquired = acquire(caller, timeout, lock_type);
  -      if (acquired)
  +
  +   public void setFqn(Fqn f)
         {
  -         retval.add(getLock());
  +      fqn = f;
         }
   
  -      if (children != null)
  +   public TreeNode getChild(Object child_name)
         {
  -         for (Iterator it = children.values().iterator(); it.hasNext();)
  +      if (child_name == null)
  +         return null;
  +      return children == null ? null : (DataNode) children.get(child_name);
  +   }
  +
  +   public boolean childExists(Object child_name)
            {
  -            tmp = (DataNode) it.next();
  -            retval.addAll(tmp.acquireAll(caller, timeout, lock_type));
  +      if (child_name == null)
  +         return false;
  +      return children != null && children.containsKey(child_name);
            }
  +
  +   public Set<Node> getChildren()
  +   {
  +      return new ChildrenNodes();
         }
  -      return retval;
  +
  +   public synchronized Map<Object, Object> getRawData()
  +   {
  +      return data();
      }
   
  -   public void release(Object caller)
  +   /**
   +    * Returns the Map in use with the data, creating it if necessary.
  +    */
  +   private Map data()
      {
  -      if (lock_ != null)
  +      if (data == null)
         {
  -         lock_.release(caller);
  +         data = new HashMap<Object, Object>();
         }
  +      return data;
      }
   
  -   public void releaseForce()
  +   /**
   +    * Prints the details of the node and the given data map into the buffer.
  +    */
  +   protected void printDetailsInMap(StringBuffer sb, int indent, Map map)
      {
  -      if (lock_ != null)
  +      Map.Entry entry;
  +      printIndent(sb, indent);
   +      indent += 2; // increase it
  +      sb.append(Fqn.SEPARATOR).append(getName());
  +      sb.append("\n");
  +      if (map != null)
         {
  -         lock_.releaseForce();
  +         for (Iterator it = map.entrySet().iterator(); it.hasNext();)
  +         {
  +            entry = (Map.Entry) it.next();
  +            sb.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
  +         }
  +      }
  +      if (children != null && children.size() > 0)
  +      {
  +         Collection values = children.values();
  +         for (Iterator it = values.iterator(); it.hasNext();)
  +         {
  +            sb.append("\n");
  +            ((DataNode) it.next()).printDetails(sb, indent);
  +         }
         }
      }
   
  -   public void releaseAll(Object owner)
  +   public NodeSPI getNodeSPI()
      {
  -      DataNode tmp;
  -      if (children != null)
  +      return this;
  +   }
  +
  +   private class ChildrenNames extends AbstractSet implements Serializable
         {
  -         for (Iterator it = children.values().iterator(); it.hasNext();)
  +
  +      @Override
  +      public Iterator iterator()
            {
  -            tmp = (DataNode) it.next();
  -            tmp.releaseAll(owner);
  +         if (children == null)
  +            return Collections.emptySet().iterator();
  +         return children.keySet().iterator();
            }
  +
  +      @Override
  +      public boolean contains(Object o)
  +      {
  +         if (children == null)
  +            return false;
  +         return children.containsKey(o);
         }
  -      release(owner);
  +
  +      @Override
  +      public int size()
  +      {
  +         if (children == null)
  +            return 0;
  +         return children.size();
      }
   
  -   public void releaseAllForce()
  +      private Object writeReplace() throws ObjectStreamException
      {
  -      DataNode tmp;
  -      if (children != null)
  +         if (children == null)
  +            return Collections.emptySet();
  +         return Collections.unmodifiableSet(new HashSet(children.keySet()));
  +      }
  +
  +   }
  +
  +   private class ChildrenNodes extends AbstractSet
         {
  -         for (Iterator it = children.values().iterator(); it.hasNext();)
  +
  +      @Override
  +      public Iterator iterator()
            {
  -            tmp = (DataNode) it.next();
  -            tmp.releaseAllForce();
  +         if (children == null)
  +            return Collections.emptySet().iterator();
  +         return children.values().iterator();
            }
  +      
   +      // clear() is not supported, even when the backing children map is empty
  +      public void clear()
  +      {
  +         throw new UnsupportedOperationException();
         }
  -      releaseForce();
  +
  +      @Override
  +      public int size()
  +      {
  +         if (children == null)
  +            return 0;
  +         return children.size();
      }
  -}
   
  +   }
  +
  +}
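  
  From the caller's side, the reworked NodeImpl routes writes through the cache (unless the bypass-interceptor-chain option is set) and exposes live set views of its children. A hypothetical usage sketch using only methods shown in the diff above:
  
     import java.util.Set;
     import org.jboss.cache.Fqn;
     import org.jboss.cache.Node;
     import org.jboss.cache.NodeImpl;
  
     class NodeImplUsageSketch
     {
        void demo(NodeImpl node)
        {
           // addChild(Fqn) delegates to cache.put(...) unless the bypass option is set.
           Node child = node.addChild(Fqn.fromString("x"));
           child.put("key", "value");                    // routed through cache.put(fqn, key, value)
  
           Set<Object> names = node.getChildrenNames();  // live view backed by the children map
           Set<Node> children = node.getChildren();      // live view of the child nodes
        }
     }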
  
  
  
  1.3       +59 -1     JBossCache/src/org/jboss/cache/NodeSPI.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: NodeSPI.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/NodeSPI.java,v
  retrieving revision 1.2
  retrieving revision 1.3
  diff -u -b -r1.2 -r1.3
  --- NodeSPI.java	14 Aug 2006 17:20:35 -0000	1.2
  +++ NodeSPI.java	20 Nov 2006 03:53:54 -0000	1.3
  @@ -6,6 +6,11 @@
    */
   package org.jboss.cache;
   
  +import java.util.Map;
  +
  +import org.jboss.cache.lock.IdentityLock;
  +import org.jboss.cache.lock.NodeLock;
  +
   /**
    * A more detailed interface to {@link Node}, which is used when writing plugins for or extending JBoss Cache.
    *
  @@ -13,11 +18,64 @@
    * @since 2.0.0
    * @author <a href="mailto:manik at jboss.org">Manik Surtani (manik at jboss.org)</a>
    */
  -public interface NodeSPI extends Node
  +public interface NodeSPI
   {
       // TODO:
       // everything that is not already represented by Node
       // that is used by either an interceptor, or eviction policy
       // should be added to this interface, provided they are public.
   
  +   /**
  +    * Returns true if the children of this node were loaded from a cache loader.
  +    */
  +   boolean getChildrenLoaded();
  +
  +   /**
  +    * Sets if the children of this node were loaded from a cache loader.
  +    */
  +   void setChildrenLoaded(boolean loaded);
  +   
  +   /**
  +    * Returns a map to access the raw children.
  +    * The data should only be modified by the cache itself.
  +    * This method should never return null.  
  +    */
  +   Map<Object, Node> getChildrenMap();
  +
  +   /**
   +    * Sets the node's children explicitly.
  +    * The data should only be modified by the cache itself.
  +    * @param children cannot be null
  +    */
  +   void setChildrenMap(Map<Object, Node> children);
  +   
  +   /**
  +    * Returns a map to access the node's data.
  +    * This data should only be modified by the cache itself.
  +    * This method should never return null.
  +    */
  +   Map<Object, Object> getRawData();
  +
  +   /**
  +    * Initializes the cache for this node and all children as well. 
  +    */
  +   void setCache(CacheSPI cache);
  +   
  +   /**
  +    * Returns the cache for this node. 
  +    */
  +   CacheSPI getCache();
  +   
  +   /**
  +    * Returns an existing child or creates a new one using a global transaction. 
   +    * @return the existing or newly created node
  +    */
  +   Node getOrCreateChild(Object name, GlobalTransaction tx);
  +
  +   /**
  +    * Returns a lock for this node.
  +    */
  +   NodeLock getLock();
  +
  +
   }
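  
  A short sketch of how cache internals such as interceptors or eviction policies might read node state through the accessors added above (the wrapper class and method names are hypothetical; per the javadoc, the returned maps should only be modified by the cache itself):
  
     import java.util.Map;
     import org.jboss.cache.Node;
     import org.jboss.cache.NodeSPI;
  
     class NodeSpiAccessSketch
     {
        // Number of data attributes, read through the raw (cache-internal) data map.
        int attributeCount(Node node)
        {
           NodeSPI spi = node.getNodeSPI();
           Map<Object, Object> raw = spi.getRawData();   // never null per the javadoc; treat as read-only
           return raw.size();
        }
  
        // Number of direct children, read through the raw children map.
        int childCount(Node node)
        {
           return node.getNodeSPI().getChildrenMap().size();
        }
     }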
  
  
  
  1.15      +24 -49    JBossCache/src/org/jboss/cache/OptimisticTreeNode.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: OptimisticTreeNode.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/OptimisticTreeNode.java,v
  retrieving revision 1.14
  retrieving revision 1.15
  diff -u -b -r1.14 -r1.15
  --- OptimisticTreeNode.java	21 Jun 2006 11:10:58 -0000	1.14
  +++ OptimisticTreeNode.java	20 Nov 2006 03:53:54 -0000	1.15
  @@ -10,6 +10,7 @@
   import org.apache.commons.logging.LogFactory;
   import org.jboss.cache.optimistic.DefaultDataVersion;
   import org.jboss.cache.optimistic.DataVersion;
  +import org.jboss.util.NullArgumentException;
   
   import java.util.Map;
   
  @@ -21,65 +22,39 @@
   public class OptimisticTreeNode extends NodeImpl
   {
   
  -    private DataVersion version = DefaultDataVersion.ZERO;
  -
  -    private static Log log = LogFactory.getLog(OptimisticTreeNode.class);
  +    private DataVersion version;
   
       /**
        * Although this object has a reference to the TreeCache, the optimistic
        * node is actually disconnected from the TreeCache itself.
        * The parent could be looked up from the TransactionWorkspace.
  +    private Node parent;
        */
  -    private NodeImpl parent;
   
  -    public OptimisticTreeNode()
  +    public OptimisticTreeNode(Object childName, Fqn fqn, Node parent, Map data, DataVersion version, CacheSPI cache)
       {
  +       this(childName, fqn, parent, data, false, cache, DefaultDataVersion.ZERO);
       }
   
  -    /**
  -     * @param child_name
  -     * @param fqn
  -     * @param parent
  -     * @param data
  -     * @param cache
  -     */
  -    public OptimisticTreeNode(Object child_name, Fqn fqn, NodeImpl parent, Map data, TreeCache cache)
  +    public OptimisticTreeNode(Object childName, Fqn fqn, Node parent, Map data, boolean mapSafe, CacheSPI cache)
       {
  -        super(child_name, fqn, parent, data, cache);
  -        this.parent = parent;
  +        this(childName, fqn, parent, data, mapSafe, cache, DefaultDataVersion.ZERO);
       }
   
  -    /**
  -     * @param child_name
  -     * @param fqn
  -     * @param parent
  -     * @param data
  -     * @param cache
  -     */
  -    public OptimisticTreeNode(Object child_name, Fqn fqn, NodeImpl parent, Map data, boolean mapSafe, TreeCache cache)
  +    public OptimisticTreeNode(Object childName, Fqn fqn, Node parent, Map data, boolean mapSafe, CacheSPI cache, DataVersion version)
       {
  -        super(child_name, fqn, parent, data, mapSafe, cache);
  -        this.parent = parent;
  -    }
  -
  -    /**
  -     * @param child_name
  -     * @param fqn
  -     * @param parent
  -     * @param key
  -     * @param value
  -     * @param cache
  -     */
  -    public OptimisticTreeNode(Object child_name, Fqn fqn, NodeImpl parent, Object key, Object value, TreeCache cache)
  -    {
  -        super(child_name, fqn, parent, key, value, cache);
  -        this.parent = parent;
  +        super(childName, fqn, data, mapSafe, cache);
  +        if (version == null)
  +           throw new NullPointerException("version");
  +        //if (parent == null)
  +           //throw new NullPointerException("parent");        
  +        //this.parent = parent;
  +        this.version = version;
       }
   
  -    public OptimisticTreeNode(Object childName, Fqn fqn, NodeImpl parent, Map data, boolean mapSafe, TreeCache cache, DataVersion version)
  +    public OptimisticTreeNode(Object childName, Fqn fqn, Node parent, Map data, CacheSPI cache)
       {
  -        super(childName, fqn, parent, data, mapSafe, cache);
  -        this.parent = parent;
  +       this(childName, fqn, parent, data, false, cache);
       }
   
       /**
  @@ -93,11 +68,11 @@
   
       /**
        * Returns the parent.
  -     */
  -    public TreeNode getParent()
  +    public Node getParent()
       {
          return parent;
       }
  +     */
   
       /**
        * Sets the version id of this node.
  @@ -110,7 +85,7 @@
   
       public String toString()
       {
  -        return super.toString() + "\n(Optimistically locked node)\n";
  +        return "Opt" + super.toString();
       }
       
   }
  
  
  
  1.13      +4 -3      JBossCache/src/org/jboss/cache/RegionManager.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: RegionManager.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/RegionManager.java,v
  retrieving revision 1.12
  retrieving revision 1.13
  diff -u -b -r1.12 -r1.13
  --- RegionManager.java	16 Nov 2006 18:15:26 -0000	1.12
  +++ RegionManager.java	20 Nov 2006 03:53:54 -0000	1.13
  @@ -8,6 +8,7 @@
   import org.jboss.cache.config.EvictionRegionConfig;
   import org.jboss.cache.eviction.EvictionTimerTask;
   import org.jboss.cache.eviction.RegionNameConflictException;
  +import org.jboss.cache.lock.NodeLock;
   import org.jboss.cache.marshall.VersionAwareMarshaller;
   import org.jgroups.Address;
   
  @@ -485,14 +486,14 @@
               {
                  // Acquire locks
                  Object owner = treeCache.getOwnerForLock();
  -               subtreeRoot.acquireAll(owner, stateFetchTimeout, DataNode.LockType.WRITE);
  +               subtreeRoot.getNodeSPI().getLock().acquireAll(owner, stateFetchTimeout, NodeLock.LockType.WRITE);
                  subtreeLocked = true;
   
                  // Lock the parent, as we're about to write to it
                  parent = (DataNode) subtreeRoot.getParent();
                  if (parent != null)
                  {
  -                  parent.acquire(owner, stateFetchTimeout, DataNode.LockType.WRITE);
  +                  parent.getNodeSPI().getLock().acquire(owner, stateFetchTimeout, NodeLock.LockType.WRITE);
                     parentLocked = true;
                  }
   
  @@ -765,7 +766,7 @@
    * @author Ben Wang 02-2004
    * @author Daniel Huang (dhuang at jboss.org)
    * @author Brian Stansberry
  - * @version $Id: RegionManager.java,v 1.12 2006/11/16 18:15:26 msurtani Exp $
  + * @version $Id: RegionManager.java,v 1.13 2006/11/20 03:53:54 genman Exp $
    */
   /*public class ERegionManager
   {
  
  
  
  1.13      +4 -3      JBossCache/src/org/jboss/cache/TransactionEntry.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: TransactionEntry.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/TransactionEntry.java,v
  retrieving revision 1.12
  retrieving revision 1.13
  diff -u -b -r1.12 -r1.13
  --- TransactionEntry.java	25 Aug 2006 14:10:09 -0000	1.12
  +++ TransactionEntry.java	20 Nov 2006 03:53:54 -0000	1.13
  @@ -8,6 +8,7 @@
   
   
   import org.jboss.cache.lock.IdentityLock;
  +import org.jboss.cache.lock.NodeLock;
   import org.jboss.cache.config.Option;
   import org.jboss.cache.marshall.MethodCall;
   
  @@ -31,7 +32,7 @@
    * </ul>
    *
    * @author <a href="mailto:bela at jboss.org">Bela Ban</a> Apr 14, 2003
  - * @version $Revision: 1.12 $
  + * @version $Revision: 1.13 $
    */
   public class TransactionEntry {
   
  @@ -133,7 +134,7 @@
      /**
       * Adds a lock to the end of the lock list.
       */
  -   public void addLock(IdentityLock l) {
  +   public void addLock(NodeLock l) {
         if(l != null) {
            synchronized(locks) {
                if (!locks.contains(l))
  @@ -237,7 +238,7 @@
   
      private void undo(MethodCall undo_op, CacheSPI cache) {
         try {
  -         Object retval = undo_op.invoke(((TreeCacheProxyImpl)cache).treeCache);
  +          Object retval = undo_op.invoke(((TreeCacheProxyImpl)cache).getTreeCache());
             if (retval instanceof Throwable)
             {
                 throw (Throwable) retval;
  
  
  
  1.10      +3 -2      JBossCache/src/org/jboss/cache/TransactionTable.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: TransactionTable.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/TransactionTable.java,v
  retrieving revision 1.9
  retrieving revision 1.10
  diff -u -b -r1.9 -r1.10
  --- TransactionTable.java	25 Aug 2006 14:10:09 -0000	1.9
  +++ TransactionTable.java	20 Nov 2006 03:53:54 -0000	1.10
  @@ -11,6 +11,7 @@
   import org.apache.commons.logging.Log;
   import org.apache.commons.logging.LogFactory;
   import org.jboss.cache.lock.IdentityLock;
  +import org.jboss.cache.lock.NodeLock;
   import org.jboss.cache.marshall.MethodCall;
   
   import javax.transaction.Transaction;
  @@ -25,7 +26,7 @@
    * given TX.
    *
    * @author <a href="mailto:bela at jboss.org">Bela Ban</a> Apr 14, 2003
  - * @version $Revision: 1.9 $
  + * @version $Revision: 1.10 $
    */
   public class TransactionTable {
   
  @@ -180,7 +181,7 @@
      /**
       * Adds a lock to the global transaction.
       */
  -   public void addLock(GlobalTransaction gtx, IdentityLock l) {
  +   public void addLock(GlobalTransaction gtx, NodeLock l) {
         TransactionEntry entry=get(gtx);
         if(entry == null) {
            log.error("transaction entry not found for (gtx=" + gtx + ")");
  
  
  
  1.279     +110 -214  JBossCache/src/org/jboss/cache/TreeCache.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: TreeCache.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/TreeCache.java,v
  retrieving revision 1.278
  retrieving revision 1.279
  diff -u -b -r1.278 -r1.279
  --- TreeCache.java	16 Nov 2006 18:41:25 -0000	1.278
  +++ TreeCache.java	20 Nov 2006 03:53:54 -0000	1.279
  @@ -13,6 +13,7 @@
   import org.jboss.cache.buddyreplication.BuddyGroup;
   import org.jboss.cache.buddyreplication.BuddyManager;
   import org.jboss.cache.buddyreplication.BuddyNotInitException;
  +import org.jboss.cache.buddyreplication.GravitateResult;
   import org.jboss.cache.config.BuddyReplicationConfig;
   import org.jboss.cache.config.CacheLoaderConfig;
   import org.jboss.cache.config.Configuration;
  @@ -24,13 +25,12 @@
   import org.jboss.cache.loader.CacheLoader;
   import org.jboss.cache.loader.CacheLoaderManager;
   import org.jboss.cache.loader.NodeData;
  -import org.jboss.cache.lock.IdentityLock;
   import org.jboss.cache.lock.IsolationLevel;
  +import org.jboss.cache.lock.NodeLock;
   import org.jboss.cache.lock.LockStrategyFactory;
   import org.jboss.cache.lock.LockUtil;
   import org.jboss.cache.lock.LockingException;
   import org.jboss.cache.lock.TimeoutException;
  -import org.jboss.cache.marshall.InactiveRegionAwareRpcDispatcher;
   import org.jboss.cache.marshall.MethodCall;
   import org.jboss.cache.marshall.MethodCallFactory;
   import org.jboss.cache.marshall.MethodDeclarations;
  @@ -95,7 +95,7 @@
    * @author <a href="mailto:manik at jboss.org">Manik Surtani (manik at jboss.org)</a>
    * @author Brian Stansberry
    * @author Daniel Huang (dhuang at jboss.org)
  - * @version $Id: TreeCache.java,v 1.278 2006/11/16 18:41:25 msurtani Exp $
  + * @version $Id: TreeCache.java,v 1.279 2006/11/20 03:53:54 genman Exp $
    *          <p/>
    * @see <a href="http://labs.jboss.com/portal/jbosscache/docs">JBossCache doc</a>
    */
  @@ -110,13 +110,14 @@
      /**
       * Root DataNode.
       */
  -   protected DataNode root = NodeFactory.getInstance().createRootDataNode(NodeFactory.NODE_TYPE_TREENODE, this);
  +   protected DataNode root;
  +   {
  +      this.rootSpi = new TreeCacheProxyImpl(this);
  +      this.root = NodeFactory.getInstance().createRootDataNode(NodeFactory.NODE_TYPE_TREENODE, this.rootSpi);
  +   }
   
      private RegionManager regionManager = null;
   
  -
  -   final static Object NULL = new Object();
  -
      /**
       * The JGroups JChannel in use.
       */
  @@ -551,7 +552,7 @@
   
      public void fetchPartialState(Object sources[], Fqn subtree) throws Exception
      {
  -      if (subtree == null && subtree.isRoot())
  +      if (subtree == null || subtree.isRoot())
         {
            throw new IllegalArgumentException("Cannot fetch partial state. Invalid subtree " + subtree);
         }
  @@ -566,11 +567,13 @@
            // Yes -- cache is configured LOCAL but app doesn't know it -- Brian
            //throw new IllegalArgumentException("Cannot fetch partial state, targets are " + sources + " and stateId is " + stateId);
            if (log.isWarnEnabled())
  -            log.warn("Cannot fetch partial state, targets are " + sources + " and stateId is " + stateId);
  +            log.warn("Cannot fetch partial state, targets are " + Arrays.asList(sources) + 
  +                  " and stateId is " + stateId);
            return;
         }
   
         ArrayList targets = new ArrayList(Arrays.asList(sources));
  +
         //skip *this* node as a target
         targets.remove(getLocalAddress());
   
  @@ -609,9 +612,7 @@
         stateFetchTimeout = configuration.getLockAcquisitionTimeout() + 5000;
         if (configuration.isNodeLockingOptimistic())
         {
  -         root = NodeFactory.getInstance().createRootDataNode(NodeFactory.NODE_TYPE_OPTIMISTIC_NODE, this);
  -         // prepare an SPI interface to the root node
  -         rootSpi = new TreeCacheProxyImpl(this, (NodeImpl) root);
  +         root = NodeFactory.getInstance().createRootDataNode(NodeFactory.NODE_TYPE_OPTIMISTIC_NODE, rootSpi);
         }
   
         setUseReplQueue(configuration.isUseReplQueue());
  @@ -707,7 +708,8 @@
               MBeanServer server=(MBeanServer)servers.get(0);
               JmxConfigurator.registerChannel(channel, server, "JGroups:channel=" + channel.getChannelName() , true);
   */
  -            disp = new InactiveRegionAwareRpcDispatcher(channel, ml, this, this);
  +            // disp = new InactiveRegionAwareRpcDispatcher(channel, ml, this, this);
  +            disp = new RpcDispatcher(channel, ml, this, this);
               disp.setMarshaller(getMarshaller());
   
               setBuddyReplicationConfig(configuration.getBuddyReplicationConfig());
  @@ -1016,32 +1018,6 @@
      }
   
      /**
  -    * Returns whether the given node is empty; i.e. has no key/value pairs
  -    * in its data map and has no children.
  -    *
  -    * @param node the node. Can be <code>null</code>.
  -    * @return <code>true</code> if <code>node</code> is <code>null</code> or
  -    *         empty; <code>false</code> otherwise.
  -    */
  -   protected boolean isNodeEmpty(DataNode node)
  -   {
  -      boolean empty = true;
  -      if (node != null)
  -      {
  -         if (node.hasChildren())
  -         {
  -            empty = false;
  -         }
  -         else
  -         {
  -            Set keys = node.getDataKeys();
  -            empty = (keys == null || keys.size() == 0);
  -         }
  -      }
  -      return empty;
  -   }
  -
  -   /**
       * Creates a subtree in the local tree.
       * Returns the DataNode created.
       */
  @@ -1065,7 +1041,7 @@
               // Lock the parent, create and add the child
               try
               {
  -               parent.acquire(owner, configuration.getSyncReplTimeout(), DataNode.LockType.WRITE);
  +               parent.getNodeSPI().getLock().acquire(owner, configuration.getSyncReplTimeout(), NodeLock.LockType.WRITE);
               }
               catch (InterruptedException e)
               {
  @@ -1077,7 +1053,7 @@
               {
                  child = factory.createDataNode(type, name,
                          subtree.getFqnChild(i + 1),
  -                       parent, null, this);
  +                       parent, lock_table, coordinator, null, this.rootSpi);
                  parent.addChild(name, child);
               }
               finally
  @@ -1088,7 +1064,7 @@
                  }
                  try
                  {
  -                  parent.releaseForce();
  +                  parent.getNodeSPI().getLock().releaseAll();
                  }
                  catch (Throwable t)
                  {
  @@ -1175,11 +1151,11 @@
         getStateTransferManager().getState(os, fqn, timeout, force, suppressErrors);
      }
   
  -   private void removeLocksForDeadMembers(DataNode node,
  +   private void removeLocksForDeadMembers(Node node,
                                             Vector deadMembers)
      {
         Set deadOwners = new HashSet();
  -      IdentityLock lock = node.getLock();
  +      NodeLock lock = node.getNodeSPI().getLock();
         Object owner = lock.getWriterOwner();
   
         if (isLockOwnerDead(owner, deadMembers))
  @@ -1211,13 +1187,9 @@
         }
   
         // Recursively unlock children
  -      if (node.hasChildren())
  -      {
  -         Collection children = node.getChildren().values();
  -         for (Iterator it = children.iterator(); it.hasNext();)
  +      for (Node child : node.getChildren())
            {
  -            removeLocksForDeadMembers((DataNode) it.next(), deadMembers);
  -         }
  +         removeLocksForDeadMembers(child, deadMembers);
         }
      }
   
  @@ -1326,7 +1298,7 @@
      {
         DataNode n = findNode(fqn);
         if (n == null) return null;
  -      return n.getData();
  +      return n.getNodeSPI().getRawData();
      }
   
      /**
  @@ -1362,7 +1334,7 @@
         {
            return null;
         }
  -      Set keys = n.getDataKeys();
  +      Set keys = n.getNodeSPI().getRawData().keySet();
         // See http://jira.jboss.com/jira/browse/JBCACHE-551
         if (keys == null)
         {
  @@ -1436,7 +1408,7 @@
       *
       * @deprecated This will go away.
       */
  -   public DataNode peek(Fqn fqn)
  +   public Node peek(Fqn fqn)
      {
         return findInternal(fqn);
      }
  @@ -1467,7 +1439,7 @@
       */
      public boolean exists(Fqn fqn)
      {
  -      DataNode n = findInternal(fqn);
  +      Node n = findInternal(fqn);
         return n != null;
      }
   
  @@ -1476,15 +1448,15 @@
       *
       * @param fqn
       */
  -   private DataNode findInternal(Fqn fqn)
  +   private Node findInternal(Fqn fqn)
      {
         if (fqn == null || fqn.size() == 0) return root;
  -      TreeNode n = root;
  +      Node n = root;
         int fqnSize = fqn.size();
         for (int i = 0; i < fqnSize; i++)
         {
            Object obj = fqn.get(i);
  -         n = n.getChild(obj);
  +         n = n.getNodeSPI().getChildrenMap().get(obj);
            if (n == null)
            {
               return null;
  @@ -1514,14 +1486,14 @@
       */
      public boolean exists(Fqn fqn, Object key)
      {
  -      DataNode n = findInternal(fqn);
  +      Node n = findInternal(fqn);
         if (n == null)
         {
            return false;
         }
         else
         {
  -         return n.containsKey(key);
  +         return n.getKeys().contains(key);
         }
      }
   
  @@ -1790,34 +1762,29 @@
      {
         DataNode n = findNode(fqn);
         if (n == null) return null;
  -      Map m = n.getChildren();
  -      if (m != null)
  -      {
  -         return new HashSet(m.keySet());
  -      }
  -      else
  -      {
  -         return null;
  +      Set s = n.getChildrenNames();
  +      return s;
         }
  -   }
  -
   
  +   /**
  +    * Returns true if the FQN exists and the node has children. 
  +    */
      public boolean hasChild(Fqn fqn)
      {
         if (fqn == null) return false;
   
  -      TreeNode n = root;
  +      Node n = root;
         Object obj;
         for (int i = 0; i < fqn.size(); i++)
         {
            obj = fqn.get(i);
  -         n = n.getChild(obj);
  +         n = n.getNodeSPI().getChildrenMap().get(obj);
            if (n == null)
            {
               return false;
            }
         }
  -      return n.hasChildren();
  +      return !n.getNodeSPI().getChildrenMap().isEmpty();
      }
   
      /**
  @@ -1836,7 +1803,6 @@
      {
         StringBuffer sb = new StringBuffer();
         int indent = 0;
  -      Map children;
   
         if (!details)
         {
  @@ -1845,21 +1811,13 @@
         }
         else
         {
  -         children = root.getChildren();
  -         if (children != null && children.size() > 0)
  -         {
  -            Collection nodes = children.values();
  -            for (Iterator it = nodes.iterator(); it.hasNext();)
  -            {
  -               ((DataNode) it.next()).print(sb, indent);
  +         Map<Object, Node> children;
  +         children = root.getNodeSPI().getChildrenMap();
  +         for (Node n : children.values()) {
  +            ((DataNode) n).print(sb, indent);
                  sb.append("\n");
               }
            }
  -         else
  -         {
  -            sb.append(Fqn.SEPARATOR);
  -         }
  -      }
         return sb.toString();
      }
   
  @@ -1875,7 +1833,7 @@
         int indent = 2;
         Map children;
   
  -      children = root.getChildren();
  +      children = root.getNodeSPI().getChildrenMap();
         if (children != null && children.size() > 0)
         {
            Collection nodes = children.values();
  @@ -1899,22 +1857,11 @@
      {
         StringBuffer sb = new StringBuffer("\n");
         int indent = 0;
  -      Map children;
   
  -      children = root.getChildren();
  -      if (children != null && children.size() > 0)
  -      {
  -         Collection nodes = children.values();
  -         for (Iterator it = nodes.iterator(); it.hasNext();)
  -         {
  -            ((DataNode) it.next()).printLockInfo(sb, indent);
  +      for (Node n : root.getNodeSPI().getChildrenMap().values()) {
  +        n.getNodeSPI().getLock().printLockInfo(sb, indent);
               sb.append("\n");
            }
  -      }
  -      else
  -      {
  -         sb.append(Fqn.SEPARATOR);
  -      }
         return sb.toString();
      }
   
  @@ -1923,23 +1870,19 @@
       */
      public int getNumberOfLocksHeld()
      {
  -      return numLocks(root);
  +      return numLocks(root.getNodeSPI());
      }
   
  -   private int numLocks(DataNode n)
  +   private int numLocks(NodeSPI n)
      {
         int num = 0;
  -      Map children;
  -      if (n.isLocked())
  +      if (n.getLock().isLocked())
         {
            num++;
         }
  -      if ((children = n.getChildren()) != null)
  +      for (Node cn : n.getChildrenMap().values()) 
         {
  -         for (Iterator it = children.values().iterator(); it.hasNext();)
  -         {
  -            num += numLocks((DataNode) it.next());
  -         }
  +         num += numLocks(cn.getNodeSPI());
         }
         return num;
      }
  @@ -1955,27 +1898,16 @@
         return numNodes(root) - 1;
      }
   
  -   private int numNodes(DataNode n)
  +   private int numNodes(Node n)
      {
         if (n == null)
         {
            return 0;
         }
         int count = 1; // for n
  -      if (n.hasChildren())
  -      {
  -         Map children = n.getChildren();
  -         if (children != null && children.size() > 0)
  -         {
  -            Collection child_nodes = children.values();
  -            DataNode child;
  -            for (Iterator it = child_nodes.iterator(); it.hasNext();)
  -            {
  -               child = (DataNode) it.next();
  +      for (Node child: n.getNodeSPI().getChildrenMap().values()) {
                  count += numNodes(child);
               }
  -         }
  -      }
         return count;
      }
   
  @@ -1987,7 +1919,7 @@
       */
      public int getNumberOfAttributes()
      {
  -      return numAttributes(root);
  +      return numAttributes(getRoot());
      }
   
      /**
  @@ -2002,27 +1934,15 @@
         return numAttributes(n);
      }
   
  -   private int numAttributes(DataNode n)
  +   private int numAttributes(Node n)
      {
  -      if (n == null)
  +      Map<Object, Node> children = n.getNodeSPI().getChildrenMap();
  +      int count = 0;
  +      for (Node child : children.values())
         {
  -         return 0;
  -      }
  -      int count = n.numAttributes();
  -      if (n.hasChildren())
  -      {
  -         Map children = n.getChildren();
  -         if (children != null && children.size() > 0)
  -         {
  -            Collection child_nodes = children.values();
  -            DataNode child;
  -            for (Iterator it = child_nodes.iterator(); it.hasNext();)
  -            {
  -               child = (DataNode) it.next();
                  count += numAttributes(child);
               }
  -         }
  -      }
  +      count += n.getData().size();
         return count;
      }
   
  @@ -2402,7 +2322,9 @@
         }
   
         notifier.notifyNodeModified(fqn, true, n.getData() == null ? Collections.emptyMap() : Collections.unmodifiableMap(n.getData()));
  +      getInvocationContext().getOptionOverrides().setBypassInterceptorChain(true);
         old_value = n.put(key, value);
  +      getInvocationContext().getOptionOverrides().setBypassInterceptorChain(false);
   
         // create a compensating method call (reverting the effect of
         // this modification) and put it into the TX's undo list.
  @@ -2478,7 +2400,7 @@
      {
   
         DataNode n;
  -      TreeNode parent_node;
  +      Node parent_node;
         MethodCall undo_op = null;
   
         if (log.isTraceEnabled())
  @@ -2533,14 +2455,14 @@
         parent_node = n.getParent();
   
         // remove subtree from parent
  -      parent_node.removeChild(n.getName());
  +      parent_node.getNodeSPI().getChildrenMap().remove(n.getName());
         if (eviction)
         {
  -         parent_node.setChildrenLoaded(false);
  +         parent_node.getNodeSPI().setChildrenLoaded(false);
         }
   
         // release all locks for the entire subtree
  -      n.releaseAll(tx != null ? tx : (Object) Thread.currentThread());
  +      n.getNodeSPI().getLock().releaseAll(tx != null ? tx : (Object) Thread.currentThread());
   
         // create a compensating method call (reverting the effect of
         // this modification) and put it into the TX's undo list.
  @@ -2696,7 +2618,7 @@
            notifier.notifyNodeModified(fqn, true, n.getData() == null ? Collections.emptyMap() : Collections.unmodifiableMap(n.getData()));
         }
   
  -      n.clear();
  +      n.clearData();
         if (eviction)
         {
            n.put(UNINITIALIZED, null); // required by cache loader to subsequently load the element again
  @@ -2938,6 +2860,13 @@
      public List _gravitateData(Fqn fqn, boolean searchSubtrees, boolean marshal)
              throws CacheException
      {
  +      GravitateResult result = gravitateData(fqn, searchSubtrees, marshal);
  +      return result.asList();
  +   }
  +      
  +   public GravitateResult gravitateData(Fqn fqn, boolean searchSubtrees, boolean marshal)
  +      throws CacheException
  +   {
         // we need to get the state for this Fqn and it's sub-nodes.
   
         // for now, perform a very simple series of getData calls.
  @@ -2949,7 +2878,7 @@
            DataNode backupSubtree = findNode(BuddyManager.BUDDY_BACKUP_SUBTREE_FQN);
            if (backupSubtree != null)
            {
  -            Map children = backupSubtree.getChildren();
  +            Map children = backupSubtree.getNodeSPI().getChildrenMap();
               if (children != null)
               {
                  Iterator childNames = children.keySet().iterator();
  @@ -2962,17 +2891,15 @@
            }
         }
   
  -      ArrayList retval;
         if (actualNode == null)
         {
  -         // not found anything.
  -         retval = new ArrayList(1);
  -         retval.add(false);
  +         return GravitateResult.noDataFound();
         }
  -      else
  +   
  +      if (backupNodeFqn == null)
         {
  -         retval = new ArrayList(3);
  -         retval.add(true);
  +         backupNodeFqn = BuddyManager.getBackupFqn(BuddyManager.getGroupNameFromAddress(getLocalAddress()), fqn);
  +      }
   
            List list = getNodeData(new LinkedList(), actualNode);
            if (marshal)
  @@ -2983,7 +2910,7 @@
                  MarshalledValueOutputStream maos = new MarshalledValueOutputStream(baos);
                  maos.writeObject(list);
                  maos.close();
  -               retval.add(baos.toByteArray());
  +            return GravitateResult.marshalledResult(baos.toByteArray(), backupNodeFqn);
               }
               catch (IOException e)
               {
  @@ -2992,33 +2919,19 @@
            }
            else
            {
  -            retval.add(list);
  -         }
  -
  -         if (backupNodeFqn == null)
  -         {
  -            backupNodeFqn = BuddyManager.getBackupFqn(BuddyManager.getGroupNameFromAddress(getLocalAddress()), fqn);
  -         }
  -         retval.add(backupNodeFqn);
  +         return GravitateResult.subtreeResult(list, backupNodeFqn);
         }
  -      return retval;
      }
   
  -   private List getNodeData(List list, DataNode node)
  +   private List getNodeData(List list, Node node)
      {
         NodeData data = new NodeData(BuddyManager.getActualFqn(node.getFqn()), node.getData());
         list.add(data);
  -      Map children = node.getChildren();
  -      if (children != null)
  -      {
  -         Iterator i = children.keySet().iterator();
  -         while (i.hasNext())
  +      Map<Object, Node> children = node.getNodeSPI().getChildrenMap();
  +      for (Node childNode: children.values())
            {
  -            Object childName = i.next();
  -            DataNode childNode = (DataNode) children.get(childName);
               getNodeData(list, childNode);
            }
  -      }
         return list;
      }
   
  @@ -3085,9 +2998,8 @@
         switch (mc.getMethodId())
         {
            case MethodDeclarations.getDataMapMethodLocal_id:
  -            return new HashMap(0);
            case MethodDeclarations.getChildrenNamesMethodLocal_id:
  -            return new HashSet(0);
  +            return Collections.emptyMap();
            default:
               return null;
         }
  @@ -3137,7 +3049,7 @@
      /**
       * Should not be called.
       */
  -   public void _lock(Fqn fqn, DataNode.LockType lock_type, boolean recursive)
  +   public void _lock(Fqn fqn, NodeLock.LockType lock_type, boolean recursive)
              throws TimeoutException, LockingException
      {
         log.warn("method _lock() should not be invoked on TreeCache");
  @@ -3309,7 +3221,7 @@
         node.setFqn(newFqn);
   
         // process children
  -      for (Object n : node.getChildren().values())
  +      for (Object n : node.getChildren())
         {
            NodeImpl child = (NodeImpl) n;
            moveFqns(child, newFqn);
  @@ -3860,7 +3772,7 @@
       */
      private DataNode findNode(Fqn fqn, DataVersion version) throws CacheException
      {
  -      TreeNode n, child_node = null;
  +      Node n, child_node = null;
         Object child_name;
         int treeNodeSize;
   
  @@ -3876,7 +3788,7 @@
            for (int i = 0; i < treeNodeSize; i++)
            {
               child_name = fqn.get(i);
  -            child_node = n.getChild(child_name);
  +            child_node = n.getNodeSPI().getChildrenMap().get(child_name);
               if (child_node == null)
               {
                  return null;
  @@ -3904,15 +3816,10 @@
         return toReturn;
      }
   
  -   public RegionManager getRegionManager()
  +   public synchronized RegionManager getRegionManager()
      {
         if (regionManager == null)
  -      {
  -         synchronized (this)
  -         {
  -            if (regionManager == null) regionManager = new RegionManager(this);
  -         }
  -      }
  +         regionManager = new RegionManager(this);
         return regionManager;
      }
   
  @@ -3928,17 +3835,6 @@
   
      public CacheSPI getCacheSPI()
      {
  -
  -      if (configuration.isNodeLockingOptimistic() && !(root instanceof OptimisticTreeNode))
  -      {
  -         root = NodeFactory.getInstance().createRootDataNode(NodeFactory.NODE_TYPE_OPTIMISTIC_NODE, this);
  -         // prepare an SPI interface to the root node
  -         rootSpi = new TreeCacheProxyImpl(this, (NodeImpl) root);
  -      }
  -      else if (rootSpi == null)
  -      {
  -         rootSpi = new TreeCacheProxyImpl(this, (NodeImpl) root);
  -      }
         return rootSpi;
      }
   
  
  
  
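  [Editor's note] The TreeCache.java changes above replace direct DataNode accessors
  (getChildren(), getLock(), getDataKeys()) with calls routed through each node's SPI
  view, and rewrite the recursive walks (numLocks, numNodes, numAttributes,
  removeLocksForDeadMembers) with the enhanced for loop; _gravitateData() is likewise
  reduced to gravitateData() returning a GravitateResult that is flattened with
  asList(). A minimal sketch of the new traversal idiom follows, assuming
  NodeSPI.getChildrenMap() is typed Map<Object, Node> as the rewritten numLocks()
  relies on; the class and method names below are illustrative only, not part of the
  commit.

     import org.jboss.cache.Node;
     import org.jboss.cache.NodeSPI;

     public class LockCountSketch
     {
        /**
         * Illustrative sketch: counts locked nodes in the subtree rooted at n,
         * mirroring the rewritten numLocks() above. Both the lock and the child
         * map are reached through the node's SPI rather than DataNode directly.
         */
        public int countLocked(NodeSPI n)
        {
           int locked = n.getLock().isLocked() ? 1 : 0;
           for (Node child : n.getChildrenMap().values())
           {
              locked += countLocked(child.getNodeSPI());
           }
           return locked;
        }
     }

  The same getNodeSPI()-then-getChildrenMap() pattern is what the diff applies to the
  other recursive helpers in this file.
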
  1.51      +74 -242   JBossCache/src/org/jboss/cache/TreeCacheProxyImpl.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: TreeCacheProxyImpl.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/TreeCacheProxyImpl.java,v
  retrieving revision 1.50
  retrieving revision 1.51
  diff -u -b -r1.50 -r1.51
  --- TreeCacheProxyImpl.java	18 Nov 2006 09:31:01 -0000	1.50
  +++ TreeCacheProxyImpl.java	20 Nov 2006 03:53:54 -0000	1.51
  @@ -8,6 +8,7 @@
   import org.apache.commons.logging.Log;
   import org.apache.commons.logging.LogFactory;
   import org.jboss.cache.buddyreplication.BuddyManager;
  +import org.jboss.cache.buddyreplication.GravitateResult;
   import org.jboss.cache.config.Configuration;
   import org.jboss.cache.factories.InterceptorChainFactory;
   import org.jboss.cache.interceptors.Interceptor;
  @@ -35,23 +36,19 @@
    *
    * @author <a href="mailto:manik at jboss.org">Manik Surtani (manik at jboss.org)</a>
    */
  -public class TreeCacheProxyImpl implements CacheSPI, NodeSPI
  +public class TreeCacheProxyImpl implements CacheSPI
   {
  +
      public TreeCache treeCache;
  -   public NodeImpl currentNode;
  -   Log log = LogFactory.getLog(TreeCacheProxyImpl.class);
   
  -   public TreeCacheProxyImpl(TreeCache treeCache, NodeImpl currentNode)
  +   public TreeCacheProxyImpl(TreeCache treeCache)
      {
         this.treeCache = treeCache;
  -      this.currentNode = currentNode;
      }
   
      /**
  -    * A temp solution untill we figure out how to extract the membership functionality from
  +    * A temp solution until we figure out how to extract the membership functionality from
       * TreeCache.
  -    *
  -    * @return
       */
      public TreeCache getTreeCache()
      {
  @@ -176,7 +173,7 @@
   
      public Node getRoot()
      {
  -      return treeCache.getCacheSPI();
  +      return treeCache.getRoot();
      }
   
      public void addCacheListener(CacheListener l)
  @@ -250,10 +247,10 @@
         }
      }
   
  -   private void evictChildren(NodeImpl n)
  +   private void evictChildren(Node n)
      {
  -      Map<Object, NodeImpl> children = n.getChildren();
  -      for (NodeImpl child : children.values())
  +      Set<Node> children = n.getChildren();
  +      for (Node child : children)
         {
            evictChildren(child);
         }
  @@ -285,323 +282,158 @@
         treeCache.destroy();
      }
   
  -   public Node getParent()
  -   {
  -      return new TreeCacheProxyImpl(treeCache, (NodeImpl) currentNode.getParent());
  -   }
  -
  -   public Set<Node> getChildren()
  -   {
  -      Map m = currentNode.getChildren();
  -      Set<Node> children = new HashSet<Node>(m.size());
  -      Iterator i = m.values().iterator();
  -      while (i.hasNext())
  -      {
  -         children.add(new TreeCacheProxyImpl(treeCache, (NodeImpl) i.next()));
  -      }
  -
  -      return Collections.unmodifiableSet(children);
  -   }
  -
  -   public Set<Object> getChildrenNames()
  -   {
  -      Set s = currentNode.getChildren().keySet();
  -      Set<Object> children = new HashSet<Object>(s);
  -      return Collections.unmodifiableSet(children);
  -   }
  -
  -   public void setChildren(Map<Object, Node> children)
  -   {
  -      currentNode.setChildren(children);
  -   }
  -
  -   public Map getData()
  -   {
  -      return Collections.unmodifiableMap(new HashMap(currentNode.getData()));
  -   }
  -
  -   public Set getKeys()
  -   {
  -      return Collections.unmodifiableSet(new HashSet(currentNode.getDataKeys()));
  -   }
  -
  -   public Fqn getFqn()
  -   {
  -      return currentNode.getFqn();
  -   }
  -
  -   public Node addChild(Fqn f)
  -   {
  -      if (log.isTraceEnabled()) log.trace("Adding child " + f + "  to parent " + currentNode.getFqn());
  -      if (treeCache.getInvocationContext().getOptionOverrides().isBypassInterceptorChain())
  -      {
  -         TreeCacheProxyImpl retval = null;
  -         NodeImpl newNode;
  -         GlobalTransaction gtx = treeCache.getInvocationContext().getGlobalTransaction();
  -
  -         if (f.size() == 1)
  -         {
  -            //newNode = (NodeImpl) NodeFactory.getInstance().createNode(f.getLast(), currentNode, Collections.EMPTY_MAP, treeCache);
  -            //treeCache._addChild(gtx, currentNode.getFqn(), f.getLast(), newNode, true);
  -            newNode = (NodeImpl) currentNode.getOrCreateChild(f.getLast(), gtx, true);
  -            retval = new TreeCacheProxyImpl(treeCache, newNode);
  -         }
  -         else
  -         {
  -            // recursively create children
  -            NodeImpl currentParent = currentNode;
  -            for (Object o : f.peekElements())
  -            {
  -               //newNode = (NodeImpl) NodeFactory.getInstance().createNode(o, currentParent, Collections.emptyMap(), treeCache);
  -               //currentParent.addChild(o, newNode);
  -               //treeCache._addChild(gtx, currentParent.getFqn(), o, newNode, true);
  -               newNode = (NodeImpl) currentParent.getOrCreateChild(o, gtx, true);
  -               currentParent = newNode;
  -            }
  -            retval = new TreeCacheProxyImpl(treeCache, currentParent);
  -         }
  -
  -         treeCache.getInvocationContext().getOptionOverrides().setBypassInterceptorChain(false);
  -         return retval;
  -      }
  -      else
  +   public boolean isStarted()
         {
  -         treeCache.put(new Fqn(currentNode.getFqn(), f), Collections.emptyMap());
  -         return getChild(f);
  -      }
  +      return treeCache.started;
      }
   
  -   public void removeChild(Fqn f)
  +   public Node addChild(Fqn fqn)
      {
  -      treeCache.remove(new Fqn(currentNode.getFqn(), f));
  +      return getRoot().addChild(fqn);
      }
   
      public Node getChild(Fqn f)
      {
  -      NodeImpl child = null;
  -      if (treeCache.getInvocationContext().getOptionOverrides().isBypassInterceptorChain())
  -      {
  -         if (currentNode.getChildren() != null) child = (NodeImpl) currentNode.getChildren().get(f.getLast());
  -         treeCache.getInvocationContext().getOptionOverrides().setBypassInterceptorChain(false);
  -      }
  -      else
  -      {
  -         child = treeCache.get(new Fqn(currentNode.getFqn(), f));
  -      }
  -      return child == null ? null : new TreeCacheProxyImpl(treeCache, child);
  +      return treeCache.get(f);
      }
   
  -   public Object put(Object k, Object v)
  -   {
  -      Object result = null;
  -      if (treeCache.getInvocationContext().getOptionOverrides().isBypassInterceptorChain())
  -      {
  -         result = currentNode.put(k, v);
  -         treeCache.getInvocationContext().getOptionOverrides().setBypassInterceptorChain(false);
  -      }
  -      else
  +   public Node peek(Fqn fqn)
         {
  -         result = treeCache.put(currentNode.getFqn(), k, v);
  -      }
  -      return result;
  -   }
  -
  -   public void putIfNull(Object k, Object v)
  -   {
  -      if (treeCache.get(currentNode.getFqn(), k) == null) put(k, v);
  -   }
  -
  -   public void put(Map m)
  -   {
  -      treeCache.put(currentNode.getFqn(), m);
  +      return treeCache.peek(fqn);
      }
   
  -   public void putIfNull(Map m)
  +   public RegionManager getRegionManager()
      {
  -      if (getData() == null || getData().isEmpty()) put(m);
  +      return treeCache.getRegionManager();
      }
   
  -   public Object get(Object k)
  +   public GravitateResult gravitateData(Fqn fqn, boolean searchSubtrees, boolean marshal)
      {
  -      return treeCache.get(currentNode.getFqn(), k);
  +      return treeCache.gravitateData(fqn, searchSubtrees, marshal);
      }
   
  -   public void removeNode(Fqn f)
  +   public Notifier getNotifier()
      {
  -      treeCache.remove(f);
  +      return treeCache.getNotifier();
      }
   
  -   public Object remove(Object k)
  -   {
  -      if (k instanceof Fqn)
  -      {
  -         log.warn("Did you mean to call removeNode() instead?  remove() just removes data in the node under the key passed in.");
  -      }
  -      if (treeCache.getInvocationContext().getOptionOverrides().isBypassInterceptorChain())
  -      {
  -         Object value = currentNode.remove(k);
  -         treeCache.getInvocationContext().getOptionOverrides().setBypassInterceptorChain(false);
  -         return value;
  -      }
  -      else
  +   public InvocationContext getInvocationContext()
         {
  -         return treeCache.remove(currentNode.getFqn(), k);
  -      }
  +      return treeCache.getInvocationContext();
      }
   
  -   public void clearData()
  -   {
  -      if (treeCache.getInvocationContext().getOptionOverrides().isBypassInterceptorChain())
  -      {
  -         currentNode.clear();
  -         treeCache.getInvocationContext().getOptionOverrides().setBypassInterceptorChain(false);
  -      }
  -      else
  +   public void setInvocationContext(InvocationContext ctx)
         {
  -         treeCache.removeData(currentNode.getFqn());
  -      }
  +      treeCache.setInvocationContext(ctx);
      }
   
  -   public void move(Node newParent) throws NodeNotExistsException
  +   public GlobalTransaction getCurrentTransaction(Transaction tx, boolean create)
      {
  -      if (currentNode == null || currentNode.getFqn() == null)
  -         throw new NodeNotExistsException("Current node does not exist; perhaps it has been moved?");
  -      Object name = currentNode.getFqn().getLast();
  -      treeCache.move(newParent.getFqn(), currentNode.getFqn());
  -
  -      // does not ALWAYS work.  E.g., opt locking actually creates new nodes soold refs are stale.
  -      // old refs to childnodes will still be stale.  :S
  -      // also, if running within a tx, the peek() will return null until after commit - even worse!
  -      currentNode = (NodeImpl) peek(new Fqn(newParent.getFqn(), name));
  +      return treeCache.getCurrentTransaction(tx, create);
      }
   
  -   public boolean hasChild(Fqn f)
  +   public String printDetails()
      {
  -      return treeCache.exists(new Fqn(currentNode.getFqn(), f));
  +      return treeCache.printDetails();
      }
   
  -   public boolean isLocked()
  +   public String toString()
      {
  -      return currentNode.isLocked();
  +      return "TreeCacheProxyImpl treeCache=" + treeCache;
      }
   
  -   public boolean isReadLocked()
  -   {
  -      return currentNode.isReadLocked();
  -   }
  +   // Node implementation
   
  -   public boolean isWriteLocked()
  +   public void removeNode(Fqn fqn)
      {
  -      return currentNode.isWriteLocked();
  +      treeCache.remove(fqn);
      }
   
  -   public boolean isStarted()
  +   public void clearData()
      {
  -      return treeCache.started;
  +      getRoot().clearData();
      }
   
  -   // -----------
  -
  -   // TODO: Think about what we need to do here regarding locking on Nodes
  -
  -   public boolean acquire(Object owner, long lock_timeout, DataNode.LockType lockType) throws InterruptedException
  +   public Object get(Object k)
      {
  -      return currentNode.acquire(owner, lock_timeout, lockType);
  +      return getRoot().get(k);
      }
   
  -   public IdentityLock getLock()
  +   public Set<Node> getChildren()
      {
  -      return currentNode.getLock();
  +      return getRoot().getChildren();
      }
   
  -   public Set acquireAll(Object owner, long lock_timeout, DataNode.LockType lockType) throws InterruptedException
  +   public Set<Object> getChildrenNames()
      {
  -      return currentNode.acquireAll(owner, lock_timeout, lockType);
  +      return getRoot().getChildrenNames();
      }
   
  -   public DataNode peek(Fqn fqn)
  +   public Map getData()
      {
  -      return treeCache.peek(fqn);
  +      return getRoot().getData();
      }
   
  -   public Node peekNode(Fqn fqn)
  +   public Fqn getFqn()
      {
  -
  -      NodeImpl node = (NodeImpl) treeCache.peek(fqn);
  -      return node == null ? null : new TreeCacheProxyImpl(treeCache, node);
  +      return getRoot().getFqn();
      }
   
  -   public RegionManager getRegionManager()
  +   public Set getKeys()
      {
  -      return treeCache.getRegionManager();
  +      return getRoot().getKeys();
      }
   
  -   public List gravitateData(Fqn fqn, boolean searchSubtrees, boolean marshal)
  +   public Node getParent()
      {
  -      return treeCache._gravitateData(fqn, searchSubtrees, marshal);
  +      return getRoot().getParent();
      }
   
  -   public Notifier getNotifier()
  +    public boolean hasChild(Fqn f)
      {
  -      return treeCache.getNotifier();
  +       return treeCache.exists(f);
      }
   
  -   public InvocationContext getInvocationContext()
  +   public void move(Node newParent) throws NodeNotExistsException
      {
  -      return treeCache.getInvocationContext();
  +      getRoot().move(newParent);
      }
   
  -   public void setInvocationContext(InvocationContext ctx)
  +   public Object put(Object k, Object v)
      {
  -      treeCache.setInvocationContext(ctx);
  +      return getRoot().put(k, v);
      }
   
  -   public GlobalTransaction getCurrentTransaction(Transaction tx)
  +   public void put(Map m)
      {
  -      return treeCache.getCurrentTransaction(tx);
  +      getRoot().put(m);
      }
   
  -   public String printDetails()
  +   public void putIfNull(Object k, Object v)
      {
  -      return treeCache.printDetails();
  +      getRoot().putIfNull(k, v);
      }
   
  -   public boolean getChildrenLoaded()
  +   public void putIfNull(Map m)
      {
  -      return currentNode.getChildrenLoaded();
  +      getRoot().putIfNull(m);
      }
   
  -   public void setChildrenLoaded(boolean b)
  +   public Object remove(Object k)
      {
  -      currentNode.setChildrenLoaded(b);
  +      return getRoot().remove(k);
      }
   
  -   public boolean equals(Object o)
  +   public void removeChild(Fqn f)
      {
  -      if (this == o) return true;
  -      if (o == null || getClass() != o.getClass()) return false;
  -
  -      final TreeCacheProxyImpl that = (TreeCacheProxyImpl) o;
  -
  -      if (currentNode != null ? !currentNode.fqn.equals(that.currentNode.fqn) : that.currentNode != null) return false;
  -      if (treeCache != null ? !treeCache.equals(that.treeCache) : that.treeCache != null) return false;
  -
  -      return true;
  +      treeCache.remove(f);
      }
   
  -   public int hashCode()
  +   public NodeSPI getNodeSPI()
      {
  -      int result;
  -      result = (treeCache != null ? treeCache.hashCode() : 0);
  -      result = 29 * result + (currentNode != null ? currentNode.hashCode() : 0);
  -      return result;
  +      return getRoot().getNodeSPI();
      }
   
  -
  -   public String toString()
  -   {
  -      return "TreeCacheProxyImpl{" +
  -              "Fqn=" + currentNode.getFqn() +
  -              '}';
  -   }
   }
  
  
  
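  [Editor's note] With NodeSPI dropped from TreeCacheProxyImpl, the proxy no longer
  wraps an individual NodeImpl; its Node methods now delegate to the cache root, and
  per-node state is reached through each node's own SPI. A rough caller-side sketch
  under that reading follows; the surrounding class and the printing are illustrative
  only, while the cache and node methods used are the ones shown in the diff.

     import org.jboss.cache.CacheSPI;
     import org.jboss.cache.Node;
     import org.jboss.cache.lock.NodeLock;

     public class ProxyUsageSketch
     {
        /**
         * Illustrative sketch: walks the root's children through the delegating
         * proxy and inspects each child's lock via its SPI view.
         */
        public void printChildLocks(CacheSPI cache)
        {
           Node root = cache.getRoot();
           for (Node child : root.getChildren())
           {
              NodeLock lock = child.getNodeSPI().getLock();
              System.out.println(child.getFqn() + " locked=" + lock.isLocked());
           }
        }
     }
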
  1.28      +6 -196    JBossCache/src/org/jboss/cache/TreeNode.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: TreeNode.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/TreeNode.java,v
  retrieving revision 1.27
  retrieving revision 1.28
  diff -u -b -r1.27 -r1.28
  --- TreeNode.java	14 Aug 2006 17:52:35 -0000	1.27
  +++ TreeNode.java	20 Nov 2006 03:53:54 -0000	1.28
  @@ -26,33 +26,21 @@
    * the data after read.
    * <p>
    * Implementations may not necessarily be <em>not</em> synchronized, so access
  - * to instances of TreeNode need to be run under an isolation level above NONE.
  + * to instances of this interface needs to be run under an isolation level above NONE.
    *
    * @author Bela Ban March 25 2003
    * @author Ben Wang
    * @author <a href="mailto:manik at jboss.org">Manik Surtani (manik at jboss.org)</a>
  - * @version $Revision: 1.27 $
  + * @version $Revision: 1.28 $
    */
   
  -public interface TreeNode
  +public interface TreeNode extends Node
   {
   
  -    // TODO: MANIK: are all these params necessary? - to be investigated
  -    /**
  -     * Creates a child node with a name, FQN, and parent.
  -     * Returns the created node.
  -     */
  -    TreeNode createChild(Object child_name, Fqn fqn, TreeNode parent);
  -
  -    /**
  -     * Returns the fully qualified name of the node.
  -     */
  -    Fqn getFqn();
  -
       /**
        * Returns the named child of this node.
        */
  -    TreeNode getChild(Object childName);
  +    Node getChild(Object childName);
   
       /**
        * Removes the named child of this node.
  @@ -60,11 +48,6 @@
       void removeChild(Object childName);
   
       /**
  -     * Returns the parent of this node.
  -     */
  -    TreeNode getParent();
  -
  -    /**
        * Puts the contents of a map into this node.
        * Optionally erases the existing contents.
        * @param eraseData true to erase the existing contents
  @@ -72,32 +55,6 @@
       void put(Map data, boolean eraseData);
   
       /**
  -     * Puts the key and value into the node.
  -     * Returns the old value of the key, if it existed.
  -     */
  -    Object put(Object key, Object value);
  -
  -    /**
  -     * Removes the old value of the key.
  -     */
  -    Object remove(Object key);
  -
  -    /**
  -     * Returns the value of a key or null if it does not exist.
  -     */
  -    Object get(Object key);
  -
  -    /**
  -     * Clears the data of this node.
  -     */
  -    void clear();
  -
  -    /**
  -     * Adds (merges) the contents of the map with the existing data.
  -     */
  -    void put(Map data);
  -
  -    /**
        * Returns the name of this node.
        */
       Object getName();
  @@ -113,171 +70,24 @@
       void printDetails(StringBuffer sb, int indent);
   
       /**
  -     * Prints the node with indent.
  -     */ 
  -    void printIndent(StringBuffer sb, int indent);
  -
  -    /**
  -     * Returns true if the key is in the data set.
  -     */
  -    boolean containsKey(Object key);
  -
  -    /**
  -     * Returns an unmodifiable map, mapping keys to child nodes.
  -     * Implementations need to make sure the map cannot be changed.
  -     *
  -     * @return Map<Object,TreeNode>
  -     */
  -    Map getChildren();
  -
  -    /**
  -     * Returns the data keys, or an empty set if there are no keys.
  -     */
  -    Set getDataKeys();
  -
  -    /**
  -     * Returns true if the child exists.
  -     */
  -    boolean childExists(Object child_name);
  -
  -    /**
  -     * Returns the number of attributes.
  -     */
  -    int numAttributes();
  -
  -    /**
  -     * Returns true if this node has children.
  -     */
  -    boolean hasChildren();
  -
  -    /**
  -     * Creates a child node.
  -     */
  -    TreeNode createChild(Object child_name, Fqn fqn, TreeNode parent, Object key, Object value);
  -
  -    /**
        * Removes all children.
        */
  -    void removeAllChildren();
  +    void removeChildren();
   
       /**
        * Adds the already created child node.
        * Replaces the existing child node if present.
        */
  -    void addChild(Object child_name, TreeNode n);
  +    void addChild(Object child_name, Node n);
   
       // ---- deprecated methods - should use similar meths in DataNode or AbstractNode instead ---
       // ---- these deprecated methods will be removed in JBossCache 1.3. ---
   
       /**
  -     * Returns a copy of the attributes.  Use get(Object key) instead.
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    Map getData();
  -
  -    /**
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    IdentityLock getImmutableLock();
  -
  -    /**
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    IdentityLock getLock();
  -
  -    /**
  -     * Creates a new child of this node if it doesn't exist. Also notifies the cache
  -     * that the new child has been created.
  -     * dded this new getOrCreateChild() method to avoid thread contention
  -     * on create_lock in PessimisticLockInterceptor.lock()
  -     *
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * @see DataNode
  -     */
  -    TreeNode getOrCreateChild(Object child_name, GlobalTransaction gtx, boolean createIfNotExists);
  -
  -    /**
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    void printLockInfo(StringBuffer sb, int indent);
  -
  -    /**
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    boolean isLocked();
  -
  -    /**
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    void releaseAll(Object owner);
  -
  -    /**
        * Not to be exposed.  Internal calls should use impl classes.
        * @see DataNode
        * @deprecated Will be removed in JBossCache 1.3.
        */
       void releaseAllForce();
   
  -    /**
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    Set acquireAll(Object caller, long timeout, DataNode.LockType lock_type)
  -            throws LockingException, TimeoutException, InterruptedException;
  -
  -    /**
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    void setRecursiveTreeCacheInstance(TreeCache cache);
  -
  -    /**
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    boolean getChildrenLoaded();
  -
  -    /**
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    void setChildrenLoaded(boolean b);
  -
  -    /**
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * Sets Map<Object,TreeNode>
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    void setChildren(Map children);
  -
  -    /**
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    void release(Object caller);
  -
  -    /**
  -     * Not to be exposed.  Internal calls should use impl classes.
  -     * @see DataNode
  -     * @deprecated Will be removed in JBossCache 1.3.
  -     */
  -    void releaseForce();
  -
   }
  
  
  
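  [Editor's note] TreeNode now extends Node and keeps only a handful of legacy
  methods; the removed accessors have Node/NodeSPI equivalents used elsewhere in
  this change set. A migration sketch follows (illustrative only, not part of the
  commit); the wrapper class and method names are hypothetical.

     import java.util.Set;

     import org.jboss.cache.Node;
     import org.jboss.cache.lock.NodeLock;

     public class TreeNodeMigrationSketch
     {
        /** Illustrative replacements for the TreeNode accessors removed above. */
        public void examples(Node node)
        {
           // was: treeNode.getLock() returning an IdentityLock
           NodeLock lock = node.getNodeSPI().getLock();

           // was: treeNode.getChildren() returning a Map of child names to TreeNodes
           Set<Node> children = node.getChildren();

           // was: treeNode.getDataKeys()
           Set keys = node.getKeys();

           // was: treeNode.clear()
           node.clearData();
        }
     }
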


