[jboss-cvs] JBossCache/src/org/jboss/cache/loader ...

Mircea Markus mircea.markus at gmail.com
Sat Feb 10 12:47:02 EST 2007


  User: mmarkus 
  Date: 07/02/10 12:47:02

  Modified:    src/org/jboss/cache/loader          ConnectionFactory.java
                        ManagedConnectionFactory.java
                        JDBCCacheLoaderConfig.java JDBCCacheLoader.java
                        NonManagedConnectionFactory.java
                        C3p0ConnectionFactory.java
  Added:       src/org/jboss/cache/loader          JDBCCacheLoaderOld.java
                        AdjListJDBCClassLoaderConfig.java
                        AdjListJDBCClassLoader.java
  Log:
  JDBCCacheLoader performance improvements. Also added benchmarks for comparison and backward compatibility tests.
  
  Revision  Changes    Path
  1.2       +1 -1      JBossCache/src/org/jboss/cache/loader/ConnectionFactory.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: ConnectionFactory.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/loader/ConnectionFactory.java,v
  retrieving revision 1.1
  retrieving revision 1.2
  diff -u -b -r1.1 -r1.2
  --- ConnectionFactory.java	21 Jan 2007 15:46:42 -0000	1.1
  +++ ConnectionFactory.java	10 Feb 2007 17:47:02 -0000	1.2
  @@ -17,7 +17,7 @@
    */
   public interface ConnectionFactory
   {
  -    void setConfig(JDBCCacheLoaderConfig config);
  +    void setConfig(AdjListJDBCClassLoaderConfig config);
   
       void start() throws Exception;
   
  
  
  
  1.2       +1 -1      JBossCache/src/org/jboss/cache/loader/ManagedConnectionFactory.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: ManagedConnectionFactory.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/loader/ManagedConnectionFactory.java,v
  retrieving revision 1.1
  retrieving revision 1.2
  diff -u -b -r1.1 -r1.2
  --- ManagedConnectionFactory.java	21 Jan 2007 15:46:42 -0000	1.1
  +++ ManagedConnectionFactory.java	10 Feb 2007 17:47:02 -0000	1.2
  @@ -28,7 +28,7 @@
      private DataSource dataSource;
      private String datasourceName;
   
  -   public void setConfig(JDBCCacheLoaderConfig config)
  +   public void setConfig(AdjListJDBCClassLoaderConfig config)
      {
         datasourceName = config.getDatasourceName();
      }
  
  
  
  1.4       +61 -384   JBossCache/src/org/jboss/cache/loader/JDBCCacheLoaderConfig.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: JDBCCacheLoaderConfig.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/loader/JDBCCacheLoaderConfig.java,v
  retrieving revision 1.3
  retrieving revision 1.4
  diff -u -b -r1.3 -r1.4
  --- JDBCCacheLoaderConfig.java	21 Jan 2007 15:46:42 -0000	1.3
  +++ JDBCCacheLoaderConfig.java	10 Feb 2007 17:47:02 -0000	1.4
  @@ -1,396 +1,73 @@
   package org.jboss.cache.loader;
   
  +import org.apache.commons.logging.Log;
  +import org.apache.commons.logging.LogFactory;
   import org.jboss.cache.config.CacheLoaderConfig.IndividualCacheLoaderConfig;
   
   import java.util.Properties;
   
   /**
  - * JDBCCacheLoaderConfig
  + * Builds the SQL statements needed by <tt>JDBCCacheLoader</tt>.
    *
  - * @author <a href="mailto:manik at jboss.org">Manik Surtani (manik at jboss.org)</a> 
  - * @author <a href="mailto:galder.zamarreno at jboss.com">Galder Zamarreno</a>
  + * @author Mircea.Markus at iquestint.com
  + * @version 1.0
    */
  -public class JDBCCacheLoaderConfig extends IndividualCacheLoaderConfig
  +public class JDBCCacheLoaderConfig extends AdjListJDBCClassLoaderConfig
   {
  -   /**
  -    * The serialVersionUID
  -    */
  -   private static final long serialVersionUID = -8371846151643130281L;
  -
  -   private boolean createTable;
  -   private String createTableDDL;
  -   private String datasourceName;
  -   private String deleteAllSql;
  -   private String deleteNodeSql;
  -   private boolean dropTable;
  -   private String dropTableDDL;
  -   private String driverClass;
  -   private String insertNodeSql;
  -   private String jdbcURL;
  -   private String jdbcUser;
  -   private String jdbcPassword;
  -   private String selectChildFqnsSql;
  -   private String selectChildNamesSql;
  -   private String selectNodeSql;
  -   private String table;
  -   private String updateNodeSql;
  -   private String updateTableSql;
  -   private String connectionFactoryClass;
  -
  -   public JDBCCacheLoaderConfig()
  -   {
  -      setClassName(JDBCCacheLoader.class.getName());
  -   }
  -
  -   /**
  -    * For use by {@link JDBCCacheLoader}.
  -    *
  -    * @param base generic config object created by XML parsing.
  -    */
  -   JDBCCacheLoaderConfig(IndividualCacheLoaderConfig base)
  -   {
  -      setClassName(JDBCCacheLoader.class.getName());
  -      populateFromBaseConfig(base);
  -   }
  -
  -   public boolean getCreateTable()
  -   {
  -      return createTable;
  -   }
  -
  -   public void setCreateTable(boolean createTable)
  -   {
  -      testImmutability("createTable");
  -      this.createTable = createTable;
  -   }
  -
  -   public String getCreateTableDDL()
  -   {
  -      return createTableDDL;
  -   }
  -
  -   public void setCreateTableDDL(String createTableDDL)
  -   {
  -      testImmutability("createTableDDL");
  -      this.createTableDDL = createTableDDL;
  -   }
  -
  -   public String getDatasourceName()
  -   {
  -      return datasourceName;
  -   }
  -
  -   public void setDatasourceName(String datasourceName)
  -   {
  -      testImmutability("datasourceName");
  -      this.datasourceName = datasourceName;
  -   }
  -
  -   public String getDeleteAllSql()
  -   {
  -      return deleteAllSql;
  -   }
  -
  -   public void setDeleteAllSql(String deleteAllSql)
  -   {
  -      testImmutability("deleteAllSql");
  -      this.deleteAllSql = deleteAllSql;
  -   }
  -
  -   public String getDeleteNodeSql()
  -   {
  -      return deleteNodeSql;
  -   }
  -
  -   public void setDeleteNodeSql(String deleteNodeSql)
  -   {
  -      testImmutability("deleteNodeSql");
  -      this.deleteNodeSql = deleteNodeSql;
  -   }
  -
  -   public String getDriverClass()
  -   {
  -      return driverClass;
  -   }
  -
  -   public void setDriverClass(String driverClass)
  -   {
  -      testImmutability("driverClass");
  -      this.driverClass = driverClass;
  -   }
  -
  -   public boolean getDropTable()
  -   {
  -      return dropTable;
  -   }
  -
  -   public void setDropTable(boolean dropTable)
  -   {
  -      testImmutability("dropTable");
  -      this.dropTable = dropTable;
  -   }
  -
  -   public String getInsertNodeSql()
  -   {
  -      return insertNodeSql;
  -   }
  -
  -   public void setInsertNodeSql(String insertNodeSql)
  -   {
  -      testImmutability("insertNodeSql");
  -      this.insertNodeSql = insertNodeSql;
  -   }
  -
  -   public String getSelectChildFqnsSql()
  -   {
  -      return selectChildFqnsSql;
  -   }
  -
  -   public void setSelectChildFqnsSql(String selectChildFqnsSql)
  -   {
  -      testImmutability("selectChildFqnsSql");
  -      this.selectChildFqnsSql = selectChildFqnsSql;
  -   }
  -
  -   public String getSelectNodeSql()
  -   {
  -      return selectNodeSql;
  -   }
  -
  -   public void setSelectNodeSql(String selectNodeSql)
  -   {
  -      testImmutability("selectNodeSql");
  -      this.selectNodeSql = selectNodeSql;
  -   }
  -
  -   public String getTable()
  -   {
  -      return table;
  -   }
  -
  -   public void setTable(String table)
  -   {
  -      testImmutability("table");
  -      this.table = table;
  -   }
  -
  -   public String getUpdateTableSql()
  -   {
  -      return updateTableSql;
  -   }
  -
  -   public void setUpdateTableSql(String updateTableSql)
  -   {
  -      testImmutability("updateTableSql");
  -      this.updateTableSql = updateTableSql;
  -   }
  -
  -   public String getDropTableDDL()
  -   {
  -      return dropTableDDL;
  -   }
  -
  -   public void setDropTableDDL(String dropTableDDL)
  -   {
  -      testImmutability("dropTableDDL");
  -      this.dropTableDDL = dropTableDDL;
  -   }
  -
  -   public String getSelectChildNamesSql()
  -   {
  -      return selectChildNamesSql;
  -   }
  -
  -   public void setSelectChildNamesSql(String selectChildNamesSql)
  -   {
  -      testImmutability("selectChildNamesSql");
  -      this.selectChildNamesSql = selectChildNamesSql;
  -   }
  -
  -   public String getUpdateNodeSql()
  -   {
  -      return updateNodeSql;
  -   }
  -
  -   public void setUpdateNodeSql(String updateNodeSql)
  -   {
  -      testImmutability("updateNodeSql");
  -      this.updateNodeSql = updateNodeSql;
  -   }
   
  -   public String getJdbcPassword()
  -   {
  -      return jdbcPassword;
  -   }
  +    private static final long serialVersionUID = -8371846151643130271L;
   
  -   public void setJdbcPassword(String jdbcPassword)
  -   {
  -      testImmutability("jdbcPassword");
  -      this.jdbcPassword = jdbcPassword;
  -   }
  +    private static final Log log = LogFactory.getLog(JDBCCacheLoaderConfig.class);
   
  -   public String getJdbcURL()
  -   {
  -      return jdbcURL;
  -   }
  +    private String deleteNode;
  +    private String recursiveChildren;
  +    private String nodeCountSql;
   
  -   public void setJdbcURL(String jdbcURL)
  -   {
  -      testImmutability("jdbcURL");
  -      this.jdbcURL = jdbcURL;
  -   }
   
  -   public String getJdbcUser()
  +    public JDBCCacheLoaderConfig(IndividualCacheLoaderConfig base)
      {
  -      return jdbcUser;
  +        super(base);
      }
   
  -   public void setJdbcUser(String jdbcUser)
  -   {
  -      testImmutability("jdbcUser");
  -      this.jdbcUser = jdbcUser;
  -   }
  -
  -   public String getConnectionFactoryClass()
  -   {
  -      return connectionFactoryClass;
  -   }
  -
  -   public void setConnectionFactoryClass(String connectionFactoryClass)
  -   {
  -      testImmutability("connectionFactoryClass");
  -      this.connectionFactoryClass = connectionFactoryClass;
  -   }
   
      public void setProperties(Properties props)
      {
         super.setProperties(props);
  -      datasourceName = props.getProperty("cache.jdbc.datasource");
  -      if (datasourceName == null)
  -      {
  -         this.driverClass = JDBCCacheLoader.getRequiredProperty(props, "cache.jdbc.driver");
  -         this.jdbcURL = JDBCCacheLoader.getRequiredProperty(props, "cache.jdbc.url");
  -         this.jdbcUser = JDBCCacheLoader.getRequiredProperty(props, "cache.jdbc.user");
  -         this.jdbcPassword = JDBCCacheLoader.getRequiredProperty(props, "cache.jdbc.password");
  -
  -         if (log.isDebugEnabled())
  -         {
  -            log.debug("Properties: " +
  -                    "cache.jdbc.url=" +
  -                    jdbcURL +
  -                    ", cache.jdbc.driver=" +
  -                    driverClass +
  -                    ", cache.jdbc.user=" +
  -                    jdbcUser +
  -                    ", cache.jdbc.password=" +
  -                    jdbcPassword +
  -                    ", cache.jdbc.table=" + table);
  -         }
  +        String sqlConcat = props.getProperty("cache.jdbc.sql-concat");
  +        if (sqlConcat == null) {
  +            log.info("Missing JDBCCacheLoader config 'cache.jdbc.sql-concat', using default value: 'concat(1,2)'");
  +            sqlConcat = "concat(1,2)";
  +        }
  +        String startingWith = sqlConcat.replace('1', '?').replace("2", "'%'"); //concat(?, '%')
  +        String appendSeparator = sqlConcat.replace("1", fqnColumn).replace("2", "'/'"); //concat(fqnColumn, '/')
  +        deleteNode = "delete from " + table + " where " + appendSeparator + " like " + startingWith;
  +        recursiveChildren = "select " + fqnColumn + "," + nodeColumn + " from " + table + " where " + appendSeparator + " like " + startingWith;
  +        nodeCountSql = "select count(*) from " + table;
         }
   
  -      String prop = props.getProperty("cache.jdbc.table.create");
  -      this.createTable = (prop == null || Boolean.valueOf(prop));
  -      prop = props.getProperty("cache.jdbc.table.drop");
  -      this.dropTable = (prop == null || Boolean.valueOf(prop));
  -
  -      this.table = props.getProperty("cache.jdbc.table.name", "jbosscache");
  -      String primaryKey = props.getProperty("cache.jdbc.table.primarykey", "jbosscache_pk");
  -      String fqnColumn = props.getProperty("cache.jdbc.fqn.column", "fqn");
  -      String fqnType = props.getProperty("cache.jdbc.fqn.type", "varchar(255)");
  -      String nodeColumn = props.getProperty("cache.jdbc.node.column", "node");
  -      String nodeType = props.getProperty("cache.jdbc.node.type", "blob");
  -      String parentColumn = props.getProperty("cache.jdbc.parent.column", "parent");
  -
  -      selectChildNamesSql = "select " + fqnColumn + " from " + table + " where " + parentColumn + "=?";
  -      deleteNodeSql = "delete from " + table + " where " + fqnColumn + "=?";
  -      deleteAllSql = "delete from " + table;
  -      selectChildFqnsSql = "select " + fqnColumn + " from " + table + " where " + parentColumn + "=?";
  -      insertNodeSql = "insert into " +
  -              table +
  -              " (" +
  -              fqnColumn +
  -              ", " +
  -              nodeColumn +
  -              ", " +
  -              parentColumn +
  -              ") values (?, ?, ?)";
  -      updateNodeSql = "update " + table + " set " + nodeColumn + "=? where " + fqnColumn + "=?";
  -      selectNodeSql = "select " + nodeColumn + " from " + table + " where " + fqnColumn + "=?";
  -
  -      createTableDDL = "create table " +
  -              table +
  -              "(" +
  -              fqnColumn +
  -              " " +
  -              fqnType +
  -              " not null, " +
  -              nodeColumn +
  -              " " +
  -              nodeType +
  -              ", " +
  -              parentColumn +
  -              " " +
  -              fqnType +
  -              ", constraint " + primaryKey + " primary key (" + fqnColumn + "))";
   
  -      dropTableDDL = "drop table " + table;
  -      connectionFactoryClass = props.getProperty("cache.jdbc.connection.factory", "org.jboss.cache.loader.NonManagedConnectionFactory");
  +    /**
  +     * Returns the sql string for removing a node and all its children.
  +     */
  +    public String getDeleteNodeSql()
  +    {
  +        return deleteNode;
      }
   
  -   public boolean equals(Object obj)
  -   {
  -      if (obj instanceof JDBCCacheLoaderConfig && equalsExcludingProperties(obj))
  +    /**
  +     * Returns the SQL that returns a node and all its children.
  +     */
  +    public String getRecursiveChildrenSql()
         {
  -         JDBCCacheLoaderConfig other = (JDBCCacheLoaderConfig) obj;
  -
  -         return (this.createTable == other.createTable)
  -                 && safeEquals(createTableDDL, other.createTableDDL)
  -                 && safeEquals(datasourceName, other.datasourceName)
  -                 && safeEquals(deleteAllSql, other.deleteAllSql)
  -                 && safeEquals(deleteNodeSql, other.deleteNodeSql)
  -                 && safeEquals(driverClass, other.driverClass)
  -                 && (dropTable == other.dropTable)
  -                 && safeEquals(dropTableDDL, other.dropTableDDL)
  -                 && safeEquals(insertNodeSql, other.insertNodeSql)
  -                 && safeEquals(jdbcPassword, other.jdbcPassword)
  -                 && safeEquals(jdbcURL, other.jdbcURL)
  -                 && safeEquals(jdbcUser, other.jdbcUser)
  -                 && safeEquals(selectChildFqnsSql, other.selectChildFqnsSql)
  -                 && safeEquals(selectChildNamesSql, other.selectChildNamesSql)
  -                 && safeEquals(selectNodeSql, other.selectNodeSql)
  -                 && safeEquals(table, other.table)
  -                 && safeEquals(updateNodeSql, other.updateNodeSql)
  -                 && safeEquals(updateTableSql, other.updateTableSql)
  -                 && safeEquals(connectionFactoryClass, other.connectionFactoryClass);
  +        return recursiveChildren;
         }
   
  -      return false;
  -   }
   
  -   public int hashCode()
  +    /**
  +     * Returns the SQL that counts all persisted nodes.
  +     */
  +    public String getNodeCountSql()
      {
  -      int result = hashCodeExcludingProperties();
  -      result = 31 * result + (createTable ? 0 : 1);
  -      result = 31 * result + (createTableDDL == null ? 0 : createTableDDL.hashCode());
  -      result = 31 * result + (datasourceName == null ? 0 : datasourceName.hashCode());
  -      result = 31 * result + (deleteAllSql == null ? 0 : deleteAllSql.hashCode());
  -      result = 31 * result + (deleteNodeSql == null ? 0 : deleteNodeSql.hashCode());
  -      result = 31 * result + (driverClass == null ? 0 : driverClass.hashCode());
  -      result = 31 * result + (dropTable ? 0 : 1);
  -      result = 31 * result + (dropTableDDL == null ? 0 : dropTableDDL.hashCode());
  -      result = 31 * result + (insertNodeSql == null ? 0 : insertNodeSql.hashCode());
  -      result = 31 * result + (jdbcPassword == null ? 0 : jdbcPassword.hashCode());
  -      result = 31 * result + (jdbcUser == null ? 0 : jdbcUser.hashCode());
  -      result = 31 * result + (jdbcURL == null ? 0 : jdbcURL.hashCode());
  -      result = 31 * result + (selectChildFqnsSql == null ? 0 : selectChildFqnsSql.hashCode());
  -      result = 31 * result + (selectChildNamesSql == null ? 0 : selectChildNamesSql.hashCode());
  -      result = 31 * result + (selectNodeSql == null ? 0 : selectNodeSql.hashCode());
  -      result = 31 * result + (table == null ? 0 : table.hashCode());
  -      result = 31 * result + (updateNodeSql == null ? 0 : updateNodeSql.hashCode());
  -      result = 31 * result + (updateTableSql == null ? 0 : updateTableSql.hashCode());
  -      result = 31 * result + (connectionFactoryClass == null ? 0 : connectionFactoryClass.hashCode());
  -
  -      return result;
  +        return nodeCountSql;
      }
  -
   }
  \ No newline at end of file
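  
  For reference, here is a minimal, self-contained sketch of what the setProperties() logic above derives from cache.jdbc.sql-concat, assuming the default table and column names (jbosscache, fqn, node) and the default concat(1,2) value:
  
  // Illustrative only: mirrors the replace() calls in JDBCCacheLoaderConfig.setProperties()
  // under the assumed defaults (table=jbosscache, fqn column=fqn, node column=node).
  public class SqlConcatSketch
  {
     public static void main(String[] args)
     {
        String table = "jbosscache";
        String fqnColumn = "fqn";
        String nodeColumn = "node";
        String sqlConcat = "concat(1,2)"; // value of cache.jdbc.sql-concat
  
        // '1' becomes the bind parameter, '2' becomes the SQL literal wrapped around it
        String startingWith = sqlConcat.replace('1', '?').replace("2", "'%'");          // concat(?,'%')
        String appendSeparator = sqlConcat.replace("1", fqnColumn).replace("2", "'/'"); // concat(fqn,'/')
  
        System.out.println("delete from " + table + " where " + appendSeparator + " like " + startingWith);
        // -> delete from jbosscache where concat(fqn,'/') like concat(?,'%')
        System.out.println("select " + fqnColumn + "," + nodeColumn + " from " + table
              + " where " + appendSeparator + " like " + startingWith);
        // -> select fqn,node from jbosscache where concat(fqn,'/') like concat(?,'%')
     }
  }
  
  On databases that only support the standard || operator, the property could presumably be set to something like 1 || 2, which the same replacements turn into fqn || '/' and ? || '%'.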
  
  
  
  1.32      +238 -1010 JBossCache/src/org/jboss/cache/loader/JDBCCacheLoader.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: JDBCCacheLoader.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/loader/JDBCCacheLoader.java,v
  retrieving revision 1.31
  retrieving revision 1.32
  diff -u -b -r1.31 -r1.32
  --- JDBCCacheLoader.java	31 Jan 2007 22:51:17 -0000	1.31
  +++ JDBCCacheLoader.java	10 Feb 2007 17:47:02 -0000	1.32
  @@ -1,1035 +1,263 @@
  -/*
  - * JBoss, the OpenSource J2EE webOS
  - *
  - * Distributable under LGPL license.
  - * See terms of license at gnu.org.
  - */
   package org.jboss.cache.loader;
   
   import org.apache.commons.logging.Log;
   import org.apache.commons.logging.LogFactory;
   import org.jboss.cache.Fqn;
  -import org.jboss.cache.Modification;
  -import org.jboss.cache.util.Util;
  +import org.jboss.cache.config.CacheLoaderConfig;
   import org.jboss.cache.config.CacheLoaderConfig.IndividualCacheLoaderConfig;
  +import org.jboss.cache.marshall.NodeData;
   
  -import java.io.ByteArrayInputStream;
  -import java.io.IOException;
   import java.io.InputStream;
  +import java.io.ObjectInputStream;
   import java.sql.Connection;
  -import java.sql.DatabaseMetaData;
   import java.sql.PreparedStatement;
   import java.sql.ResultSet;
   import java.sql.SQLException;
  -import java.sql.Statement;
  -import java.sql.Types;
  -import java.util.ArrayList;
  -import java.util.Collection;
  -import java.util.Collections;
   import java.util.HashMap;
  -import java.util.HashSet;
   import java.util.List;
  -import java.util.Locale;
   import java.util.Map;
  -import java.util.Properties;
  -import java.util.Set;
   
   /**
  - * JDBC CacheLoader implementation.
  - * <p/>
  - * This implementation uses one table. The table consists of three columns:
  - * <ul>
  - * <li>text column for fqn (which is also a primary key)</li>
  - * <li>blob column for attributes (can contain null)</li>
  - * <li>text column for parent fqn (can contain null)</li>
  - * </ul>
  - * <p/>
  - * The configuration options are:
  + * JDBC implementation of <tt>AdjListJDBCClassLoader</tt>.
  + * Represents a faster alternative to JDBCCacheLoaderOld and relies on the same database structure.
  + * It is backward compatible with data created by the existing <tt>JDBCCacheLoaderOld</tt> implementation.
  + * All configuration elements described in {@link org.jboss.cache.loader.JDBCCacheLoaderOld} also apply to this
  + * implementation.
    * <p/>
  - * <b>Table configuration</b>
  - * <ul>
  - * <li><b>cache.jdbc.table.name</b> - the table name (default is <i>jbosscache</i>)</li>
  - * <li><b>cache.jdbc.table.create</b> - should be true or false, indicates whether to create the table at start phase</li>
  - * <li><b>cache.jdbc.table.drop</b> - should be true or false, indicates whether to drop the table at stop phase</li>
  - * <li><b>cache.jdbc.table.primarykey</b> - the name for the table primary key (default is <i>jbosscache_pk</i>)</li>
  - * <li><b>cache.jdbc.fqn.column</b> - the name for the fqn column (default is <i>fqn</i>)</li>
  - * <li><b>cache.jdbc.fqn.type</b> - the type for the fqn column (default is <i>varchar(255)</i>)</li>
  - * <li><b>cache.jdbc.node.column</b> - the name for the node's contents column (default is <i>node</i>)</li>
  - * <li><b>cache.jdbc.node.type</b> - the type for the node's contents column (default is <i>blob</i>)</li>
  - * <li><b>cache.jdbc.parent.column</b> - the name for the parent fqn column (default is <i>parent</i>)</li>
  - * </ul>
  - * <p/>
  - * <b>DataSource configuration</b>
  - * <ul>
  - * <li><b>cache.jdbc.datasource</b> - the JNDI name of the datasource</li>
  - * </ul>
  - * <p/>
  - * <b>JDBC driver configuration (used when DataSource is not configured)</b>
  - * <ul>
  - * <li><b>cache.jdbc.driver</b> - fully qualified JDBC driver name</li>
  - * <li><b>cache.jdbc.url</b> - URL to connect to the database</li>
  - * <li><b>cache.jdbc.user</b> - the username to use to connect to the database</li>
  - * <li><b>cache.jdbc.password</b> - the password to use to connect to the database</li>
  - * </ul>
    *
  - * @author <a href="mailto:alex at jboss.org">Alexey Loubyansky</a>
  - * @author <a href="mailto:hmesha at novell.com">Hany Mesha </a>
  - * @author <a href="mailto:galder.zamarreno at jboss.com">Galder Zamarreno</a>
  - * @version <tt>$Revision: 1.31 $</tt>
  + * Additional configuration info: <br>
  + * cache.jdbc.sql-concat : DBMS-specific function for concatenating strings. Most likely this will be concat(1,2), but it
  + * might differ for proprietary systems.
  + * 
  + * @author Mircea.Markus at iquestint.com
  + * @version 1.0
    */
  -public class JDBCCacheLoader extends AbstractCacheLoader
  +public class JDBCCacheLoader extends AdjListJDBCClassLoader
   {
  +
      private static final Log log = LogFactory.getLog(JDBCCacheLoader.class);
   
      private JDBCCacheLoaderConfig config;
  -   private ConnectionFactory cf;
  -   private String driverName;
  -   
  -   private static String toUpperCase(String s) {
  -      return s.toUpperCase(Locale.ENGLISH);
  -   }
   
  -   private static String toLowerCase(String s) {
  -      return s.toUpperCase(Locale.ENGLISH);
  -   }   
  -
  -   public void setConfig(IndividualCacheLoaderConfig base)
  +    /**
  +     * Builds an AdjListJDBCClassLoaderConfig based on the supplied base config.
  +     */
  +    protected AdjListJDBCClassLoaderConfig processConfig(CacheLoaderConfig.IndividualCacheLoaderConfig base)
      {
         if (base instanceof JDBCCacheLoaderConfig)
         {
            config = (JDBCCacheLoaderConfig) base;
  -      }
  -      else
  +        } else
         {
            config = new JDBCCacheLoaderConfig(base);
         }
  -
  -      if (config.getDatasourceName() == null)
  -      {
  -         try
  -         {
  -            /* Instantiate an standalone connection factory as per configuration, either explicitly
  -            defined or the default one */
  -            cf = (ConnectionFactory)Util.loadClass(config.getConnectionFactoryClass()).newInstance();
  -         }
  -         catch (Exception e)
  -         {
  -            reportAndRethrowError("Connectionn factory class could not be loaded", e);
  -         }
  -      }
  -      else
  -      {
  -         /* We create the ManagedConnectionFactory instance but the JNDI lookup is no done until
  -            the start method is called, since that's when its registered in its lifecycle */
  -         cf = new ManagedConnectionFactory();
  -      }
  -
  -      /* Regardless of the type of connection factory, we set the configuration */
  -      cf.setConfig(config);
  -   }
  -
  -   public IndividualCacheLoaderConfig getConfig()
  -   {
         return config;
      }
   
      /**
  -    * Fetches child node names (not pathes).
  -    *
  -    * @param fqn parent fqn
  -    * @return a set of child node names or null if there are not children found for the fqn
  -    * @throws Exception
  -    */
  -   public Set<String> getChildrenNames(Fqn fqn) throws Exception
  -   {
  -      Set children = null;
  -      Connection con = null;
  -      PreparedStatement ps = null;
  -      ResultSet rs = null;
  -      try
  -      {
  -         if (log.isDebugEnabled())
  -         {
  -            log.debug("executing sql: " + config.getSelectChildNamesSql() + " (" + fqn + ")");
  -         }
  -
  -         con = cf.getConnection();
  -         ps = con.prepareStatement(config.getSelectChildNamesSql());
  -         ps.setString(1, fqn.toString());
  -         rs = ps.executeQuery();
  -         if (rs.next())
  -         {
  -            children = new HashSet();
  -            do
  -            {
  -               String child = rs.getString(1);
  -               int slashInd = child.lastIndexOf('/');
  -               String name = child.substring(slashInd + 1);
  -               //Fqn childFqn = Fqn.fromString(child);
  -               //String name = (String) childFqn.get(childFqn.size() - 1);
  -               children.add(name);
  -            }
  -            while (rs.next());
  -         }
  -      }
  -      catch (SQLException e)
  -      {
  -         reportAndRethrowError("Failed to get children names for fqn " + fqn, e);
  -      }
  -      finally
  -      {
  -         safeClose(rs);
  -         safeClose(ps);
  -         cf.close(con);
  -      }
  -
  -      return children == null ? null : Collections.unmodifiableSet(children);
  -   }
  -
  -   // See http://jira.jboss.com/jira/browse/JBCACHE-118 for why this is commented out.
  -
  -   /**
  -    * Loads an attribute from the database.
  -    *
  -    * @param name node's fqn
  -    * @param key  attribute's key
  -    * @return attribute's value. Null is returned if
  -    *         <ul>
  -    *         <li>there is no value for the attribute key in the node</li>
  -    *         <li>there is a row in the table for the fqn but the node column contains null</li>
  -    *         <li>there is no row in table for the fqn (should this result in an exception?)</li>
  -    *         </ul>
  -    * @throws Exception
  -    */
  -   //   public Object get(Fqn name, Object key) throws Exception
  -   //   {
  -   //      Map node = loadNode(name);
  -   //      return node == null || node == NULL_NODE_IN_ROW ? null : node.get(key);
  -   //   }
  -
  -   /**
  -    * Returns a map representing a node.
  -    *
  -    * @param name node's fqn
  -    * @return node
  -    * @throws Exception
  -    */
  -   public Map get(Fqn name) throws Exception
  -   {
  -      final Map node = loadNode(name);
  -      return node == NULL_NODE_IN_ROW ? new HashMap(0) : node;
  -   }
  -
  -   /**
  -    * Checks that there is a row for the fqn in the database.
  -    *
  -    * @param name node's fqn
  -    * @return true if there is a row in the database for the given fqn even if the node column is null.
  -    * @throws Exception
  -    */
  -   public boolean exists(Fqn name) throws Exception
  -   {
  -      final Map node = loadNode(name);
  -      return node != null;// && node != NULL_NODE_IN_ROW;
  -   }
  -
  -   /**
  -    * Adds/overrides a value in a node for a key.
  -    * If the node does not exist yet, the node will be created.
  -    * If parent nodes do not exist for the node, empty parent nodes will be created.
  -    *
  -    * @param name  node's fqn
  -    * @param key   attribute's key
  -    * @param value attribute's value
  -    * @return old value associated with the attribute's key or null if there was no value previously
  -    *         associated with the attribute's key
  -    * @throws Exception
  +     * As per interface's contract.
  +     * Performance Note: Optimised O(nodeDepth) db calls. 
       */
      public Object put(Fqn name, Object key, Object value) throws Exception
      {
  -      Map oldNode = loadNode(name);
  -      Object oldValue;
  -      Map node;
  -
  -      if (oldNode == null || oldNode == NULL_NODE_IN_ROW)
  -      {
  -         node = new HashMap();
  -      }
  -      else
  -      {
  -         node = oldNode;
  -      }
  -      oldValue = node.put(key, value);
  -
  -      if (oldNode != null)
  -      {
  -         updateNode(name, node);
  -      }
  -      else
  -      {
  -         if (name.size() > 1)
  -         {
  -            for (int i = 1; i < name.size(); ++i)
  -            {
  -               final Fqn parent = name.getFqnChild(i);
  -               if (!exists(parent))
  -               {
  -                  insertNode(parent, null);
  -               }
  -            }
  -         }
  -         insertNode(name, node);
  -      }
  -
  -      return oldValue;
  +        Map toAdd = new HashMap();
  +        toAdd.put(key, value);
  +        Map existing = _put(name, toAdd);
  +        return existing == null ? null : existing.get(key);
      }
   
      /**
  -    * Adds attributes from the passed in map to the existing node.
  -    * If there is no node for the fqn, a new node will be created.
  -    *
  -    * @param name       node's fqn
  -    * @param attributes attributes
  -    * @throws Exception
  +     * As per interface's contract.
  +     * Performance Note: Optimised O(nodeDepth) db calls.
       */
      public void put(Fqn name, Map attributes) throws Exception
      {
  -      put(name, attributes, false);
  +        _put(name, attributes);
      }
   
      /**
  -    * Removes attribute's value for a key. If after removal the node contains no attributes, the node is nullified.
  -    *
  -    * @param name node's name
  -    * @param key  attribute's key
  -    * @return removed value or null if there was no value for the passed in key
  -    * @throws Exception
  +     * As per interface's contract.
  +     * Performance Note: O(1) db calls.
       */
  -   public Object remove(Fqn name, Object key) throws Exception
  -   {
  -      Object removedValue = null;
  -      Map node = loadNode(name);
  -      if (node != null && node != NULL_NODE_IN_ROW)
  -      {
  -         removedValue = node.remove(key);
  -         if (node.isEmpty())
  -         {
  -            updateNode(name, null);
  -         }
  -         else
  +    public void remove(Fqn fqn) throws Exception
            {
  -            updateNode(name, node);
  -         }
  -      }
  -      return removedValue;
  -   }
  -
  -   /**
  -    * Removes a node and all its children.
  -    * Uses the same connection for all the db work.
  -    *
  -    * @param name node's fqn
  -    * @throws Exception
  -    */
  -   public void remove(Fqn name) throws Exception
  -   {
  -      Connection con = null;
  +        Connection conn = null;
         PreparedStatement ps = null;
         try
         {
  -         if (name.size() == 0)
  -         {
  -            if (log.isDebugEnabled())
  -            {
  -               log.debug("executing sql: " + config.getDeleteAllSql());
  -            }
  -
  -            con = cf.getConnection();
  -            ps = con.prepareStatement(config.getDeleteAllSql());
  -            int deletedRows = ps.executeUpdate();
  -
  -            if (log.isDebugEnabled())
  -            {
  -               log.debug("total rows deleted: " + deletedRows);
  -            }
  -         }
  -         else
  -         {
  -            StringBuffer sql = new StringBuffer(300);
  -            sql.append("delete from ").append(config.getTable()).append(" where fqn in (");
  -            //sql2.append("delete from " + table + " where fqn=? or parent in (");
  -            List fqns = new ArrayList();
  -
  -            addChildrenToDeleteSql(name.toString(), sql, fqns);
  -
  -            sql.append(')');
  -
  -            if (fqns.size() == 1)
  -            {
  -               if (log.isDebugEnabled())
  -               {
  -                  log.debug("executing sql: " + config.getDeleteNodeSql() + "(" + name + ")");
  -               }
  -
  -               con = cf.getConnection();
  -               ps = con.prepareStatement(config.getDeleteNodeSql());
  -               ps.setString(1, name.toString());
  -            }
  -            else
  -            {
  -               if (log.isDebugEnabled())
  -               {
  -                  log.debug("executing sql: " + sql + " " + fqns);
  -               }
  -
  -               con = cf.getConnection();
  -               ps = con.prepareStatement(sql.toString());
  -               for (int i = 0; i < fqns.size(); ++i)
  -               {
  -                  ps.setString(i + 1, (String) fqns.get(i));
  -               }
  -            }
  -
  -            int deletedRows = ps.executeUpdate();
  -
  -            if (log.isDebugEnabled())
  -            {
  -               log.debug("total rows deleted: " + deletedRows);
  -            }
  -         }
  -      }
  -      catch (SQLException e)
  -      {         
  -         reportAndRethrowError("Failed to remove node " + name, e);
  -      }
  -      finally
  +            conn = cf.getConnection();
  +            ps = conn.prepareStatement(config.getDeleteNodeSql());
  +            //appending / at the end avoids this issue: 'a/b/cd' is not a child of 'a/b/c'
  +            ps.setString(1, fqn.isRoot() ? fqn.toString() : fqn + Fqn.SEPARATOR);
  +            ps.executeUpdate();
  +        } catch (SQLException e)
  +        {
  +            log.error("Failed to remove the node : " + fqn, e);
  +            throw new IllegalStateException("Failure while removing sub-tree (" + fqn + ")" + e.getMessage());
  +        } finally
         {
            safeClose(ps);
  -         cf.close(con);
  -      }
  -   }
  -
  -   /**
  -    * Nullifies the node.
  -    *
  -    * @param name node's fqn
  -    * @throws Exception
  -    */
  -   public void removeData(Fqn name) throws Exception
  -   {
  -      updateNode(name, null);
  -   }
  -
  -   /**
  -    * First phase in transaction commit process. The changes are committed if only one phase if requested.
  -    * All the modifications are committed using the same connection.
  -    *
  -    * @param tx            something representing transaction
  -    * @param modifications a list of modifications
  -    * @param one_phase     indicates whether it's one or two phase commit transaction
  -    * @throws Exception
  -    */
  -   public void prepare(Object tx, List<Modification> modifications, boolean one_phase) throws Exception
  -   {
  -      // start a tx
  -      cf.prepare(tx);
  -
  -      try
  -      {
  -         put(modifications);
  -
  -         // commit if it's one phase only
  -         if (one_phase)
  -         {
  -            commit(tx);
  -         }
  -      }
  -      catch (Exception e)
  -      {
  -         // todo should I rollback it here or rollback is supposed to be invoke by someone from outside?
  -         rollback(tx);
  -         // is this ok?
  -         throw e;
  +            cf.close(conn);
         }
      }
   
  -   /**
  -    * Commits a transaction.
  -    *
  -    * @param tx the tx to commit
  -    * @throws Exception
  -    */
  -   public void commit(Object tx) throws Exception
  -   {
  -      cf.commit(tx);
  -   }
   
      /**
  -    * Rolls back a transaction.
  -    *
  -    * @param tx the tx to rollback
  +     * As per the superclass contract.
  +     * Performance Note: O(2) db calls.
       */
  -   public void rollback(Object tx)
  +    protected void getNodeDataList(Fqn fqn, List<NodeData> list) throws Exception
      {
  -      cf.rollback(tx);
  -   }
  -
  -   // Service implementation
  -
  -   public void create() throws Exception
  +        Map nodeAttributes = loadNode(fqn);
  +        if (nodeAttributes == null)
      {
  +            return;
      }
  -
  -   public void start() throws Exception
  -   {
  -      cf.start();
  -      
  -      Connection con = null;
  -      Statement st = null;
  -
  +        Connection connection = null;
  +        PreparedStatement ps = null;
  +        ResultSet rs = null;
         try
         {
  -         con = cf.getConnection();
  -         driverName = getDriverName(con);
  -         if (config.getCreateTable())
  -         {
  -            if (!tableExists(config.getTable(), con))
  -            {
  -               if (log.isDebugEnabled())
  +            connection = cf.getConnection();
  +            ps = connection.prepareStatement(config.getRecursiveChildrenSql());
  +            ps.setString(1, fqn.isRoot() ? fqn.toString() : fqn.toString() + Fqn.SEPARATOR);
  +            rs = ps.executeQuery();
  +            while (rs.next())
                  {
  -                  log.debug("executing ddl: " + config.getCreateTableDDL());
  -               }
  -               st = con.createStatement();
  -               st.executeUpdate(config.getCreateTableDDL());
  -            }
  -         }
  -      }
  -      finally
  +                Map<Object, Object> attributes = readAttributes(rs, 2);
  +                Fqn path = Fqn.fromString(rs.getString(1));
  +                NodeData nodeData = (attributes == null || attributes.isEmpty()) ? new NodeData(path) : new NodeData(path, attributes);
  +                list.add(nodeData);
  +            }
  +        } catch (SQLException e)
  +        {
  +            log.error("Failed to load state for node(" + fqn + ") :" + e.getMessage(), e);
  +            throw new IllegalStateException("Failed to load state for node(" + fqn + ") :" + e.getMessage());
  +        } finally
         {
  -         safeClose(st);
  -         cf.close(con);
  +            safeClose(rs);
  +            safeClose(ps);
  +            cf.close(connection);
         }
      }
   
  -   public void stop()
  +    private Map<Object, Object> readAttributes(ResultSet rs, int index) throws SQLException
      {
  -      if (config.getDropTable())
  +        Map<Object, Object> result;
  +        InputStream is = rs.getBinaryStream(index);
  +        if (is != null && !rs.wasNull())
         {
  -         Connection con = null;
  -         Statement st = null;
  +            ObjectInputStream ois;
            try
            {
  -            if (log.isDebugEnabled())
  -            {
  -               log.debug("executing ddl: " + config.getDropTableDDL());
  -            }
  -
  -            con = cf.getConnection();
  -            st = con.createStatement();
  -            st.executeUpdate(config.getDropTableDDL());
  -            safeClose(st);
  +                Object marshalledNode = getMarshaller().objectFromStream(is);
  +                result = (Map<Object, Object>) marshalledNode;
            }
  -         catch (SQLException e)
  +            catch (Exception e)
            {
  -            log.error("Failed to drop table: " + e.getMessage(), e);
  +                log.error("Failure while reading attribute set from db", e);
  +                throw new SQLException("Failure while reading attribute set from db " + e);
            }
  -         finally
  +        } else
            {
  -            safeClose(st);
  -            cf.close(con);
  -            cf.stop();
  -         }
  +            result = null;
         }
  +        return result;
      }
   
  -   public void destroy()
  -   {
  -   }
  -
  -   // Private
  -
  -   private void addChildrenToDeleteSql(String name, StringBuffer sql, List fqns)
  -           throws SQLException
  +    private Map _put(Fqn name, Map attributes) throws Exception
      {
  -      // for now have to use connection per method, i.e. can't pass the same connection to recursive
  -      // invocations because buggy PointBase driver invalidates result sets.
  -      Connection con = null;
  -      PreparedStatement selChildrenPs = null;
  -      ResultSet rs = null;
  -      try
  +        Map result = null;
  +        Map treeNode = loadNode(name);
  +        if (treeNode == null)
         {
  -         if (log.isDebugEnabled())
  +            addNewSubtree(name, attributes);
  +        } else if (treeNode == NULL_NODE_IN_ROW)
            {
  -            log.debug("executing sql: " + config.getSelectChildFqnsSql() + "(" + name + ")");
  +            updateNode(name, attributes);
  +        } else
  +        {//the node exists and the attribute map is NOT null
  +            Map<Object, Object> newAttributes = new HashMap<Object, Object>(treeNode);
  +            newAttributes.putAll(attributes);//creation sequence is important - we need to overwrite old values
  +            updateNode(name, newAttributes);
  +            result = treeNode;
            }
  -
  -         con = cf.getConnection();
  -         selChildrenPs = con.prepareStatement(config.getSelectChildFqnsSql());
  -         selChildrenPs.setString(1, name);
  -         rs = selChildrenPs.executeQuery();
  -
  -         if (rs.next())
  -         {
  -            do
  -            {
  -               String childStr = rs.getString(1);
  -               addChildrenToDeleteSql(childStr, sql, fqns);
  -            }
  -            while (rs.next());
  +        return result;
            }
   
  -         if (fqns.size() == 0)
  -         {
  -            sql.append("?");
  -         }
  -         else
  +    private void addNewSubtree(Fqn name, Map attributes) throws Exception
            {
  -            sql.append(", ?");
  -         }
  -         fqns.add(name);
  -      }
  -      finally
  -      {
  -         safeClose(rs);
  -         safeClose(selChildrenPs);
  -         cf.close(con);
  -      }
  -   }
  -
  -   public void put(Fqn name, Map attributes, boolean override) throws Exception
  +        Fqn currentNode = name;
  +        do
      {
  -      // JBCACHE-769 -- make a defensive copy
  -      Map attrs = (attributes == null ? null : new HashMap(attributes));
  -
  -      Map oldNode = loadNode(name);
  -      if (oldNode != null)
  +            if (currentNode.equals(name))
         {
  -         if (!override && oldNode != NULL_NODE_IN_ROW && attrs != null)
  +                insertNode(currentNode, attributes);
  +            } else
            {
  -            attrs.putAll(oldNode);
  +                insertNode(currentNode, null);
            }
  -         updateNode(name, attrs);
  -      }
  -      else
  -      {
  -         if (name.size() > 1)
  -         {
  -            for (int i = 1; i < name.size(); ++i)
  +            if (currentNode.isRoot())
               {
  -               final Fqn parent = name.getFqnChild(i);
  -               if (!exists(parent))
  -               {
  -                  insertNode(parent, null);
  -               }
  -            }
  +                break;
            }
  -         insertNode(name, attrs);
  +            currentNode = currentNode.getParent();
  +        } while (!exists(currentNode));
         }
  -   }
  -
  -   /**
  -    * Inserts a node into the database
  -    *
  -    * @param name the fqn
  -    * @param node the node
  -    */
  -   private void insertNode(Fqn name, Map node)
  -   {
  -      Connection con = null;
  -      PreparedStatement ps = null;
  -      try
  -      {
  -         if (log.isDebugEnabled())
  -         {
  -            log.debug("executing sql: " + config.getInsertNodeSql() + " (" + name + ")");
  -         }
  -
  -         con = cf.getConnection();
  -         ps = con.prepareStatement(config.getInsertNodeSql());
  -
  -         ps.setString(1, name.toString());
   
  -         if (node != null)
  -         {
  -            //            ByteArrayOutputStream baos = new ByteArrayOutputStream();
  -            //            ObjectOutputStream oos = new ObjectOutputStream(baos);
  -            //            oos.writeObject(node);
   
  -            byte[] byteStream = getMarshaller().objectToByteBuffer(node);
  -            ByteArrayInputStream bais = new ByteArrayInputStream(byteStream);
  -            ps.setBinaryStream(2, bais, byteStream.length);
  -         }
  -         else
  +    protected Log getLogger()
            {
  -            // a hack to handles the incomp. of SQL server jdbc driver prior to SQL SERVER 2005
  -            if (driverName != null && (driverName.contains("SQLSERVER")
  -                    || driverName.contains("POSTGRESQL")))
  -            {
  -               ps.setNull(2, Types.LONGVARBINARY);
  -            }
  -            else
  -            {
  -               ps.setNull(2, Types.BLOB);
  -            }
  -            //ps.setNull(2, Types.LONGVARBINARY);
  +        return log;
            }
   
  -         if (name.size() == 0)
  -         {
  -            ps.setNull(3, Types.VARCHAR);
  -         }
  -         else
  -         {
  -            ps.setString(3, name.getFqnChild(name.size() - 1).toString());
  -         }
  -
  -         int rows = ps.executeUpdate();
  -         if (rows != 1)
  -         {
  -            throw new IllegalStateException("Expected one insert row but got " + rows);
  -         }
  -      }
  -      catch (RuntimeException e)
  -      {
  -         throw e;
  -      }
  -      catch (Exception e)
  -      {
  -         reportAndRethrowError("Failed to insert node", e);
  -      }
  -      finally
  -      {
  -         safeClose(ps);
  -         cf.close(con);
  -      }
  -   }
   
      /**
  -    * Updates a node in the database.
  -    *
  -    * @param name the fqn
  -    * @param node new node value
  +     * Start is overridden for the sake of backward compatibility only.
  +     * Here is the issue: the old implementation does not create Fqn.ROOT unless specifically told to.
  +     * As per put's contract, when calling put('/a/b/c', 'key', 'value') all parent nodes should be created up to the root.
  +     * The root is not created, though. The compatibility problem shows up when loading the ENTIRE state:
  +     * the implementation first checks the node's existence and, based on that, continues or not. As the root is not
  +     * persisted, nothing is loaded.
       */
  -   private void updateNode(Fqn name, Map node)
  -   {
  -      Connection con = null;
  -      PreparedStatement ps = null;
  -      try
  -      {
  -         if (log.isDebugEnabled())
  -         {
  -            log.debug("executing sql: " + config.getUpdateNodeSql());
  -         }
  -
  -         con = cf.getConnection();
  -         ps = con.prepareStatement(config.getUpdateNodeSql());
  -
  -         if (node == null)
  -         {
  -            //ps.setNull(1, Types.BLOB);
  -            //            ps.setNull(1, Types.LONGVARBINARY);
  -            // don't set it to null - simply use an empty hash map.
  -            node = new HashMap(0);
  -         }
  -
  -         //         ByteArrayOutputStream baos = new ByteArrayOutputStream();
  -         //         ObjectOutputStream oos = new ObjectOutputStream(baos);
  -         //         oos.writeObject(node);
  -
  -         byte[] byteStream = getMarshaller().objectToByteBuffer(node);
  -         ByteArrayInputStream bais = new ByteArrayInputStream(byteStream);
  -         ps.setBinaryStream(1, bais, byteStream.length);
  -
  -         ps.setString(2, name.toString());
  -
  -         int rows = ps.executeUpdate();
  -         //         if (rows != 1)
  -         //         {
  -         //            throw new IllegalStateException("Expected one updated row but got " + rows);
  -         //         }
  -      }
  -      catch (Exception e)
  +    public void start() throws Exception
         {
  -         reportAndRethrowError("Failed to update node for fqn " + name, e);
  -      }
  -      finally
  +        super.start();
  +        if (!exists(Fqn.ROOT) && getNodeCount() > 0)
         {
  -         safeClose(ps);
  -         cf.close(con);
  +            put(Fqn.ROOT, new HashMap());
         }
      }
   
      /**
  -    * Loads a node from the database.
  -    *
  -    * @param name the fqn
  -    * @return non-null Map representing the node,
  -    *         null if there is no row with the fqn in the table,
  -    *         NULL_NODE_IN_ROW if there is a row in the table with the fqn but the node column contains null.
  +     * Returns the number of persisted nodes.
       */
  -   private Map loadNode(Fqn name)
  +    public int getNodeCount() throws Exception
      {
  -      boolean rowExists = false;
  -      Map oldNode = null;
  -      Connection con = null;
  +        Connection conn = null;
         PreparedStatement ps = null;
         ResultSet rs = null;
         try
         {
  -         if (log.isDebugEnabled())
  -         {
  -            log.debug("executing sql: " + config.getSelectNodeSql() + " (" + name + ")");
  -         }
  -
  -         con = cf.getConnection();
  -         ps = con.prepareStatement(config.getSelectNodeSql());
  -         ps.setString(1, name.toString());
  -
  +            conn = cf.getConnection();
  +            ps = conn.prepareStatement(config.getNodeCountSql());
            rs = ps.executeQuery();
  -
  -         if (rs.next())
  -         {
  -            rowExists = true;
  -            InputStream is = rs.getBinaryStream(1);
  -            if (is != null && !rs.wasNull())
  -            {
  -               try
  -               {
  -                  //                 ObjectInputStream ois = null;
  -                  //                 ois = new ObjectInputStream(is);
  -                  //                 Object marshalledNode = ois.readObject();
  -
  -                  // deserialize result
  -                  Object marshalledNode = getMarshaller().objectFromStream(is);
  -                  oldNode = (Map) marshalledNode;
  -               }
  -               catch (Exception e)
  -               {
  -                  throw new Exception("Unable to load to deserialize result: ", e);
  -               }
  -               finally
  -               {
  -                  safeClose(is);
  -               }
  -            }
  -         }
  -      }
  -      catch (Exception e)
  -      {
  -         reportAndRethrowError("Failed to load node for fqn " + name, e);
  -      }
  -      finally
  +            rs.next();//count(*) will always return one row
  +            return rs.getInt(1);
  +        } catch (Exception e)
  +        {
  +            log.error("Failure while trying to get the count of persisted nodes: " + e.getMessage(), e);
  +            throw new IllegalStateException("Failure while trying to get the count of persisted nodes: " + e.getMessage());
  +        } finally
         {
            safeClose(rs);
            safeClose(ps);
  -         cf.close(con);
  -      }
  -
  -      return oldNode == null ? (rowExists ? NULL_NODE_IN_ROW : null) : oldNode;
  -   }
  -
  -   private static void safeClose(InputStream is)
  -   {
  -      if (is != null)
  -      {
  -         try
  -         {
  -            is.close();
  -         }
  -         catch (IOException e)
  -         {
  -            log.warn("Failed to close input stream: " + e.getMessage());
  +            cf.close(conn);
            }
         }
  -   }
  -
  -   private static void safeClose(Statement st)
  -   {
  -      if (st != null)
  -      {
  -         try
  -         {
  -            st.close();
  -         }
  -         catch (SQLException e)
  -         {
  -            log.warn("Failed to close statement: " + e.getMessage());
  -         }
  -      }
  -   }
  -
  -   private static void safeClose(ResultSet rs)
  -   {
  -      if (rs != null)
  -      {
  -         try
  -         {
  -            rs.close();
  -         }
  -         catch (SQLException e)
  -         {
  -            log.warn("Failed to close result set: " + e.getMessage());
  -         }
  -      }
  -   }
  -
  -   private static String getDriverName(Connection con)
  -   {
  -      if (con == null) return null;
  -      try
  -      {
  -         DatabaseMetaData dmd = con.getMetaData();
  -         return toUpperCase(dmd.getDriverName());
  -      }
  -      catch (SQLException e)
  -      {
  -         // This should not happen. A J2EE compatiable JDBC driver is
  -         // required to fully support metadata.
  -         throw new IllegalStateException("Error while getting the driver name", e);
  -      }
  -   }
  -
  -   static String getRequiredProperty(Properties props, String name)
  -   {
  -      String value = props.getProperty(name);
  -      if (value == null)
  -      {
  -         throw new IllegalStateException("Missing required property: " + name);
  -      }
  -      return value;
  -   }
  -
  -   private static boolean tableExists(String tableName, Connection con)
  -   {
  -      ResultSet rs = null;
  -      try
  -      {
  -         // (a j2ee spec compatible jdbc driver has to fully
  -         // implement the DatabaseMetaData)
  -         DatabaseMetaData dmd = con.getMetaData();
  -         String catalog = con.getCatalog();
  -         String schema = null;
  -         String quote = dmd.getIdentifierQuoteString();
  -         if (tableName.startsWith(quote))
  -         {
  -            if (!tableName.endsWith(quote))
  -            {
  -               throw new IllegalStateException("Mismatched quote in table name: " + tableName);
  -            }
  -            int quoteLength = quote.length();
  -            tableName = tableName.substring(quoteLength, tableName.length() - quoteLength);
  -            if (dmd.storesLowerCaseQuotedIdentifiers())
  -            {
  -               tableName = toLowerCase(tableName);
  -            }
  -            else if (dmd.storesUpperCaseQuotedIdentifiers())
  -            {
  -               tableName = toUpperCase(tableName);
  -            }
  -         }
  -         else
  -         {
  -            if (dmd.storesLowerCaseIdentifiers())
  -            {
  -               tableName = tableName.toLowerCase();
  -            }
  -            else if (dmd.storesUpperCaseIdentifiers())
  -            {
  -               tableName = toUpperCase(tableName);
  -            }
  -         }
  -
  -         int dotIndex;
  -         if ((dotIndex = tableName.indexOf('.')) != -1)
  -         {
  -            // Yank out schema name ...
  -            schema = tableName.substring(0, dotIndex);
  -            tableName = tableName.substring(dotIndex + 1);
  -         }
  -
  -         rs = dmd.getTables(catalog, schema, tableName, null);
  -         return rs.next();
  -      }
  -      catch (SQLException e)
  -      {
  -         // This should not happen. A J2EE compatiable JDBC driver is
  -         // required fully support metadata.
  -         throw new IllegalStateException("Error while checking if table aleady exists " + tableName, e);
  -      }
  -      finally
  -      {
  -         safeClose(rs);
  -      }
  -   }
  -
  -   private void reportAndRethrowError(String message, Exception cause) throws IllegalStateException
  -   {
  -      log.error(message, cause);
  -      throw new IllegalStateException(message, cause);
  -   }
  -
  -   // Inner
  -
  -   private static final Map NULL_NODE_IN_ROW = new Map()
  -   {
  -      public int size()
  -      {
  -         throw new UnsupportedOperationException();
  -      }
  -
  -      public void clear()
  -      {
  -         throw new UnsupportedOperationException();
  -      }
  -
  -      public boolean isEmpty()
  -      {
  -         throw new UnsupportedOperationException();
  -      }
   
  -      public boolean containsKey(Object key)
  -      {
  -         throw new UnsupportedOperationException();
  -      }
  -
  -      public boolean containsValue(Object value)
  -      {
  -         throw new UnsupportedOperationException();
  -      }
  -
  -      public Collection values()
  -      {
  -         throw new UnsupportedOperationException();
  -      }
  -
  -      public void putAll(Map t)
  -      {
  -         throw new UnsupportedOperationException();
  -      }
  -
  -      public Set entrySet()
  -      {
  -         throw new UnsupportedOperationException();
  -      }
  -
  -      public Set keySet()
  -      {
  -         throw new UnsupportedOperationException();
  -      }
  -
  -      public Object get(Object key)
  -      {
  -         throw new UnsupportedOperationException();
  -      }
  -
  -      public Object remove(Object key)
  -      {
  -         throw new UnsupportedOperationException();
  -      }
  -
  -      public Object put(Object key, Object value)
  +    public IndividualCacheLoaderConfig getConfig()
         {
  -         throw new UnsupportedOperationException();
  +        return config;
         }
  -   };
   }
  
  
  
  1.2       +1 -1      JBossCache/src/org/jboss/cache/loader/NonManagedConnectionFactory.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: NonManagedConnectionFactory.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/loader/NonManagedConnectionFactory.java,v
  retrieving revision 1.1
  retrieving revision 1.2
  diff -u -b -r1.1 -r1.2
  --- NonManagedConnectionFactory.java	21 Jan 2007 15:46:42 -0000	1.1
  +++ NonManagedConnectionFactory.java	10 Feb 2007 17:47:02 -0000	1.2
  @@ -30,7 +30,7 @@
      private String pwd;
      private String driverClass;
   
  -   public void setConfig(JDBCCacheLoaderConfig config)
  +   public void setConfig(AdjListJDBCClassLoaderConfig config)
      {
         url = config.getJdbcURL();
         usr = config.getJdbcUser();
  
  
  
  1.2       +1 -1      JBossCache/src/org/jboss/cache/loader/C3p0ConnectionFactory.java
  
  (In the diff below, changes in quantity of whitespace are not shown.)
  
  Index: C3p0ConnectionFactory.java
  ===================================================================
  RCS file: /cvsroot/jboss/JBossCache/src/org/jboss/cache/loader/C3p0ConnectionFactory.java,v
  retrieving revision 1.1
  retrieving revision 1.2
  diff -u -b -r1.1 -r1.2
  --- C3p0ConnectionFactory.java	21 Jan 2007 15:46:42 -0000	1.1
  +++ C3p0ConnectionFactory.java	10 Feb 2007 17:47:02 -0000	1.2
  @@ -27,7 +27,7 @@
   
      private DataSource ds;
   
  -   public void setConfig(JDBCCacheLoaderConfig config)
  +   public void setConfig(AdjListJDBCClassLoaderConfig config)
      {
         super.setConfig(config);
   
  
  
  
  1.1      date: 2007/02/10 17:47:02;  author: mmarkus;  state: Exp;JBossCache/src/org/jboss/cache/loader/JDBCCacheLoaderOld.java
  
  Index: JDBCCacheLoaderOld.java
  ===================================================================
  /*
   * JBoss, the OpenSource J2EE webOS
   *
   * Distributable under LGPL license.
   * See terms of license at gnu.org.
   */
  package org.jboss.cache.loader;
  
  import org.apache.commons.logging.Log;
  import org.apache.commons.logging.LogFactory;
  import org.jboss.cache.Fqn;
  import org.jboss.cache.config.CacheLoaderConfig.IndividualCacheLoaderConfig;
  
  import java.sql.Connection;
  import java.sql.PreparedStatement;
  import java.sql.ResultSet;
  import java.sql.SQLException;
  import java.util.ArrayList;
  import java.util.HashMap;
  import java.util.List;
  import java.util.Map;
  
  /**
   * JDBC CacheLoader implementation.
   * <p/>
   * This implementation uses one table. The table consists of three columns:
   * <ul>
   * <li>text column for fqn (which is also a primary key)</li>
   * <li>blob column for attributes (can contain null)</li>
   * <li>text column for parent fqn (can contain null)</li>
   * </ul>
   * <p/>
   * The configuration options are:
   * <p/>
   * <b>Table configuration</b>
   * <ul>
   * <li><b>cache.jdbc.table.name</b> - the table name (default is <i>jbosscache</i>)</li>
 * <li><b>cache.jdbc.table.create</b> - true or false, indicates whether to create the table during the start phase</li>
 * <li><b>cache.jdbc.table.drop</b> - true or false, indicates whether to drop the table during the stop phase</li>
   * <li><b>cache.jdbc.table.primarykey</b> - the name for the table primary key (default is <i>jbosscache_pk</i>)</li>
   * <li><b>cache.jdbc.fqn.column</b> - the name for the fqn column (default is <i>fqn</i>)</li>
   * <li><b>cache.jdbc.fqn.type</b> - the type for the fqn column (default is <i>varchar(255)</i>)</li>
   * <li><b>cache.jdbc.node.column</b> - the name for the node's contents column (default is <i>node</i>)</li>
   * <li><b>cache.jdbc.node.type</b> - the type for the node's contents column (default is <i>blob</i>)</li>
   * <li><b>cache.jdbc.parent.column</b> - the name for the parent fqn column (default is <i>parent</i>)</li>
   * </ul>
   * <p/>
   * <b>DataSource configuration</b>
   * <ul>
   * <li><b>cache.jdbc.datasource</b> - the JNDI name of the datasource</li>
   * </ul>
   * <p/>
   * <b>JDBC driver configuration (used when DataSource is not configured)</b>
   * <ul>
   * <li><b>cache.jdbc.driver</b> - fully qualified JDBC driver name</li>
   * <li><b>cache.jdbc.url</b> - URL to connect to the database</li>
   * <li><b>cache.jdbc.user</b> - the username to use to connect to the database</li>
   * <li><b>cache.jdbc.password</b> - the password to use to connect to the database</li>
   * </ul>
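 * <p/>
 * For illustration only (hypothetical values, not taken from any shipped configuration), a minimal
 * driver-based setup could look like:
 * <pre>
 * cache.jdbc.table.name=jbosscache
 * cache.jdbc.table.create=true
 * cache.jdbc.table.drop=true
 * cache.jdbc.driver=org.hsqldb.jdbcDriver
 * cache.jdbc.url=jdbc:hsqldb:mem:jbosscache
 * cache.jdbc.user=sa
 * cache.jdbc.password=
 * </pre>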
   *
   * @author <a href="mailto:alex at jboss.org">Alexey Loubyansky</a>
   * @author <a href="mailto:hmesha at novell.com">Hany Mesha </a>
   * @author <a href="mailto:galder.zamarreno at jboss.com">Galder Zamarreno</a>
   * @version <tt>$Revision: 1.1 $</tt>
 * @deprecated please use the newer {@link JDBCCacheLoader} instead.
   */
  public class JDBCCacheLoaderOld extends AdjListJDBCClassLoader
  {
      private static final Log log = LogFactory.getLog(JDBCCacheLoaderOld.class);
  
      private AdjListJDBCClassLoaderConfig config;
  
  
      public AdjListJDBCClassLoaderConfig processConfig(IndividualCacheLoaderConfig base)
      {
        if (base instanceof AdjListJDBCClassLoaderConfig)
          {
              config = (AdjListJDBCClassLoaderConfig) base;
          } else
          {
              config = new AdjListJDBCClassLoaderConfig(base);
          }
          return config;
      }
  
      public IndividualCacheLoaderConfig getConfig()
      {
          return config;
      }
  
  
      /**
       * Adds/overrides a value in a node for a key.
       * If the node does not exist yet, the node will be created.
       * If parent nodes do not exist for the node, empty parent nodes will be created.
       *
       * @param name  node's fqn
       * @param key   attribute's key
       * @param value attribute's value
       * @return old value associated with the attribute's key or null if there was no value previously
       *         associated with the attribute's key
       * @throws Exception
       */
      public Object put(Fqn name, Object key, Object value) throws Exception
      {
          Map oldNode = loadNode(name);
          Object oldValue;
          Map node;
  
          if (oldNode == null || oldNode == NULL_NODE_IN_ROW)
          {
              node = new HashMap();
          } else
          {
              node = oldNode;
          }
          oldValue = node.put(key, value);
  
          if (oldNode != null)
          {
              updateNode(name, node);
          } else
          {
              if (name.size() > 1)
              {
                  for (int i = 1; i < name.size(); ++i)
                  {
                      final Fqn parent = name.getFqnChild(i);
                      if (!exists(parent))
                      {
                          insertNode(parent, null);
                      }
                  }
              }
              insertNode(name, node);
          }
  
          return oldValue;
      }
  
      /**
       * Adds attributes from the passed in map to the existing node.
       * If there is no node for the fqn, a new node will be created.
       *
       * @param name       node's fqn
       * @param attributes attributes
       * @throws Exception
       */
      public void put(Fqn name, Map attributes) throws Exception
      {
          put(name, attributes, false);
      }
  
      /**
       * Removes a node and all its children.
       * Uses the same connection for all the db work.
       *
       * @param name node's fqn
       * @throws Exception
       */
      public void remove(Fqn name) throws Exception
      {
          Connection con = null;
          PreparedStatement ps = null;
          try
          {
              if (name.size() == 0)
              {
                  if (log.isDebugEnabled())
                  {
                      log.debug("executing sql: " + config.getDeleteAllSql());
                  }
  
                  con = cf.getConnection();
                  ps = con.prepareStatement(config.getDeleteAllSql());
                  int deletedRows = ps.executeUpdate();
  
                  if (log.isDebugEnabled())
                  {
                      log.debug("total rows deleted: " + deletedRows);
                  }
              } else
              {
                  StringBuffer sql = new StringBuffer(300);
                  sql.append("delete from ").append(config.getTable()).append(" where fqn in (");
                  //sql2.append("delete from " + table + " where fqn=? or parent in (");
                  List fqns = new ArrayList();
  
                  addChildrenToDeleteSql(name.toString(), sql, fqns);
  
                  sql.append(')');
  
                  if (fqns.size() == 1)
                  {
                      if (log.isDebugEnabled())
                      {
                          log.debug("executing sql: " + config.getDeleteNodeSql() + "(" + name + ")");
                      }
  
                      con = cf.getConnection();
                      ps = con.prepareStatement(config.getDeleteNodeSql());
                      ps.setString(1, name.toString());
                  } else
                  {
                      if (log.isDebugEnabled())
                      {
                          log.debug("executing sql: " + sql + " " + fqns);
                      }
  
                      con = cf.getConnection();
                      ps = con.prepareStatement(sql.toString());
                      for (int i = 0; i < fqns.size(); ++i)
                      {
                          ps.setString(i + 1, (String) fqns.get(i));
                      }
                  }
  
                  int deletedRows = ps.executeUpdate();
  
                  if (log.isDebugEnabled())
                  {
                      log.debug("total rows deleted: " + deletedRows);
                  }
              }
          }
          catch (SQLException e)
          {
              reportAndRethrowError("Failed to remove node " + name, e);
          }
          finally
          {
              safeClose(ps);
              cf.close(con);
          }
      }
  
  
  
      // Private
  
      private void addChildrenToDeleteSql(String name, StringBuffer sql, List fqns)
              throws SQLException
      {
        // For now we have to use a connection per method invocation, i.e. we can't pass the same connection
        // to recursive invocations, because the buggy PointBase driver invalidates result sets.
          Connection con = null;
          PreparedStatement selChildrenPs = null;
          ResultSet rs = null;
          try
          {
              if (log.isDebugEnabled())
              {
                  log.debug("executing sql: " + config.getSelectChildFqnsSql() + "(" + name + ")");
              }
  
              con = cf.getConnection();
              selChildrenPs = con.prepareStatement(config.getSelectChildFqnsSql());
              selChildrenPs.setString(1, name);
              rs = selChildrenPs.executeQuery();
  
              if (rs.next())
              {
                  do
                  {
                      String childStr = rs.getString(1);
                      addChildrenToDeleteSql(childStr, sql, fqns);
                  }
                  while (rs.next());
              }
  
              if (fqns.size() == 0)
              {
                  sql.append("?");
              } else
              {
                  sql.append(", ?");
              }
              fqns.add(name);
          }
          finally
          {
              safeClose(rs);
              safeClose(selChildrenPs);
              cf.close(con);
          }
      }
  
      public void put(Fqn name, Map attributes, boolean override) throws Exception
      {
          // JBCACHE-769 -- make a defensive copy
          Map attrs = (attributes == null ? null : new HashMap(attributes));
  
          Map oldNode = loadNode(name);
          if (oldNode != null)
          {
              if (!override && oldNode != NULL_NODE_IN_ROW && attrs != null)
              {
                  attrs.putAll(oldNode);
              }
              updateNode(name, attrs);
          } else
          {
              if (name.size() > 1)
              {
                  for (int i = 1; i < name.size(); ++i)
                  {
                      final Fqn parent = name.getFqnChild(i);
                      if (!exists(parent))
                      {
                          insertNode(parent, null);
                      }
                  }
              }
              insertNode(name, attrs);
          }
      }
  
  
      protected Log getLogger()
      {
          return log;
      }
  
  }
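
The parent-creation walk in JDBCCacheLoaderOld.put() above inserts any missing ancestors before the
node itself. A minimal standalone sketch of that walk, assuming only the Fqn API used in the listing
(Fqn.fromString(), size(), getFqnChild()); the class name and printed messages are hypothetical:

import org.jboss.cache.Fqn;

public class FqnParentWalkSketch
{
    public static void main(String[] args)
    {
        Fqn name = Fqn.fromString("/a/b/c");
        // getFqnChild(i) is the Fqn made of the first i elements, so the loop
        // visits /a and then /a/b; put() inserts whichever of these do not exist yet.
        for (int i = 1; i < name.size(); ++i)
        {
            Fqn parent = name.getFqnChild(i);
            System.out.println("missing ancestor to insert: " + parent);
        }
        System.out.println("then insert the node itself: " + name);
    }
}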
  
  
  
  1.1      date: 2007/02/10 17:47:02;  author: mmarkus;  state: Exp;JBossCache/src/org/jboss/cache/loader/AdjListJDBCClassLoaderConfig.java
  
  Index: AdjListJDBCClassLoaderConfig.java
  ===================================================================
  package org.jboss.cache.loader;
  
  import org.jboss.cache.config.CacheLoaderConfig.IndividualCacheLoaderConfig;
  
  import java.util.Properties;
  
  /**
   * AdjListJDBCClassLoaderConfig
   *
   * @author <a href="mailto:manik at jboss.org">Manik Surtani (manik at jboss.org)</a>
   * @author <a href="mailto:galder.zamarreno at jboss.com">Galder Zamarreno</a>
   */
  public class AdjListJDBCClassLoaderConfig extends IndividualCacheLoaderConfig
  {
      /**
       * The serialVersionUID
       */
      private static final long serialVersionUID = -8371846151643130281L;
  
      private boolean createTable;
      private String createTableDDL;
      private String datasourceName;
      private String deleteAllSql;
      private String deleteNodeSql;
      private boolean dropTable;
      private String dropTableDDL;
      private String driverClass;
      private String insertNodeSql;
      private String jdbcURL;
      private String jdbcUser;
      private String jdbcPassword;
      private String selectChildFqnsSql;
      private String selectChildNamesSql;
      private String selectNodeSql;
      private String updateNodeSql;
      private String updateTableSql;
      private String connectionFactoryClass;
      protected String table;
      protected String nodeColumn;
      protected String fqnColumn;
  
      public AdjListJDBCClassLoaderConfig()
      {
          setClassName(AdjListJDBCClassLoader.class.getName());
      }
  
      /**
       * For use by {@link JDBCCacheLoaderOld}.
       *
       * @param base generic config object created by XML parsing.
       */
      AdjListJDBCClassLoaderConfig(IndividualCacheLoaderConfig base)
      {
          setClassName(AdjListJDBCClassLoader.class.getName());
          populateFromBaseConfig(base);
      }
  
      public boolean getCreateTable()
      {
          return createTable;
      }
  
      public void setCreateTable(boolean createTable)
      {
          testImmutability("createTable");
          this.createTable = createTable;
      }
  
      public String getCreateTableDDL()
      {
          return createTableDDL;
      }
  
      public void setCreateTableDDL(String createTableDDL)
      {
          testImmutability("createTableDDL");
          this.createTableDDL = createTableDDL;
      }
  
      public String getDatasourceName()
      {
          return datasourceName;
      }
  
      public void setDatasourceName(String datasourceName)
      {
          testImmutability("datasourceName");
          this.datasourceName = datasourceName;
      }
  
      public String getDeleteAllSql()
      {
          return deleteAllSql;
      }
  
      public void setDeleteAllSql(String deleteAllSql)
      {
          testImmutability("deleteAllSql");
          this.deleteAllSql = deleteAllSql;
      }
  
      public String getDeleteNodeSql()
      {
          return deleteNodeSql;
      }
  
      public void setDeleteNodeSql(String deleteNodeSql)
      {
          testImmutability("deleteNodeSql");
          this.deleteNodeSql = deleteNodeSql;
      }
  
      public String getDriverClass()
      {
          return driverClass;
      }
  
      public void setDriverClass(String driverClass)
      {
          testImmutability("driverClass");
          this.driverClass = driverClass;
      }
  
      public boolean getDropTable()
      {
          return dropTable;
      }
  
      public void setDropTable(boolean dropTable)
      {
          testImmutability("dropTable");
          this.dropTable = dropTable;
      }
  
      public String getInsertNodeSql()
      {
          return insertNodeSql;
      }
  
      public void setInsertNodeSql(String insertNodeSql)
      {
          testImmutability("insertNodeSql");
          this.insertNodeSql = insertNodeSql;
      }
  
      public String getSelectChildFqnsSql()
      {
          return selectChildFqnsSql;
      }
  
      public void setSelectChildFqnsSql(String selectChildFqnsSql)
      {
          testImmutability("selectChildFqnsSql");
          this.selectChildFqnsSql = selectChildFqnsSql;
      }
  
      public String getSelectNodeSql()
      {
          return selectNodeSql;
      }
  
      public void setSelectNodeSql(String selectNodeSql)
      {
          testImmutability("selectNodeSql");
          this.selectNodeSql = selectNodeSql;
      }
  
      public String getTable()
      {
          return table;
      }
  
      public void setTable(String table)
      {
          testImmutability("table");
          this.table = table;
      }
  
      public String getUpdateTableSql()
      {
          return updateTableSql;
      }
  
      public void setUpdateTableSql(String updateTableSql)
      {
          testImmutability("updateTableSql");
          this.updateTableSql = updateTableSql;
      }
  
      public String getDropTableDDL()
      {
          return dropTableDDL;
      }
  
      public void setDropTableDDL(String dropTableDDL)
      {
          testImmutability("dropTableDDL");
          this.dropTableDDL = dropTableDDL;
      }
  
      public String getSelectChildNamesSql()
      {
          return selectChildNamesSql;
      }
  
      public void setSelectChildNamesSql(String selectChildNamesSql)
      {
          testImmutability("selectChildNamesSql");
          this.selectChildNamesSql = selectChildNamesSql;
      }
  
      public String getUpdateNodeSql()
      {
          return updateNodeSql;
      }
  
      public void setUpdateNodeSql(String updateNodeSql)
      {
          testImmutability("updateNodeSql");
          this.updateNodeSql = updateNodeSql;
      }
  
      public String getJdbcPassword()
      {
          return jdbcPassword;
      }
  
      public void setJdbcPassword(String jdbcPassword)
      {
          testImmutability("jdbcPassword");
          this.jdbcPassword = jdbcPassword;
      }
  
      public String getJdbcURL()
      {
          return jdbcURL;
      }
  
      public void setJdbcURL(String jdbcURL)
      {
          testImmutability("jdbcURL");
          this.jdbcURL = jdbcURL;
      }
  
      public String getJdbcUser()
      {
          return jdbcUser;
      }
  
      public void setJdbcUser(String jdbcUser)
      {
          testImmutability("jdbcUser");
          this.jdbcUser = jdbcUser;
      }
  
      public String getConnectionFactoryClass()
      {
          return connectionFactoryClass;
      }
  
      public void setConnectionFactoryClass(String connectionFactoryClass)
      {
          testImmutability("connectionFactoryClass");
          this.connectionFactoryClass = connectionFactoryClass;
      }
  
      public void setProperties(Properties props)
      {
          super.setProperties(props);
          datasourceName = props.getProperty("cache.jdbc.datasource");
          if (datasourceName == null)
          {
              this.driverClass = AdjListJDBCClassLoader.getRequiredProperty(props, "cache.jdbc.driver");
              this.jdbcURL = AdjListJDBCClassLoader.getRequiredProperty(props, "cache.jdbc.url");
              this.jdbcUser = AdjListJDBCClassLoader.getRequiredProperty(props, "cache.jdbc.user");
              this.jdbcPassword = AdjListJDBCClassLoader.getRequiredProperty(props, "cache.jdbc.password");
  
              if (log.isDebugEnabled())
              {
                  log.debug("Properties: " +
                          "cache.jdbc.url=" +
                          jdbcURL +
                          ", cache.jdbc.driver=" +
                          driverClass +
                          ", cache.jdbc.user=" +
                          jdbcUser +
                          ", cache.jdbc.password=" +
                          jdbcPassword +
                          ", cache.jdbc.table=" + table);
              }
          }
  
          String prop = props.getProperty("cache.jdbc.table.create");
          this.createTable = (prop == null || Boolean.valueOf(prop));
          prop = props.getProperty("cache.jdbc.table.drop");
          this.dropTable = (prop == null || Boolean.valueOf(prop));
  
          this.table = props.getProperty("cache.jdbc.table.name", "jbosscache");
          String primaryKey = props.getProperty("cache.jdbc.table.primarykey", "jbosscache_pk");
          fqnColumn = props.getProperty("cache.jdbc.fqn.column", "fqn");
          String fqnType = props.getProperty("cache.jdbc.fqn.type", "varchar(255)");
          nodeColumn = props.getProperty("cache.jdbc.node.column", "node");
          String nodeType = props.getProperty("cache.jdbc.node.type", "blob");
          String parentColumn = props.getProperty("cache.jdbc.parent.column", "parent");
  
          selectChildNamesSql = "select " + fqnColumn + " from " + table + " where " + parentColumn + "=?";
          deleteNodeSql = "delete from " + table + " where " + fqnColumn + "=?";
          deleteAllSql = "delete from " + table;
          selectChildFqnsSql = "select " + fqnColumn + " from " + table + " where " + parentColumn + "=?";
          insertNodeSql = "insert into " +
                  table +
                  " (" +
                  fqnColumn +
                  ", " +
                  nodeColumn +
                  ", " +
                  parentColumn +
                  ") values (?, ?, ?)";
          updateNodeSql = "update " + table + " set " + nodeColumn + "=? where " + fqnColumn + "=?";
          selectNodeSql = "select " + nodeColumn + " from " + table + " where " + fqnColumn + "=?";
  
          createTableDDL = "create table " +
                  table +
                  "(" +
                  fqnColumn +
                  " " +
                  fqnType +
                  " not null, " +
                  nodeColumn +
                  " " +
                  nodeType +
                  ", " +
                  parentColumn +
                  " " +
                  fqnType +
                  ", constraint " + primaryKey + " primary key (" + fqnColumn + "))";
  
          dropTableDDL = "drop table " + table;
          connectionFactoryClass = props.getProperty("cache.jdbc.connection.factory", "org.jboss.cache.loader.NonManagedConnectionFactory");
      }
  
      public boolean equals(Object obj)
      {
          if (obj instanceof AdjListJDBCClassLoaderConfig && equalsExcludingProperties(obj))
          {
              AdjListJDBCClassLoaderConfig other = (AdjListJDBCClassLoaderConfig) obj;
  
              return (this.createTable == other.createTable)
                      && safeEquals(createTableDDL, other.createTableDDL)
                      && safeEquals(datasourceName, other.datasourceName)
                      && safeEquals(deleteAllSql, other.deleteAllSql)
                      && safeEquals(deleteNodeSql, other.deleteNodeSql)
                      && safeEquals(driverClass, other.driverClass)
                      && (dropTable == other.dropTable)
                      && safeEquals(dropTableDDL, other.dropTableDDL)
                      && safeEquals(insertNodeSql, other.insertNodeSql)
                      && safeEquals(jdbcPassword, other.jdbcPassword)
                      && safeEquals(jdbcURL, other.jdbcURL)
                      && safeEquals(jdbcUser, other.jdbcUser)
                      && safeEquals(selectChildFqnsSql, other.selectChildFqnsSql)
                      && safeEquals(selectChildNamesSql, other.selectChildNamesSql)
                      && safeEquals(selectNodeSql, other.selectNodeSql)
                      && safeEquals(table, other.table)
                      && safeEquals(updateNodeSql, other.updateNodeSql)
                      && safeEquals(updateTableSql, other.updateTableSql)
                      && safeEquals(connectionFactoryClass, other.connectionFactoryClass);
          }
  
          return false;
      }
  
      public int hashCode()
      {
          int result = hashCodeExcludingProperties();
          result = 31 * result + (createTable ? 0 : 1);
          result = 31 * result + (createTableDDL == null ? 0 : createTableDDL.hashCode());
          result = 31 * result + (datasourceName == null ? 0 : datasourceName.hashCode());
          result = 31 * result + (deleteAllSql == null ? 0 : deleteAllSql.hashCode());
          result = 31 * result + (deleteNodeSql == null ? 0 : deleteNodeSql.hashCode());
          result = 31 * result + (driverClass == null ? 0 : driverClass.hashCode());
          result = 31 * result + (dropTable ? 0 : 1);
          result = 31 * result + (dropTableDDL == null ? 0 : dropTableDDL.hashCode());
          result = 31 * result + (insertNodeSql == null ? 0 : insertNodeSql.hashCode());
          result = 31 * result + (jdbcPassword == null ? 0 : jdbcPassword.hashCode());
          result = 31 * result + (jdbcUser == null ? 0 : jdbcUser.hashCode());
          result = 31 * result + (jdbcURL == null ? 0 : jdbcURL.hashCode());
          result = 31 * result + (selectChildFqnsSql == null ? 0 : selectChildFqnsSql.hashCode());
          result = 31 * result + (selectChildNamesSql == null ? 0 : selectChildNamesSql.hashCode());
          result = 31 * result + (selectNodeSql == null ? 0 : selectNodeSql.hashCode());
          result = 31 * result + (table == null ? 0 : table.hashCode());
          result = 31 * result + (updateNodeSql == null ? 0 : updateNodeSql.hashCode());
          result = 31 * result + (updateTableSql == null ? 0 : updateTableSql.hashCode());
          result = 31 * result + (connectionFactoryClass == null ? 0 : connectionFactoryClass.hashCode());
  
          return result;
      }
  
  }
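
Since setProperties(Properties) above derives every SQL and DDL string from the cache.jdbc.* keys, the
mapping can be exercised in isolation. A rough sketch, assuming the classes from this commit (and their
parent config classes) are on the classpath; the property values and class name are hypothetical:

import java.util.Properties;

import org.jboss.cache.loader.AdjListJDBCClassLoaderConfig;

public class JdbcLoaderConfigSketch
{
    public static void main(String[] args)
    {
        Properties p = new Properties();
        p.setProperty("cache.jdbc.table.name", "jbosscache");
        p.setProperty("cache.jdbc.driver", "org.hsqldb.jdbcDriver");
        p.setProperty("cache.jdbc.url", "jdbc:hsqldb:mem:jbosscache");
        p.setProperty("cache.jdbc.user", "sa");
        p.setProperty("cache.jdbc.password", "");

        AdjListJDBCClassLoaderConfig config = new AdjListJDBCClassLoaderConfig();
        config.setProperties(p);

        // All of the strings below were built by setProperties() from the defaults plus the keys above.
        System.out.println(config.getCreateTableDDL());
        System.out.println(config.getInsertNodeSql());
        System.out.println(config.getSelectNodeSql());
    }
}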
  
  
  1.1      date: 2007/02/10 17:47:02;  author: mmarkus;  state: Exp;JBossCache/src/org/jboss/cache/loader/AdjListJDBCClassLoader.java
  
  Index: AdjListJDBCClassLoader.java
  ===================================================================
  package org.jboss.cache.loader;
  
  import org.jboss.cache.Fqn;
  import org.jboss.cache.Modification;
  import org.jboss.cache.util.Util;
  import org.jboss.cache.config.CacheLoaderConfig;
  import org.apache.commons.logging.Log;
  
  import java.util.*;
  import java.sql.*;
  import java.io.InputStream;
  import java.io.IOException;
  import java.io.ByteArrayInputStream;
  
  /**
 * The Adjacency List Model is a way of persisting trees in which each child holds a reference to its parent.
 * An alternative is the Nested Set Model (a.k.a. Modified Preorder Model), which adds extra indexing
 * information to each persisted node. That indexing information is used to optimize operations such as
 * subtree loading and deleting, but the indexes have to be updated on every insertion.
 * <p/>
 * The Adjacency List Model proved more efficient here because the entire path is persisted rather than
 * only a reference to the parent. Node lookups rely heavily on that, so performance is comparable to the
 * Modified Preorder Model, without the costly index updates on each insertion.
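 * <p/>
 * For illustration only (hypothetical data, not produced by any test in this commit): a tree holding
 * /a and /a/b roughly maps to rows of the form (fqn, node, parent):
 * <pre>
 * /a    | serialized attribute map or null | /
 * /a/b  | serialized attribute map or null | /a
 * </pre>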
   *
   * @author Mircea.Markus at iquestint.com
   * @version 1.0
   */
  public abstract class AdjListJDBCClassLoader extends AbstractCacheLoader
  {
      protected ConnectionFactory cf;
      protected String driverName;
      private AdjListJDBCClassLoaderConfig config;
  
      public void setConfig(CacheLoaderConfig.IndividualCacheLoaderConfig base)
      {
          config = processConfig(base);
  
          if (config.getDatasourceName() == null)
          {
              try
              {
                /* Instantiate a standalone connection factory as per the configuration, either the
                explicitly defined one or the default one */
                  cf = (ConnectionFactory) Util.loadClass(config.getConnectionFactoryClass()).newInstance();
              }
              catch (Exception e)
              {
                  getLogger().error("Connectionn factory class could not be loaded", e);
                  throw new IllegalStateException("Connectionn factory class could not be loaded", e);
              }
          } else
          {
            /* We create the ManagedConnectionFactory instance, but the JNDI lookup is not done until
            the start method is called, since that is the point in its lifecycle at which it is registered */
              cf = new ManagedConnectionFactory();
          }
          /* Regardless of the type of connection factory, we set the configuration */
          cf.setConfig(config);
      }
  
  
      /**
       * Returns a map representing a node.
       *
       * @param name node's fqn
       * @return node
       * @throws Exception
       */
      public Map get(Fqn name) throws Exception
      {
          final Map node = loadNode(name);
          return node == NULL_NODE_IN_ROW ? new HashMap(0) : node;
      }
  
      /**
     * Fetches child node names (not paths).
       *
       * @param fqn parent fqn
     * @return a set of child node names or null if no children are found for the fqn
       * @throws Exception
       */
      public Set<String> getChildrenNames(Fqn fqn) throws Exception
      {
          Set children = null;
          Connection con = null;
          PreparedStatement ps = null;
          ResultSet rs = null;
          try
          {
              if (getLogger().isDebugEnabled())
              {
                  getLogger().debug("executing sql: " + config.getSelectChildNamesSql() + " (" + fqn + ")");
              }
  
              con = cf.getConnection();
              ps = con.prepareStatement(config.getSelectChildNamesSql());
              ps.setString(1, fqn.toString());
              rs = ps.executeQuery();
              if (rs.next())
              {
                  children = new HashSet();
                  do
                  {
                      String child = rs.getString(1);
                      int slashInd = child.lastIndexOf('/');
                      String name = child.substring(slashInd + 1);
                      //Fqn childFqn = Fqn.fromString(child);
                      //String name = (String) childFqn.get(childFqn.size() - 1);
                      children.add(name);
                  }
                  while (rs.next());
              }
          }
          catch (SQLException e)
          {
              reportAndRethrowError("Failed to get children names for fqn " + fqn, e);
          }
          finally
          {
              safeClose(rs);
              safeClose(ps);
              cf.close(con);
          }
  
          return children == null ? null : Collections.unmodifiableSet(children);
      }
  
  
      /**
       * Nullifies the node.
       *
       * @param name node's fqn
       * @throws Exception
       */
      public void removeData(Fqn name) throws Exception
      {
          updateNode(name, null);
      }
  
      /**
     * First phase of the transaction commit process. The changes are committed immediately if only one phase is requested.
       * All the modifications are committed using the same connection.
       *
     * @param tx            an object representing the transaction
     * @param modifications a list of modifications
     * @param one_phase     indicates whether this is a one-phase or two-phase commit transaction
       * @throws Exception
       */
      public void prepare(Object tx, List<Modification> modifications, boolean one_phase) throws Exception
      {
          // start a tx
          cf.prepare(tx);
  
          try
          {
              put(modifications);
  
              // commit if it's one phase only
              if (one_phase)
              {
                  commit(tx);
              }
          }
          catch (Exception e)
          {
            // todo should I roll back here, or is rollback supposed to be invoked by the caller from outside?
              rollback(tx);
              // is this ok?
              throw e;
          }
      }
  
  
      /**
       * Commits a transaction.
       *
       * @param tx the tx to commit
       * @throws Exception
       */
      public void commit(Object tx) throws Exception
      {
          cf.commit(tx);
      }
  
      /**
       * Rolls back a transaction.
       *
       * @param tx the tx to rollback
       */
      public void rollback(Object tx)
      {
          cf.rollback(tx);
      }
  
      // Service implementation
  
      public void create() throws Exception
      {
      }
  
      public void start() throws Exception
      {
          cf.start();
  
          Connection con = null;
          Statement st = null;
  
          try
          {
              con = cf.getConnection();
              driverName = getDriverName(con);
              if (config.getCreateTable())
              {
                  if (!tableExists(config.getTable(), con))
                  {
                      if (getLogger().isDebugEnabled())
                      {
                          getLogger().debug("executing ddl: " + config.getCreateTableDDL());
                      }
                      st = con.createStatement();
                      st.executeUpdate(config.getCreateTableDDL());
                  }
              }
          }
          finally
          {
              safeClose(st);
              cf.close(con);
          }
      }
  
      public void stop()
      {
          if (config.getDropTable())
          {
              Connection con = null;
              Statement st = null;
              try
              {
                  if (getLogger().isDebugEnabled())
                  {
                      getLogger().debug("executing ddl: " + config.getDropTableDDL());
                  }
  
                  con = cf.getConnection();
                  st = con.createStatement();
                  st.executeUpdate(config.getDropTableDDL());
                  safeClose(st);
              }
              catch (SQLException e)
              {
                  getLogger().error("Failed to drop table: " + e.getMessage(), e);
              }
              finally
              {
                  safeClose(st);
                  cf.close(con);
                  cf.stop();
              }
          }
      }
  
      public void destroy()
      {
      }
  
  
      /**
       * Checks that there is a row for the fqn in the database.
       *
       * @param name node's fqn
       * @return true if there is a row in the database for the given fqn even if the node column is null.
       * @throws Exception
       */
      public boolean exists(Fqn name) throws Exception
      {
          final Map node = loadNode(name);
          return node != null;// && node != NULL_NODE_IN_ROW;
      }
  
      /**
       * Removes attribute's value for a key. If after removal the node contains no attributes, the node is nullified.
       *
       * @param name node's name
       * @param key  attribute's key
       * @return removed value or null if there was no value for the passed in key
       * @throws Exception
       */
      public Object remove(Fqn name, Object key) throws Exception
      {
          Object removedValue = null;
          Map node = loadNode(name);
          if (node != null && node != NULL_NODE_IN_ROW)
          {
              removedValue = node.remove(key);
              if (node.isEmpty())
              {
                  updateNode(name, null);
              } else
              {
                  updateNode(name, node);
              }
          }
          return removedValue;
      }
  
  
      /**
       * Loads a node from the database.
       *
       * @param name the fqn
       * @return non-null Map representing the node,
       *         null if there is no row with the fqn in the table,
       *         NULL_NODE_IN_ROW if there is a row in the table with the fqn but the node column contains null.
       */
      protected Map loadNode(Fqn name)
      {
          boolean rowExists = false;
          Map oldNode = null;
          Connection con = null;
          PreparedStatement ps = null;
          ResultSet rs = null;
          try
          {
              if (getLogger().isDebugEnabled())
              {
                  getLogger().debug("executing sql: " + config.getSelectNodeSql() + " (" + name + ")");
              }
  
              con = cf.getConnection();
              ps = con.prepareStatement(config.getSelectNodeSql());
              ps.setString(1, name.toString());
  
              rs = ps.executeQuery();
  
              if (rs.next())
              {
                  rowExists = true;
                  InputStream is = rs.getBinaryStream(1);
                  if (is != null && !rs.wasNull())
                  {
                      try
                      {
                          //                 ObjectInputStream ois = null;
                          //                 ois = new ObjectInputStream(is);
                          //                 Object marshalledNode = ois.readObject();
  
                          // deserialize result
                          Object marshalledNode = getMarshaller().objectFromStream(is);
                          oldNode = (Map) marshalledNode;
                      }
                      catch (Exception e)
                      {
                          throw new Exception("Unable to load to deserialize result: ", e);
                      }
                      finally
                      {
                          safeClose(is);
                      }
                  }
              }
          }
          catch (Exception e)
          {
              reportAndRethrowError("Failed to load node for fqn " + name, e);
          }
          finally
          {
              safeClose(rs);
              safeClose(ps);
              cf.close(con);
          }
  
          return oldNode == null ? (rowExists ? NULL_NODE_IN_ROW : null) : oldNode;
      }
  
  
      /**
       * Inserts a node into the database
       *
       * @param name the fqn
       * @param node the node
       */
      protected void insertNode(Fqn name, Map node)
      {
          Connection con = null;
          PreparedStatement ps = null;
          try
          {
              if (getLogger().isDebugEnabled())
              {
                  getLogger().debug("executing sql: " + config.getInsertNodeSql() + " (" + name + ")");
              }
  
              con = cf.getConnection();
              ps = con.prepareStatement(config.getInsertNodeSql());
  
              ps.setString(1, name.toString());
  
              if (node != null)
              {
                  //            ByteArrayOutputStream baos = new ByteArrayOutputStream();
                  //            ObjectOutputStream oos = new ObjectOutputStream(baos);
                  //            oos.writeObject(node);
  
                  byte[] byteStream = getMarshaller().objectToByteBuffer(node);
                  ByteArrayInputStream bais = new ByteArrayInputStream(byteStream);
                  ps.setBinaryStream(2, bais, byteStream.length);
              } else
              {
                // a hack to handle the incompatibility of SQL Server JDBC drivers prior to SQL Server 2005 (PostgreSQL needs the same treatment)
                  if (driverName != null && (driverName.contains("SQLSERVER")
                          || driverName.contains("POSTGRESQL")))
                  {
                      ps.setNull(2, Types.LONGVARBINARY);
                  } else
                  {
                      ps.setNull(2, Types.BLOB);
                  }
                  //ps.setNull(2, Types.LONGVARBINARY);
              }
  
              if (name.size() == 0)
              {
                  ps.setNull(3, Types.VARCHAR);
              } else
              {
                  ps.setString(3, name.getFqnChild(name.size() - 1).toString());
              }
  
              int rows = ps.executeUpdate();
              if (rows != 1)
              {
                  throw new IllegalStateException("Expected one insert row but got " + rows);
              }
          }
          catch (RuntimeException e)
          {
              throw e;
          }
          catch (Exception e)
          {
              getLogger().error("Failed to insert node :" + e.getMessage());
              throw new IllegalStateException("Failed to insert node: " + e.getMessage(), e);
          }
          finally
          {
              safeClose(ps);
              cf.close(con);
          }
      }
  
  
      /**
       * Updates a node in the database.
       *
       * @param name the fqn
       * @param node new node value
       */
      protected void updateNode(Fqn name, Map node)
      {
          Connection con = null;
          PreparedStatement ps = null;
          try
          {
              if (getLogger().isDebugEnabled())
              {
                  getLogger().debug("executing sql: " + config.getUpdateNodeSql());
              }
  
              con = cf.getConnection();
              ps = con.prepareStatement(config.getUpdateNodeSql());
  
              if (node == null)
              {
                  //ps.setNull(1, Types.BLOB);
                  //            ps.setNull(1, Types.LONGVARBINARY);
                  // don't set it to null - simply use an empty hash map.
                  node = new HashMap(0);
              }
  
              //         ByteArrayOutputStream baos = new ByteArrayOutputStream();
              //         ObjectOutputStream oos = new ObjectOutputStream(baos);
              //         oos.writeObject(node);
  
              byte[] byteStream = getMarshaller().objectToByteBuffer(node);
              ByteArrayInputStream bais = new ByteArrayInputStream(byteStream);
              ps.setBinaryStream(1, bais, byteStream.length);
  
              ps.setString(2, name.toString());
  
              int rows = ps.executeUpdate();
              //         if (rows != 1)
              //         {
              //            throw new IllegalStateException("Expected one updated row but got " + rows);
              //         }
          }
          catch (Exception e)
          {
              reportAndRethrowError("Failed to update node for fqn " + name, e);
          }
          finally
          {
              safeClose(ps);
              cf.close(con);
          }
      }
  
      protected String getDriverName(Connection con)
      {
          if (con == null) return null;
          try
          {
              DatabaseMetaData dmd = con.getMetaData();
              return toUpperCase(dmd.getDriverName());
          }
          catch (SQLException e)
          {
            // This should not happen. A J2EE compatible JDBC driver is
            // required to fully support metadata.
              throw new IllegalStateException("Error while getting the driver name", e);
          }
      }
  
      static String getRequiredProperty(Properties props, String name)
      {
          String value = props.getProperty(name);
          if (value == null)
          {
              throw new IllegalStateException("Missing required property: " + name);
          }
          return value;
      }
  
      protected boolean tableExists(String tableName, Connection con)
      {
          ResultSet rs = null;
          try
          {
              // (a j2ee spec compatible jdbc driver has to fully
              // implement the DatabaseMetaData)
              DatabaseMetaData dmd = con.getMetaData();
              String catalog = con.getCatalog();
              String schema = null;
              String quote = dmd.getIdentifierQuoteString();
              if (tableName.startsWith(quote))
              {
                  if (!tableName.endsWith(quote))
                  {
                      throw new IllegalStateException("Mismatched quote in table name: " + tableName);
                  }
                  int quoteLength = quote.length();
                  tableName = tableName.substring(quoteLength, tableName.length() - quoteLength);
                  if (dmd.storesLowerCaseQuotedIdentifiers())
                  {
                      tableName = toLowerCase(tableName);
                  } else if (dmd.storesUpperCaseQuotedIdentifiers())
                  {
                      tableName = toUpperCase(tableName);
                  }
              } else
              {
                  if (dmd.storesLowerCaseIdentifiers())
                  {
                      tableName = toLowerCase(tableName);
                  } else if (dmd.storesUpperCaseIdentifiers())
                  {
                      tableName = toUpperCase(tableName);
                  }
              }
  
              int dotIndex;
              if ((dotIndex = tableName.indexOf('.')) != -1)
              {
                  // Yank out schema name ...
                  schema = tableName.substring(0, dotIndex);
                  tableName = tableName.substring(dotIndex + 1);
              }
  
              rs = dmd.getTables(catalog, schema, tableName, null);
              return rs.next();
          }
          catch (SQLException e)
          {
            // This should not happen. A J2EE compatible JDBC driver is
            // required to fully support metadata.
            throw new IllegalStateException("Error while checking if table already exists " + tableName, e);
          }
          finally
          {
              safeClose(rs);
          }
      }
  
  
      protected abstract Log getLogger();
  
      protected abstract AdjListJDBCClassLoaderConfig processConfig(CacheLoaderConfig.IndividualCacheLoaderConfig base);
  
      protected void reportAndRethrowError(String message, Exception cause) throws IllegalStateException
      {
          getLogger().error(message, cause);
          throw new IllegalStateException(message, cause);
      }
  
      protected void safeClose(InputStream is)
      {
          if (is != null)
          {
              try
              {
                  is.close();
              }
              catch (IOException e)
              {
                  getLogger().warn("Failed to close input stream: " + e.getMessage());
              }
          }
      }
  
      protected void safeClose(Statement st)
      {
          if (st != null)
          {
              try
              {
                  st.close();
              }
              catch (SQLException e)
              {
                  getLogger().warn("Failed to close statement: " + e.getMessage());
              }
          }
      }
  
      protected void safeClose(ResultSet rs)
      {
          if (rs != null)
          {
              try
              {
                  rs.close();
              }
              catch (SQLException e)
              {
                  getLogger().warn("Failed to close result set: " + e.getMessage());
              }
          }
      }
  
      private static String toUpperCase(String s)
      {
          return s.toUpperCase(Locale.ENGLISH);
      }
  
      private static String toLowerCase(String s)
      {
        return s.toLowerCase(Locale.ENGLISH);
      }
  
      // Inner
  
      protected static final Map NULL_NODE_IN_ROW = new Map()
      {
          public int size()
          {
              throw new UnsupportedOperationException();
          }
  
          public void clear()
          {
              throw new UnsupportedOperationException();
          }
  
          public boolean isEmpty()
          {
              throw new UnsupportedOperationException();
          }
  
          public boolean containsKey(Object key)
          {
              throw new UnsupportedOperationException();
          }
  
          public boolean containsValue(Object value)
          {
              throw new UnsupportedOperationException();
          }
  
          public Collection values()
          {
              throw new UnsupportedOperationException();
          }
  
          public void putAll(Map t)
          {
              throw new UnsupportedOperationException();
          }
  
          public Set entrySet()
          {
              throw new UnsupportedOperationException();
          }
  
          public Set keySet()
          {
              throw new UnsupportedOperationException();
          }
  
          public Object get(Object key)
          {
              throw new UnsupportedOperationException();
          }
  
          public Object remove(Object key)
          {
              throw new UnsupportedOperationException();
          }
  
          public Object put(Object key, Object value)
          {
              throw new UnsupportedOperationException();
          }
      };
  
  }
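
loadNode() above distinguishes three outcomes: no row at all (null), a row whose node column is null
(the NULL_NODE_IN_ROW sentinel), and a row holding data. A minimal sketch of how get() and exists()
interpret that contract, independent of any database; the class, method names and sentinel stand-in
are hypothetical:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class NullNodeSentinelSketch
{
    // stand-in for AdjListJDBCClassLoader.NULL_NODE_IN_ROW (identity comparison is what matters)
    static final Map NULL_NODE_IN_ROW = Collections.emptyMap();

    static Map interpretGet(Map loaded)
    {
        // get(): a row with a null node column behaves like an empty node
        return loaded == NULL_NODE_IN_ROW ? new HashMap(0) : loaded;
    }

    static boolean interpretExists(Map loaded)
    {
        // exists(): any row counts, even if the node column is null
        return loaded != null;
    }

    public static void main(String[] args)
    {
        System.out.println(interpretExists(null));             // false: no row in the table
        System.out.println(interpretExists(NULL_NODE_IN_ROW)); // true: row exists, node column is null
        System.out.println(interpretGet(NULL_NODE_IN_ROW));    // {}: treated as an empty node
    }
}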
  
  
  


