[exo-jcr-commits] exo-jcr SVN: r194 - in jcr/trunk/component/core/src: main/java/org/exoplatform/services/jcr/impl/core/query/lucene and 2 other directories.

do-not-reply at jboss.org
Fri Oct 2 04:25:49 EDT 2009


Author: skabashnyuk
Date: 2009-10-02 04:25:49 -0400 (Fri, 02 Oct 2009)
New Revision: 194

Added:
   jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/IndexingTree.java
   jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SearchIndexConfigurationHelper.java
Modified:
   jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/QueryHandlerContext.java
   jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SearchManager.java
   jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SystemSearchManager.java
   jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/MultiIndex.java
   jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/SearchIndex.java
   jcr/trunk/component/core/src/test/java/org/exoplatform/services/jcr/api/core/query/TestAll.java
   jcr/trunk/component/core/src/test/java/org/exoplatform/services/jcr/api/core/query/lucene/directory/DirectoryManagerTest.java
Log:
EXOJCR-161 : Index startup update

Added: jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/IndexingTree.java
===================================================================
--- jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/IndexingTree.java	                        (rev 0)
+++ jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/IndexingTree.java	2009-10-02 08:25:49 UTC (rev 194)
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2009 eXo Platform SAS.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.exoplatform.services.jcr.impl.core.query;
+
+import org.exoplatform.services.jcr.dataflow.ItemState;
+import org.exoplatform.services.jcr.datamodel.ItemData;
+import org.exoplatform.services.jcr.datamodel.NodeData;
+import org.exoplatform.services.jcr.datamodel.QPath;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @author <a href="mailto:Sergey.Kabashnyuk at exoplatform.org">Sergey
+ *         Kabashnyuk</a>
+ * @version $Id: exo-jboss-codetemplates.xml 34360 2009-07-22 23:58:59Z ksm $
+ * 
+ */
+public class IndexingTree {
+    private final QPath indexingRootQpath;
+    private final NodeData indexingRoot;
+
+    private final List<QPath> excludedPaths;
+
+    /**
+     * @param indexingRoot
+     *            the root node of the subtree to be indexed
+     * @param excludedPaths
+     *            paths excluded from indexing
+     */
+    public IndexingTree(NodeData indexingRoot, List<QPath> excludedPaths) {
+	super();
+	this.indexingRoot = indexingRoot;
+	this.indexingRootQpath = indexingRoot.getQPath();
+	this.excludedPaths = excludedPaths;
+    }
+
+    /**
+     * @param indexingRoot
+     *            the root node of the subtree to be indexed; no paths are
+     *            excluded
+     */
+    public IndexingTree(NodeData indexingRoot) {
+	super();
+	this.indexingRoot = indexingRoot;
+	this.indexingRootQpath = indexingRoot.getQPath();
+	this.excludedPaths = new ArrayList<QPath>();
+    }
+
+    /**
+     * @return the excludedPaths
+     */
+    public List<QPath> getExcludedPaths() {
+	return excludedPaths;
+    }
+
+    /**
+     * @return the indexingRoot
+     */
+    public NodeData getIndexingRoot() {
+	return indexingRoot;
+    }
+
+    /**
+     * Checks whether the given event should be excluded from indexing, based
+     * on the {@link #excludedPaths} setting and the indexing root.
+     * 
+     * @param event
+     *            observation event
+     * @return <code>true</code> if the event should be excluded,
+     *         <code>false</code> otherwise
+     */
+    public boolean isExcluded(ItemState event) {
+
+	for (QPath excludedPath : excludedPaths) {
+	    if (event.getData().getQPath().isDescendantOf(excludedPath)
+		    || event.getData().getQPath().equals(excludedPath))
+		return true;
+	}
+
+	return !event.getData().getQPath().isDescendantOf(indexingRootQpath)
+		&& !event.getData().getQPath().equals(indexingRootQpath);
+    }
+
+    /**
+     * Checks whether the given item data should be excluded from indexing,
+     * based on the {@link #excludedPaths} setting and the indexing root.
+     * 
+     * @param eventData
+     *            item data of the observation event
+     * @return <code>true</code> if the item should be excluded,
+     *         <code>false</code> otherwise
+     */
+    public boolean isExcluded(ItemData eventData) {
+
+	for (QPath excludedPath : excludedPaths) {
+	    if (eventData.getQPath().isDescendantOf(excludedPath)
+		    || eventData.getQPath().equals(excludedPath))
+		return true;
+	}
+
+	return !eventData.getQPath().isDescendantOf(indexingRootQpath)
+		&& !eventData.getQPath().equals(indexingRootQpath);
+    }
+}


Property changes on: jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/IndexingTree.java
___________________________________________________________________
Name: svn:mime-type
   + text/plain

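For illustration, a minimal sketch of the filtering contract the new IndexingTree provides, assuming an already built tree and a changes log; SearchManager.onSaveItems() below applies the same check. The class and method names in this sketch are hypothetical.

    import org.exoplatform.services.jcr.dataflow.ItemState;
    import org.exoplatform.services.jcr.dataflow.ItemStateChangesLog;
    import org.exoplatform.services.jcr.impl.core.query.IndexingTree;

    public class IndexingTreeFilterSketch {

        /**
         * Skips every change that lies under an excluded path or outside the
         * indexing root; only the remaining changes reach the index.
         */
        static void filter(IndexingTree tree, ItemStateChangesLog changesLog) {
            for (ItemState state : changesLog.getAllStates()) {
                if (tree.isExcluded(state)) {
                    continue; // excluded subtree or outside the indexing root
                }
                // hand the change over to the QueryHandler here
            }
        }
    }
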
Modified: jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/QueryHandlerContext.java
===================================================================
--- jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/QueryHandlerContext.java	2009-10-01 15:37:53 UTC (rev 193)
+++ jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/QueryHandlerContext.java	2009-10-02 08:25:49 UTC (rev 194)
@@ -27,152 +27,163 @@
  * stable. This class provides access to the environment where the query handler
  * is running in.
  */
-public class QueryHandlerContext
-{
-   /**
-    * The persistent <code>ItemStateManager</code>
-    */
-   private final ItemDataConsumer stateMgr;
+public class QueryHandlerContext {
+    /**
+     * The persistent <code>ItemStateManager</code>
+     */
+    private final ItemDataConsumer stateMgr;
 
-   /**
-    * The node type registry of the repository
-    */
-   private final NodeTypeDataManager nodeTypeDataManager;
+    /**
+     * The node type registry of the repository
+     */
+    private final NodeTypeDataManager nodeTypeDataManager;
 
-   /**
-    * The namespace registry of the repository.
-    */
-   private final NamespaceRegistryImpl nsRegistry;
+    /**
+     * The namespace registry of the repository.
+     */
+    private final NamespaceRegistryImpl nsRegistry;
 
-   /**
-    * The id of the root node.
-    */
-   private final String rootIdentifer;
+    /**
+     * The indexing tree: the root of the indexed subtree and the excluded
+     * paths.
+     */
+    private final IndexingTree indexingTree;
 
-   /**
-    * PropertyType registry to look up the type of a property with a given name.
-    */
-   private final PropertyTypeRegistry propRegistry;
+    /**
+     * PropertyType registry to look up the type of a property with a given
+     * name.
+     */
+    private final PropertyTypeRegistry propRegistry;
 
-   /**
-    * The query handler for the jcr:system tree
-    */
-   private final QueryHandler parentHandler;
+    /**
+     * The query handler for the jcr:system tree
+     */
+    private final QueryHandler parentHandler;
 
-   /**
-    * Text extractor for extracting text content of binary properties.
-    */
-   private final DocumentReaderService extractor;
+    /**
+     * Text extractor for extracting text content of binary properties.
+     */
+    private final DocumentReaderService extractor;
 
-   private final String indexDirectory;
+    private final String indexDirectory;
 
-   /**
-    * Creates a new context instance.
-    * 
-    * @param fs a this <code>QueryHandler</code> may use to store its index. If
-    *          no <code>FileSystem</code> has been configured <code>fs</code> is
-    *          <code>null</code>.
-    * @param stateMgr provides persistent item states.
-    * @param rootId the id of the root node.
-    * @param ntRegistry the node type registry.
-    * @param nsRegistry the namespace registry.
-    * @param parentHandler the parent query handler or <code>null</code> it there
-    *          is no parent handler.
-    * @param excludedNodeId id of the node that should be excluded from indexing.
-    *          Any descendant of that node is also excluded from indexing.
-    */
-   public QueryHandlerContext(ItemDataConsumer stateMgr, String rootIdentifer, NodeTypeDataManager nodeTypeDataManager,
-      NamespaceRegistryImpl nsRegistry, QueryHandler parentHandler, String indexDirectory,
-      DocumentReaderService extractor)
-   {
-      this.stateMgr = stateMgr;
-      this.rootIdentifer = rootIdentifer;
-      this.nodeTypeDataManager = nodeTypeDataManager;
-      this.nsRegistry = nsRegistry;
-      this.indexDirectory = indexDirectory;
-      this.extractor = extractor;
-      this.propRegistry = new PropertyTypeRegistry(nodeTypeDataManager);
-      this.parentHandler = parentHandler;
-      ((NodeTypeDataManagerImpl)this.nodeTypeDataManager).addListener(propRegistry);
-   }
+    private final boolean createInitialIndex;
 
-   /**
-    * Returns the persistent {@link ItemStateManager} of the workspace this
-    * <code>QueryHandler</code> is based on.
-    * 
-    * @return the persistent <code>ItemStateManager</code> of the current
-    *         workspace.
-    */
-   public ItemDataConsumer getItemStateManager()
-   {
-      return stateMgr;
-   }
+    /**
+     * Creates a new context instance.
+     * 
+     * @param stateMgr
+     *            provides persistent item states.
+     * @param indexingTree
+     *            the indexing tree defining the indexed subtree and the paths
+     *            excluded from indexing.
+     * @param nodeTypeDataManager
+     *            the node type registry.
+     * @param nsRegistry
+     *            the namespace registry.
+     * @param parentHandler
+     *            the parent query handler or <code>null</code> if there is no
+     *            parent handler.
+     * @param indexDirectory
+     *            the directory where the index is stored.
+     * @param extractor
+     *            text extractor for extracting text content of binary
+     *            properties.
+     * @param createInitialIndex
+     *            <code>true</code> if an initial index should be created on
+     *            startup.
+     */
+    public QueryHandlerContext(ItemDataConsumer stateMgr,
+	    IndexingTree indexingTree, NodeTypeDataManager nodeTypeDataManager,
+	    NamespaceRegistryImpl nsRegistry, QueryHandler parentHandler,
+	    String indexDirectory, DocumentReaderService extractor,
+	    boolean createInitialIndex) {
+	this.stateMgr = stateMgr;
+	this.indexingTree = indexingTree;
+	this.nodeTypeDataManager = nodeTypeDataManager;
+	this.nsRegistry = nsRegistry;
+	this.indexDirectory = indexDirectory;
+	this.extractor = extractor;
+	this.createInitialIndex = createInitialIndex;
+	this.propRegistry = new PropertyTypeRegistry(nodeTypeDataManager);
+	this.parentHandler = parentHandler;
+	((NodeTypeDataManagerImpl) this.nodeTypeDataManager)
+		.addListener(propRegistry);
+    }
 
-   /**
-    * Returns the id of the root node.
-    * 
-    * @return the idof the root node.
-    */
-   public String getRootNodeIdentifer()
-   {
-      return rootIdentifer;
-   }
+    /**
+     * @return <code>true</code> if an initial index should be created on
+     *         startup
+     */
+    public boolean isCreateInitialIndex() {
+	return createInitialIndex;
+    }
 
-   /**
-    * Returns the PropertyTypeRegistry for this repository.
-    * 
-    * @return the PropertyTypeRegistry for this repository.
-    */
-   public PropertyTypeRegistry getPropertyTypeRegistry()
-   {
-      return propRegistry;
-   }
+    /**
+     * Returns the persistent {@link ItemStateManager} of the workspace this
+     * <code>QueryHandler</code> is based on.
+     * 
+     * @return the persistent <code>ItemStateManager</code> of the current
+     *         workspace.
+     */
+    public ItemDataConsumer getItemStateManager() {
+	return stateMgr;
+    }
 
-   /**
-    * Returns the NodeTypeRegistry for this repository.
-    * 
-    * @return the NodeTypeRegistry for this repository.
-    */
-   public NodeTypeDataManager getNodeTypeDataManager()
-   {
-      return nodeTypeDataManager;
-   }
+    /**
+     * Returns the indexing tree of this workspace.
+     * 
+     * @return the indexing tree.
+     */
+    public IndexingTree getIndexingTree() {
+	return indexingTree;
+    }
 
-   /**
-    * Returns the NamespaceRegistryImpl for this repository.
-    * 
-    * @return the NamespaceRegistryImpl for this repository.
-    */
-   public NamespaceRegistryImpl getNamespaceRegistry()
-   {
-      return nsRegistry;
-   }
+    /**
+     * Returns the PropertyTypeRegistry for this repository.
+     * 
+     * @return the PropertyTypeRegistry for this repository.
+     */
+    public PropertyTypeRegistry getPropertyTypeRegistry() {
+	return propRegistry;
+    }
 
-   /**
-    * Returns the parent query handler.
-    * 
-    * @return the parent query handler.
-    */
-   public QueryHandler getParentHandler()
-   {
-      return parentHandler;
-   }
+    /**
+     * Returns the NodeTypeDataManager for this repository.
+     * 
+     * @return the NodeTypeDataManager for this repository.
+     */
+    public NodeTypeDataManager getNodeTypeDataManager() {
+	return nodeTypeDataManager;
+    }
 
-   /**
-    * Destroys this context and releases resources.
-    */
-   public void destroy()
-   {
-      ((NodeTypeDataManagerImpl)this.nodeTypeDataManager).removeListener(propRegistry);
-   }
+    /**
+     * Returns the NamespaceRegistryImpl for this repository.
+     * 
+     * @return the NamespaceRegistryImpl for this repository.
+     */
+    public NamespaceRegistryImpl getNamespaceRegistry() {
+	return nsRegistry;
+    }
 
-   public DocumentReaderService getExtractor()
-   {
-      return extractor;
-   }
+    /**
+     * Returns the parent query handler.
+     * 
+     * @return the parent query handler.
+     */
+    public QueryHandler getParentHandler() {
+	return parentHandler;
+    }
 
-   public String getIndexDirectory()
-   {
-      return indexDirectory;
-   }
+    /**
+     * Destroys this context and releases resources.
+     */
+    public void destroy() {
+	((NodeTypeDataManagerImpl) this.nodeTypeDataManager)
+		.removeListener(propRegistry);
+    }
+
+    public DocumentReaderService getExtractor() {
+	return extractor;
+    }
+
+    public String getIndexDirectory() {
+	return indexDirectory;
+    }
 }

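As a hedged sketch of how a QueryHandler implementation can consume the reworked context: only the getters shown in this change are assumed, and the class and method names below are illustrative.

    import org.exoplatform.services.jcr.datamodel.NodeData;
    import org.exoplatform.services.jcr.impl.core.query.IndexingTree;
    import org.exoplatform.services.jcr.impl.core.query.QueryHandlerContext;

    public class QueryHandlerContextSketch {

        /**
         * Reads the indexing scope from the context and decides whether an
         * initial index has to be built at startup.
         */
        static void describe(QueryHandlerContext context) {
            IndexingTree tree = context.getIndexingTree();
            NodeData root = tree.getIndexingRoot();
            System.out.println("indexing root: " + root.getQPath().getAsString());
            System.out.println("excluded paths: " + tree.getExcludedPaths().size());
            if (context.isCreateInitialIndex()) {
                // traverse from the indexing root, skip nodes for which
                // tree.isExcluded(nodeData) is true, and index the rest
            }
        }
    }
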
Added: jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SearchIndexConfigurationHelper.java
===================================================================
--- jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SearchIndexConfigurationHelper.java	                        (rev 0)
+++ jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SearchIndexConfigurationHelper.java	2009-10-02 08:25:49 UTC (rev 194)
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2009 eXo Platform SAS.
+ *
+ * This is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This software is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this software; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+ */
+package org.exoplatform.services.jcr.impl.core.query;
+
+import org.exoplatform.services.jcr.config.QueryHandlerEntry;
+import org.exoplatform.services.jcr.config.RepositoryConfigurationException;
+import org.exoplatform.services.jcr.impl.core.query.lucene.SearchIndex;
+
+import java.io.IOException;
+
+/**
+ * @author <a href="mailto:Sergey.Kabashnyuk at exoplatform.org">Sergey
+ *         Kabashnyuk</a>
+ * @version $Id: exo-jboss-codetemplates.xml 34360 2009-07-22 23:58:59Z ksm $
+ * 
+ */
+public class SearchIndexConfigurationHelper {
+
+    private final SearchIndex searchIndex;
+
+    /**
+     * @param searchIndex
+     *            the search index to configure
+     */
+    public SearchIndexConfigurationHelper(SearchIndex searchIndex) {
+	super();
+	this.searchIndex = searchIndex;
+    }
+
+    public void init(QueryHandlerEntry queryHandlerEntry) throws IOException,
+	    RepositoryConfigurationException {
+	// Path will be set using the QueryHandlerContext
+
+    }
+}


Property changes on: jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SearchIndexConfigurationHelper.java
___________________________________________________________________
Name: svn:mime-type
   + text/plain

Modified: jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SearchManager.java
===================================================================
--- jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SearchManager.java	2009-10-01 15:37:53 UTC (rev 193)
+++ jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SearchManager.java	2009-10-02 08:25:49 UTC (rev 194)
@@ -59,525 +59,488 @@
 /**
  * Acts as a global entry point to execute queries and index nodes.
  */
-public class SearchManager implements Startable, MandatoryItemsPersistenceListener
-{
+public class SearchManager implements Startable,
+	MandatoryItemsPersistenceListener {
 
-   /**
-    * Logger instance for this class
-    */
-   private static final Log log = ExoLogger.getLogger(SearchManager.class);
+    /**
+     * Logger instance for this class
+     */
+    private static final Log log = ExoLogger.getLogger(SearchManager.class);
 
-   protected final QueryHandlerEntryWrapper config;
+    protected final QueryHandlerEntryWrapper config;
 
-   /**
-    * Text extractor for extracting text content of binary properties.
-    */
-   protected final DocumentReaderService extractor;
+    /**
+     * Text extractor for extracting text content of binary properties.
+     */
+    protected final DocumentReaderService extractor;
 
-   /**
-    * QueryHandler where query execution is delegated to
-    */
-   protected QueryHandler handler;
+    /**
+     * QueryHandler where query execution is delegated to
+     */
+    protected QueryHandler handler;
 
-   /**
-    * The shared item state manager instance for the workspace.
-    */
-   protected final ItemDataConsumer itemMgr;
+    /**
+     * The shared item state manager instance for the workspace.
+     */
+    protected final ItemDataConsumer itemMgr;
 
-   /**
-    * The namespace registry of the repository.
-    */
-   protected final NamespaceRegistryImpl nsReg;
+    /**
+     * The namespace registry of the repository.
+     */
+    protected final NamespaceRegistryImpl nsReg;
 
-   /**
-    * The node type registry.
-    */
-   protected final NodeTypeDataManager nodeTypeDataManager;
+    /**
+     * The node type registry.
+     */
+    protected final NodeTypeDataManager nodeTypeDataManager;
 
-   /**
-    * QueryHandler of the parent search manager or <code>null</code> if there is
-    * none.
-    */
-   protected final SearchManager parentSearchManager;
+    /**
+     * QueryHandler of the parent search manager or <code>null</code> if there
+     * is none.
+     */
+    protected final SearchManager parentSearchManager;
 
-   protected QPath indexingRoot;
+    // protected QPath indexingRoot;
+    //
+    // protected List<QPath> excludedPaths = new ArrayList<QPath>();
 
-   protected List<QPath> excludedPaths = new ArrayList<QPath>();
+    protected IndexingTree indexingTree;
 
-   private final ConfigurationManager cfm;
+    private final ConfigurationManager cfm;
 
-   /**
-    * Creates a new <code>SearchManager</code>.
-    * 
-    * @param config the search configuration.
-    * @param nsReg the namespace registry.
-    * @param ntReg the node type registry.
-    * @param itemMgr the shared item state manager.
-    * @param rootNodeId the id of the root node.
-    * @param parentMgr the parent search manager or <code>null</code> if there is
-    *          no parent search manager.
-    * @param excludedNodeId id of the node that should be excluded from indexing.
-    *          Any descendant of that node will also be excluded from indexing.
-    * @throws RepositoryException if the search manager cannot be initialized
-    * @throws RepositoryConfigurationException
-    */
-   public SearchManager(QueryHandlerEntry config, NamespaceRegistryImpl nsReg, NodeTypeDataManager ntReg,
-      WorkspacePersistentDataManager itemMgr, SystemSearchManagerHolder parentSearchManager,
-      DocumentReaderService extractor, ConfigurationManager cfm) throws RepositoryException,
-      RepositoryConfigurationException
-   {
+    /**
+     * Creates a new <code>SearchManager</code>.
+     * 
+     * @param config
+     *            the search configuration.
+     * @param nsReg
+     *            the namespace registry.
+     * @param ntReg
+     *            the node type registry.
+     * @param itemMgr
+     *            the shared item state manager.
+     * @param parentSearchManager
+     *            the parent search manager holder or <code>null</code> if
+     *            there is no parent search manager.
+     * @param extractor
+     *            text extractor for extracting text content of binary
+     *            properties.
+     * @param cfm
+     *            the configuration manager.
+     * @throws RepositoryException
+     *             if the search manager cannot be initialized
+     * @throws RepositoryConfigurationException
+     */
+    public SearchManager(QueryHandlerEntry config, NamespaceRegistryImpl nsReg,
+	    NodeTypeDataManager ntReg, WorkspacePersistentDataManager itemMgr,
+	    SystemSearchManagerHolder parentSearchManager,
+	    DocumentReaderService extractor, ConfigurationManager cfm)
+	    throws RepositoryException, RepositoryConfigurationException {
 
-      this.extractor = extractor;
+	this.extractor = extractor;
 
-      this.config = new QueryHandlerEntryWrapper(config);
-      this.nodeTypeDataManager = ntReg;
-      this.nsReg = nsReg;
-      this.itemMgr = itemMgr;
-      this.cfm = cfm;
+	this.config = new QueryHandlerEntryWrapper(config);
+	this.nodeTypeDataManager = ntReg;
+	this.nsReg = nsReg;
+	this.itemMgr = itemMgr;
+	this.cfm = cfm;
 
-      this.parentSearchManager = parentSearchManager != null ? parentSearchManager.get() : null;
-      itemMgr.addItemPersistenceListener(this);
+	this.parentSearchManager = parentSearchManager != null ? parentSearchManager
+		.get()
+		: null;
+	itemMgr.addItemPersistenceListener(this);
+    }
 
-      //initializeQueryHandler();
-   }
+    /**
+     * Creates a query object from a node that can be executed on the workspace.
+     * 
+     * @param session
+     *            the session of the user executing the query.
+     * @param itemMgr
+     *            the item manager of the user executing the query. Needed to
+     *            return <code>Node</code> instances in the result set.
+     * @param node
+     *            a node of type nt:query.
+     * @return a <code>Query</code> instance to execute.
+     * @throws InvalidQueryException
+     *             if <code>absPath</code> is not a valid persisted query (that
+     *             is, a node of type nt:query)
+     * @throws RepositoryException
+     *             if any other error occurs.
+     */
+    public Query createQuery(SessionImpl session,
+	    SessionDataManager sessionDataManager, Node node)
+	    throws InvalidQueryException, RepositoryException {
+	AbstractQueryImpl query = createQueryInstance();
+	query.init(session, sessionDataManager, handler, node);
+	return query;
+    }
 
-   /**
-    * Creates a query object from a node that can be executed on the workspace.
-    * 
-    * @param session the session of the user executing the query.
-    * @param itemMgr the item manager of the user executing the query. Needed to
-    *          return <code>Node</code> instances in the result set.
-    * @param node a node of type nt:query.
-    * @return a <code>Query</code> instance to execute.
-    * @throws InvalidQueryException if <code>absPath</code> is not a valid
-    *           persisted query (that is, a node of type nt:query)
-    * @throws RepositoryException if any other error occurs.
-    */
-   public Query createQuery(SessionImpl session, SessionDataManager sessionDataManager, Node node)
-      throws InvalidQueryException, RepositoryException
-   {
-      AbstractQueryImpl query = createQueryInstance();
-      query.init(session, sessionDataManager, handler, node);
-      return query;
-   }
+    /**
+     * Creates a query object that can be executed on the workspace.
+     * 
+     * @param session
+     *            the session of the user executing the query.
+     * @param itemMgr
+     *            the item manager of the user executing the query. Needed to
+     *            return <code>Node</code> instances in the result set.
+     * @param statement
+     *            the actual query statement.
+     * @param language
+     *            the syntax of the query statement.
+     * @return a <code>Query</code> instance to execute.
+     * @throws InvalidQueryException
+     *             if the query is malformed or the <code>language</code> is
+     *             unknown.
+     * @throws RepositoryException
+     *             if any other error occurs.
+     */
+    public Query createQuery(SessionImpl session,
+	    SessionDataManager sessionDataManager, String statement,
+	    String language) throws InvalidQueryException, RepositoryException {
+	AbstractQueryImpl query = createQueryInstance();
+	query.init(session, sessionDataManager, handler, statement, language);
+	return query;
+    }
 
-   /**
-    * Creates a query object that can be executed on the workspace.
-    * 
-    * @param session the session of the user executing the query.
-    * @param itemMgr the item manager of the user executing the query. Needed to
-    *          return <code>Node</code> instances in the result set.
-    * @param statement the actual query statement.
-    * @param language the syntax of the query statement.
-    * @return a <code>Query</code> instance to execute.
-    * @throws InvalidQueryException if the query is malformed or the
-    *           <code>language</code> is unknown.
-    * @throws RepositoryException if any other error occurs.
-    */
-   public Query createQuery(SessionImpl session, SessionDataManager sessionDataManager, String statement,
-      String language) throws InvalidQueryException, RepositoryException
-   {
-      AbstractQueryImpl query = createQueryInstance();
-      query.init(session, sessionDataManager, handler, statement, language);
-      return query;
-   }
+    /**
+     * For test use only.
+     */
+    public QueryHandler getHandler() {
 
-   /**
-    * just for test use only
-    */
-   public QueryHandler getHandler()
-   {
+	return handler;
+    }
 
-      return handler;
-   }
+    public void onSaveItems(ItemStateChangesLog changesLog) {
+	if (handler == null)
+	    return;
 
-   public void onSaveItems(ItemStateChangesLog changesLog)
-   {
-      if (handler == null)
-         return;
+	long time = System.currentTimeMillis();
 
-      long time = System.currentTimeMillis();
+	// nodes that need to be removed from the index.
+	final Set<String> removedNodes = new HashSet<String>();
+	// nodes that need to be added to the index.
+	final Set<String> addedNodes = new HashSet<String>();
 
-      // nodes that need to be removed from the index.
-      final Set<String> removedNodes = new HashSet<String>();
-      // nodes that need to be added to the index.
-      final Set<String> addedNodes = new HashSet<String>();
+	final Map<String, List<ItemState>> updatedNodes = new HashMap<String, List<ItemState>>();
 
-      final Map<String, List<ItemState>> updatedNodes = new HashMap<String, List<ItemState>>();
+	for (Iterator<ItemState> iter = changesLog.getAllStates().iterator(); iter
+		.hasNext();) {
+	    ItemState itemState = iter.next();
 
-      for (Iterator<ItemState> iter = changesLog.getAllStates().iterator(); iter.hasNext();)
-      {
-         ItemState itemState = iter.next();
+	    if (!indexingTree.isExcluded(itemState)) {
+		String uuid = itemState.isNode() ? itemState.getData()
+			.getIdentifier() : itemState.getData()
+			.getParentIdentifier();
 
-         if (!isExcluded(itemState))
-         {
-            String uuid =
-               itemState.isNode() ? itemState.getData().getIdentifier() : itemState.getData().getParentIdentifier();
+		if (itemState.isAdded()) {
+		    if (itemState.isNode()) {
+			addedNodes.add(uuid);
+		    } else {
+			if (!addedNodes.contains(uuid)) {
+			    createNewOrAdd(uuid, itemState, updatedNodes);
+			}
+		    }
+		} else if (itemState.isRenamed()) {
+		    if (itemState.isNode()) {
+			addedNodes.add(uuid);
+		    } else {
+			createNewOrAdd(uuid, itemState, updatedNodes);
+		    }
+		} else if (itemState.isUpdated()) {
+		    createNewOrAdd(uuid, itemState, updatedNodes);
+		} else if (itemState.isMixinChanged()) {
+		    createNewOrAdd(uuid, itemState, updatedNodes);
+		} else if (itemState.isDeleted()) {
+		    if (itemState.isNode()) {
+			if (addedNodes.contains(uuid)) {
+			    addedNodes.remove(uuid);
+			    removedNodes.remove(uuid);
+			} else {
+			    removedNodes.add(uuid);
+			}
+			// remove all changes after node remove
+			updatedNodes.remove(uuid);
+		    } else {
+			if (!removedNodes.contains(uuid)
+				&& !addedNodes.contains(uuid)) {
+			    createNewOrAdd(uuid, itemState, updatedNodes);
+			}
+		    }
+		}
+	    }
+	}
+	// TODO make quick changes
+	for (String uuid : updatedNodes.keySet()) {
+	    removedNodes.add(uuid);
+	    addedNodes.add(uuid);
+	}
 
-            if (itemState.isAdded())
-            {
-               if (itemState.isNode())
-               {
-                  addedNodes.add(uuid);
-               }
-               else
-               {
-                  if (!addedNodes.contains(uuid))
-                  {
-                     createNewOrAdd(uuid, itemState, updatedNodes);
-                  }
-               }
-            }
-            else if (itemState.isRenamed())
-            {
-               if (itemState.isNode())
-               {
-                  addedNodes.add(uuid);
-               }
-               else
-               {
-                  createNewOrAdd(uuid, itemState, updatedNodes);
-               }
-            }
-            else if (itemState.isUpdated())
-            {
-               createNewOrAdd(uuid, itemState, updatedNodes);
-            }
-            else if (itemState.isMixinChanged())
-            {
-               createNewOrAdd(uuid, itemState, updatedNodes);
-            }
-            else if (itemState.isDeleted())
-            {
-               if (itemState.isNode())
-               {
-                  if (addedNodes.contains(uuid))
-                  {
-                     addedNodes.remove(uuid);
-                     removedNodes.remove(uuid);
-                  }
-                  else
-                  {
-                     removedNodes.add(uuid);
-                  }
-                  // remove all changes after node remove
-                  updatedNodes.remove(uuid);
-               }
-               else
-               {
-                  if (!removedNodes.contains(uuid) && !addedNodes.contains(uuid))
-                  {
-                     createNewOrAdd(uuid, itemState, updatedNodes);
-                  }
-               }
-            }
-         }
-      }
-      // TODO make quick changes
-      for (String uuid : updatedNodes.keySet())
-      {
-         removedNodes.add(uuid);
-         addedNodes.add(uuid);
-      }
+	Iterator<NodeData> addedStates = new Iterator<NodeData>() {
+	    private final Iterator<String> iter = addedNodes.iterator();
 
-      Iterator<NodeData> addedStates = new Iterator<NodeData>()
-      {
-         private final Iterator<String> iter = addedNodes.iterator();
+	    public boolean hasNext() {
+		return iter.hasNext();
+	    }
 
-         public boolean hasNext()
-         {
-            return iter.hasNext();
-         }
+	    public NodeData next() {
 
-         public NodeData next()
-         {
+		// cycle until we find the next node or reach the end of the set
+		do {
+		    String id = iter.next();
+		    try {
+			ItemData item = itemMgr.getItemData(id);
+			if (item != null) {
+			    if (item.isNode())
+				return (NodeData) item; // return node
+			    else
+				log.warn("Node not found, but property " + id
+					+ ", " + item.getQPath().getAsString()
+					+ " found. ");
+			} else
+			    log.warn("Unable to index node with id " + id
+				    + ", node does not exist.");
 
-            // cycle till find a next or meet the end of set
-            do
-            {
-               String id = iter.next();
-               try
-               {
-                  ItemData item = itemMgr.getItemData(id);
-                  if (item != null)
-                  {
-                     if (item.isNode())
-                        return (NodeData)item; // return node
-                     else
-                        log.warn("Node not found, but property " + id + ", " + item.getQPath().getAsString()
-                           + " found. ");
-                  }
-                  else
-                     log.warn("Unable to index node with id " + id + ", node does not exist.");
+		    } catch (RepositoryException e) {
+			log.error("Can't read next node data " + id, e);
+		    }
+		} while (iter.hasNext()); // get next if error or node not found
 
-               }
-               catch (RepositoryException e)
-               {
-                  log.error("Can't read next node data " + id, e);
-               }
-            }
-            while (iter.hasNext()); // get next if error or node not found
+		return null; // we met the end of iterator set
+	    }
 
-            return null; // we met the end of iterator set
-         }
+	    public void remove() {
+		throw new UnsupportedOperationException();
+	    }
+	};
 
-         public void remove()
-         {
-            throw new UnsupportedOperationException();
-         }
-      };
+	Iterator<String> removedIds = new Iterator<String>() {
+	    private final Iterator<String> iter = removedNodes.iterator();
 
-      Iterator<String> removedIds = new Iterator<String>()
-      {
-         private final Iterator<String> iter = removedNodes.iterator();
+	    public boolean hasNext() {
+		return iter.hasNext();
+	    }
 
-         public boolean hasNext()
-         {
-            return iter.hasNext();
-         }
+	    public String next() {
+		return nextNodeId();
+	    }
 
-         public String next()
-         {
-            return nextNodeId();
-         }
+	    public String nextNodeId() throws NoSuchElementException {
+		return iter.next();
+	    }
 
-         public String nextNodeId() throws NoSuchElementException
-         {
-            return iter.next();
-         }
+	    public void remove() {
+		throw new UnsupportedOperationException();
 
-         public void remove()
-         {
-            throw new UnsupportedOperationException();
+	    }
+	};
 
-         }
-      };
+	if (removedNodes.size() > 0 || addedNodes.size() > 0) {
+	    try {
+		handler.updateNodes(removedIds, addedStates);
+	    } catch (RepositoryException e) {
+		log.error("Error indexing changes " + e, e);
+	    } catch (IOException e) {
+		log.error("Error indexing changes " + e, e);
+		try {
+		    handler.logErrorChanges(removedNodes, addedNodes);
+		} catch (IOException ioe) {
+		    log.warn(
+			    "Exception occurred while writing the error log. Error log is not complete. "
+				    + ioe, ioe);
+		}
+	    }
+	}
 
-      if (removedNodes.size() > 0 || addedNodes.size() > 0)
-      {
-         try
-         {
-            handler.updateNodes(removedIds, addedStates);
-         }
-         catch (RepositoryException e)
-         {
-            log.error("Error indexing changes " + e, e);
-         }
-         catch (IOException e)
-         {
-            log.error("Error indexing changes " + e, e);
-            try
-            {
-               handler.logErrorChanges(removedNodes, addedNodes);
-            }
-            catch (IOException ioe)
-            {
-               log.warn("Exception occure when errorLog writed. Error log is not complete. " + ioe, ioe);
-            }
-         }
-      }
+	if (log.isDebugEnabled()) {
+	    log.debug("onEvent: indexing finished in "
+		    + String.valueOf(System.currentTimeMillis() - time)
+		    + " ms.");
+	}
+    }
 
-      if (log.isDebugEnabled())
-      {
-         log.debug("onEvent: indexing finished in " + String.valueOf(System.currentTimeMillis() - time) + " ms.");
-      }
-   }
+    public void createNewOrAdd(String key, ItemState state,
+	    Map<String, List<ItemState>> updatedNodes) {
+	List<ItemState> list = updatedNodes.get(key);
+	if (list == null) {
+	    list = new ArrayList<ItemState>();
+	    updatedNodes.put(key, list);
+	}
+	list.add(state);
 
-   public void createNewOrAdd(String key, ItemState state, Map<String, List<ItemState>> updatedNodes)
-   {
-      List<ItemState> list = updatedNodes.get(key);
-      if (list == null)
-      {
-         list = new ArrayList<ItemState>();
-         updatedNodes.put(key, list);
-      }
-      list.add(state);
+    }
 
-   }
+    public void start() {
 
-   public void start()
-   {
+	if (log.isDebugEnabled())
+	    log.debug("start");
 
-      if (log.isDebugEnabled())
-         log.debug("start");
+	if (indexingTree == null) {
+	    List<QPath> excludedPath = new ArrayList<QPath>();
+	    // Calculating excluded node identifiers
+	    excludedPath.add(Constants.JCR_SYSTEM_PATH);
 
-      // Calculating excluded node identifiers
-      excludedPaths.add(Constants.JCR_SYSTEM_PATH);
+	    if (config.getExcludedNodeIdentifers() != null) {
+		StringTokenizer stringTokenizer = new StringTokenizer(config
+			.getExcludedNodeIdentifers());
+		while (stringTokenizer.hasMoreTokens()) {
 
-      if (config.getExcludedNodeIdentifers() != null)
-      {
-         StringTokenizer stringTokenizer = new StringTokenizer(config.getExcludedNodeIdentifers());
-         while (stringTokenizer.hasMoreTokens())
-         {
+		    try {
+			ItemData excludeData = itemMgr
+				.getItemData(stringTokenizer.nextToken());
+			if (excludeData != null)
+			    excludedPath.add(excludeData.getQPath());
+		    } catch (RepositoryException e) {
+			log.warn(e.getLocalizedMessage());
+		    }
+		}
+	    }
 
-            try
-            {
-               ItemData excludeData = itemMgr.getItemData(stringTokenizer.nextToken());
-               if (excludeData != null)
-                  excludedPaths.add(excludeData.getQPath());
-            }
-            catch (RepositoryException e)
-            {
-               log.warn(e.getLocalizedMessage());
-            }
-         }
-      }
+	    NodeData indexingRootData = null;
+	    if (config.getRootNodeIdentifer() != null) {
+		try {
+		    ItemData indexingRootDataItem = itemMgr.getItemData(config
+			    .getRootNodeIdentifer());
+		    if (indexingRootDataItem != null
+			    && indexingRootDataItem.isNode())
+			indexingRootData = (NodeData) indexingRootDataItem;
+		} catch (RepositoryException e) {
+		    log.warn(e.getLocalizedMessage() + " Indexing root set to "
+			    + Constants.ROOT_PATH.getAsString());
 
-      indexingRoot = Constants.ROOT_PATH;
-      if (config.getRootNodeIdentifer() != null)
-      {
-         try
-         {
-            ItemData indexingRootData = itemMgr.getItemData(config.getRootNodeIdentifer());
-            if (indexingRootData != null && indexingRootData.isNode())
-               indexingRoot = indexingRootData.getQPath();
-         }
-         catch (RepositoryException e)
-         {
-            log.warn(e.getLocalizedMessage() + " Indexing root set to " + indexingRoot.getAsString());
-         }
+		}
 
-      }
-      try
-      {
-        // handler.init(null);
-         initializeQueryHandler();
+	    } else {
+		try {
+		    indexingRootData = (NodeData) itemMgr
+			    .getItemData(Constants.ROOT_UUID);
+		} catch (RepositoryException e) {
+		    log.error("Failed to load root node data");
+		}
+	    }
 
-      }
-//      catch (IOException e)
-//      {
-//         log.error(e.getLocalizedMessage());
-//         handler = null;
-//         throw new RuntimeException(e.getLocalizedMessage(), e.getCause());
-//      }
-      catch (RepositoryException e)
-      {
-         log.error(e.getLocalizedMessage());
-         handler = null;
-         throw new RuntimeException(e.getLocalizedMessage(), e.getCause());
-      }
-      catch (RepositoryConfigurationException e)
-      {
-         log.error(e.getLocalizedMessage());
-         handler = null;
-         throw new RuntimeException(e.getLocalizedMessage(), e.getCause());
-      }
-   }
+	    indexingTree = new IndexingTree(indexingRootData, excludedPath);
+	}
+	try {
+	    initializeQueryHandler();
+	} catch (RepositoryException e) {
+	    log.error(e.getLocalizedMessage());
+	    handler = null;
+	    throw new RuntimeException(e.getLocalizedMessage(), e.getCause());
+	} catch (RepositoryConfigurationException e) {
+	    log.error(e.getLocalizedMessage());
+	    handler = null;
+	    throw new RuntimeException(e.getLocalizedMessage(), e.getCause());
+	}
+    }
 
-   public void stop()
-   {
-      handler.close();
-      log.info("Search manager stopped");
-   }
+    public void stop() {
+	handler.close();
+	log.info("Search manager stopped");
+    }
 
-   /**
-    * Checks if the given event should be excluded based on the
-    * {@link #excludePath} setting.
-    * 
-    * @param event observation event
-    * @return <code>true</code> if the event should be excluded,
-    *         <code>false</code> otherwise
-    */
-   protected boolean isExcluded(ItemState event)
-   {
+    // /**
+    // * Checks if the given event should be excluded based on the
+    // * {@link #excludePath} setting.
+    // *
+    // * @param event
+    // * observation event
+    // * @return <code>true</code> if the event should be excluded,
+    // * <code>false</code> otherwise
+    // */
+    // protected boolean isExcluded(ItemState event) {
+    //
+    // for (QPath excludedPath : excludedPaths) {
+    // if (event.getData().getQPath().isDescendantOf(excludedPath)
+    // || event.getData().getQPath().equals(excludedPath))
+    // return true;
+    // }
+    //
+    // return !event.getData().getQPath().isDescendantOf(indexingRoot)
+    // && !event.getData().getQPath().equals(indexingRoot);
+    // }
 
-      for (QPath excludedPath : excludedPaths)
-      {
-         if (event.getData().getQPath().isDescendantOf(excludedPath) || event.getData().getQPath().equals(excludedPath))
-            return true;
-      }
+    protected QueryHandlerContext createQueryHandlerContext(
+	    QueryHandler parentHandler) throws RepositoryConfigurationException {
 
-      return !event.getData().getQPath().isDescendantOf(indexingRoot)
-         && !event.getData().getQPath().equals(indexingRoot);
-   }
+	QueryHandlerContext context = new QueryHandlerContext(itemMgr,
+		indexingTree, nodeTypeDataManager, nsReg, parentHandler, config
+			.getIndexDir(), extractor, true);
+	return context;
+    }
 
-   protected QueryHandlerContext createQueryHandlerContext(QueryHandler parentHandler)
-      throws RepositoryConfigurationException
-   {
+    /**
+     * Initializes the query handler.
+     * 
+     * @throws RepositoryException
+     *             if the query handler cannot be initialized.
+     * @throws RepositoryConfigurationException
+     * @throws ClassNotFoundException
+     */
+    protected void initializeQueryHandler() throws RepositoryException,
+	    RepositoryConfigurationException {
+	// initialize query handler
+	String className = config.getType();
+	if (className == null)
+	    throw new RepositoryConfigurationException(
+		    "Query handler configuration failed: handler class name is not set");
 
-      QueryHandlerContext context =
-         new QueryHandlerContext(itemMgr, config.getRootNodeIdentifer() != null ? config.getRootNodeIdentifer()
-            : Constants.ROOT_UUID, nodeTypeDataManager, nsReg, parentHandler, config.getIndexDir(), extractor);
-      return context;
-   }
+	try {
+	    Class<?> qHandlerClass = Class.forName(className, true, this
+		    .getClass().getClassLoader());
+	    Constructor<?> constructor = qHandlerClass.getConstructor(
+		    QueryHandlerEntry.class, ConfigurationManager.class);
+	    handler = (QueryHandler) constructor.newInstance(config
+		    .getQueryHandlerEntry(), cfm);
+	    QueryHandler parentHandler = (this.parentSearchManager != null) ? parentSearchManager
+		    .getHandler()
+		    : null;
+	    QueryHandlerContext context = createQueryHandlerContext(parentHandler);
+	    handler.init(context);
 
-   /**
-    * Initializes the query handler.
-    * 
-    * @throws RepositoryException if the query handler cannot be initialized.
-    * @throws RepositoryConfigurationException
-    * @throws ClassNotFoundException
-    */
-   protected void initializeQueryHandler() throws RepositoryException, RepositoryConfigurationException
-   {
-      // initialize query handler
-      String className = config.getType();
-      if (className == null)
-         throw new RepositoryConfigurationException("Content hanler       configuration fail");
+	} catch (SecurityException e) {
+	    throw new RepositoryException(e.getMessage(), e);
+	} catch (IllegalArgumentException e) {
+	    throw new RepositoryException(e.getMessage(), e);
+	} catch (ClassNotFoundException e) {
+	    throw new RepositoryException(e.getMessage(), e);
+	} catch (NoSuchMethodException e) {
+	    throw new RepositoryException(e.getMessage(), e);
+	} catch (InstantiationException e) {
+	    throw new RepositoryException(e.getMessage(), e);
+	} catch (IllegalAccessException e) {
+	    throw new RepositoryException(e.getMessage(), e);
+	} catch (InvocationTargetException e) {
+	    throw new RepositoryException(e.getMessage(), e);
+	} catch (IOException e) {
+	    throw new RepositoryException(e.getMessage(), e);
+	}
+    }
 
-      try
-      {
-         Class qHandlerClass = Class.forName(className, true, this.getClass().getClassLoader());
-         Constructor constuctor = qHandlerClass.getConstructor(QueryHandlerEntry.class, ConfigurationManager.class);
-         handler = (QueryHandler)constuctor.newInstance(config.getQueryHandlerEntry(), cfm);
-         QueryHandler parentHandler = (this.parentSearchManager != null) ? parentSearchManager.getHandler() : null;
-         QueryHandlerContext context = createQueryHandlerContext(parentHandler);
-         handler.init(context);
-      }
-      catch (SecurityException e)
-      {
-         throw new RepositoryException(e.getMessage(), e);
-      }
-      catch (IllegalArgumentException e)
-      {
-         throw new RepositoryException(e.getMessage(), e);
-      }
-      catch (ClassNotFoundException e)
-      {
-         throw new RepositoryException(e.getMessage(), e);
-      }
-      catch (NoSuchMethodException e)
-      {
-         throw new RepositoryException(e.getMessage(), e);
-      }
-      catch (InstantiationException e)
-      {
-         throw new RepositoryException(e.getMessage(), e);
-      }
-      catch (IllegalAccessException e)
-      {
-         throw new RepositoryException(e.getMessage(), e);
-      }
-      catch (InvocationTargetException e)
-      {
-         throw new RepositoryException(e.getMessage(), e);
-      }
-      catch (IOException e)
-      {
-         throw new RepositoryException(e.getMessage(), e);
-      }
-   }
-   /**
-    * Creates a new instance of an {@link AbstractQueryImpl} which is not
-    * initialized.
-    *
-    * @return an new query instance.
-    * @throws RepositoryException if an error occurs while creating a new query
-    *                             instance.
-    */
-   protected AbstractQueryImpl createQueryInstance() throws RepositoryException {
-       try {
-           String queryImplClassName = handler.getQueryClass();
-           Object obj = Class.forName(queryImplClassName).newInstance();
-           if (obj instanceof AbstractQueryImpl) {
-               return (AbstractQueryImpl) obj;
-           } else {
-               throw new IllegalArgumentException(queryImplClassName
-                       + " is not of type " + AbstractQueryImpl.class.getName());
-           }
-       } catch (Throwable t) {
-           throw new RepositoryException("Unable to create query: " + t.toString(), t);
-       }
-   }
+    /**
+     * Creates a new instance of an {@link AbstractQueryImpl} which is not
+     * initialized.
+     * 
+     * @return a new query instance.
+     * @throws RepositoryException
+     *             if an error occurs while creating a new query instance.
+     */
+    protected AbstractQueryImpl createQueryInstance()
+	    throws RepositoryException {
+	try {
+	    String queryImplClassName = handler.getQueryClass();
+	    Object obj = Class.forName(queryImplClassName).newInstance();
+	    if (obj instanceof AbstractQueryImpl) {
+		return (AbstractQueryImpl) obj;
+	    } else {
+		throw new IllegalArgumentException(queryImplClassName
+			+ " is not of type "
+			+ AbstractQueryImpl.class.getName());
+	    }
+	} catch (Throwable t) {
+	    throw new RepositoryException("Unable to create query: "
+		    + t.toString(), t);
+	}
+    }
 
 }

Modified: jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SystemSearchManager.java
===================================================================
--- jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SystemSearchManager.java	2009-10-01 15:37:53 UTC (rev 193)
+++ jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/SystemSearchManager.java	2009-10-02 08:25:49 UTC (rev 194)
@@ -22,13 +22,14 @@
 import org.exoplatform.services.jcr.config.RepositoryConfigurationException;
 import org.exoplatform.services.jcr.core.nodetype.NodeTypeDataManager;
 import org.exoplatform.services.jcr.dataflow.ItemStateChangesLog;
+import org.exoplatform.services.jcr.datamodel.NodeData;
+import org.exoplatform.services.jcr.datamodel.QPath;
 import org.exoplatform.services.jcr.impl.Constants;
 import org.exoplatform.services.jcr.impl.core.NamespaceRegistryImpl;
 import org.exoplatform.services.jcr.impl.dataflow.persistent.WorkspacePersistentDataManager;
 import org.exoplatform.services.log.ExoLogger;
 import org.exoplatform.services.log.Log;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -41,98 +42,89 @@
  * @version $Id: SystemSearchManager.java 13891 2008-05-05 16:02:30Z pnedonosko
  *          $
  */
-public class SystemSearchManager extends SearchManager
-{
+public class SystemSearchManager extends SearchManager {
 
-   /**
-    * Class logger.
-    */
-   private final Log log = ExoLogger.getLogger("jcr.SystemSearchManager");
+    /**
+     * Class logger.
+     */
+    private final Log log = ExoLogger.getLogger("jcr.SystemSearchManager");
 
-   /**
-    * Is started flag.
-    */
-   private boolean isStarted = false;
+    /**
+     * Is started flag.
+     */
+    private boolean isStarted = false;
 
-   /**
-    * ChangesLog Buffer (used for saves before start).
-    */
-   private List<ItemStateChangesLog> changesLogBuffer = new ArrayList<ItemStateChangesLog>();
+    /**
+     * ChangesLog Buffer (used for saves before start).
+     */
+    private List<ItemStateChangesLog> changesLogBuffer = new ArrayList<ItemStateChangesLog>();
 
-   public static final String INDEX_DIR_SUFFIX = "system";
+    public static final String INDEX_DIR_SUFFIX = "system";
 
-   public SystemSearchManager(QueryHandlerEntry config, NamespaceRegistryImpl nsReg, NodeTypeDataManager ntReg,
-      WorkspacePersistentDataManager itemMgr, DocumentReaderService service, ConfigurationManager cfm)
-      throws RepositoryException, RepositoryConfigurationException
-   {
-      super(config, nsReg, ntReg, itemMgr, null, service, cfm);
-   }
+    public SystemSearchManager(QueryHandlerEntry config,
+	    NamespaceRegistryImpl nsReg, NodeTypeDataManager ntReg,
+	    WorkspacePersistentDataManager itemMgr,
+	    DocumentReaderService service, ConfigurationManager cfm)
+	    throws RepositoryException, RepositoryConfigurationException {
+	super(config, nsReg, ntReg, itemMgr, null, service, cfm);
+    }
 
-   @Override
-   public void onSaveItems(ItemStateChangesLog changesLog)
-   {
-      if (!isStarted)
-      {
-         changesLogBuffer.add(changesLog);
-      }
-      else
-      {
-         super.onSaveItems(changesLog);
-      }
-   }
+    @Override
+    public void onSaveItems(ItemStateChangesLog changesLog) {
+	if (!isStarted) {
+	    changesLogBuffer.add(changesLog);
+	} else {
+	    super.onSaveItems(changesLog);
+	}
+    }
 
-   @Override
-   public void start()
-   {
-      indexingRoot = Constants.JCR_SYSTEM_PATH;
-      excludedPaths.remove(Constants.JCR_SYSTEM_PATH);
-      isStarted = true;
-      try
-      {
-         //handler.init(null);
-         initializeQueryHandler();
+    @Override
+    public void start() {
 
-      }
-//      catch (IOException e)
-//      {
-//         log.error(e.getLocalizedMessage());
-//         handler = null;
-//         changesLogBuffer.clear();
-//         changesLogBuffer = null;
-//         throw new RuntimeException(e);
-//      }
-      catch (RepositoryException e)
-      {
-         log.error(e.getLocalizedMessage());
-         handler = null;
-         changesLogBuffer.clear();
-         changesLogBuffer = null;
-         throw new RuntimeException(e);
-      }
-      catch (RepositoryConfigurationException e)
-      {
-         log.error(e.getLocalizedMessage());
-         handler = null;
-         changesLogBuffer.clear();
-         changesLogBuffer = null;
-         throw new RuntimeException(e);
-      }
-      for (ItemStateChangesLog bufferedChangesLog : changesLogBuffer)
-      {
-         super.onSaveItems(bufferedChangesLog);
-      }
-      changesLogBuffer.clear();
-      changesLogBuffer = null;
-   }
+	isStarted = true;
+	try {
+	    if (indexingTree == null) {
+		List<QPath> excludedPaths = new ArrayList<QPath>();
 
-   @Override
-   protected QueryHandlerContext createQueryHandlerContext(QueryHandler parentHandler)
-      throws RepositoryConfigurationException
-   {
-      QueryHandlerContext context =
-         new QueryHandlerContext(itemMgr, Constants.SYSTEM_UUID, nodeTypeDataManager, nsReg, parentHandler, config
-            .getIndexDir()
-            + "_" + INDEX_DIR_SUFFIX, extractor);
-      return context;
-   }
+		NodeData indexingRootNodeData = (NodeData) itemMgr
+			.getItemData(Constants.SYSTEM_UUID);
+
+		indexingTree = new IndexingTree(indexingRootNodeData,
+			excludedPaths);
+	    }
+	    initializeQueryHandler();
+	} catch (RepositoryException e) {
+	    log.error(e.getLocalizedMessage());
+	    handler = null;
+	    changesLogBuffer.clear();
+	    changesLogBuffer = null;
+	    throw new RuntimeException(e);
+	} catch (RepositoryConfigurationException e) {
+	    log.error(e.getLocalizedMessage());
+	    handler = null;
+	    changesLogBuffer.clear();
+	    changesLogBuffer = null;
+	    throw new RuntimeException(e);
+	}
+	for (ItemStateChangesLog bufferedChangesLog : changesLogBuffer) {
+	    super.onSaveItems(bufferedChangesLog);
+	}
+	changesLogBuffer.clear();
+	changesLogBuffer = null;
+    }
+
+    @Override
+    protected QueryHandlerContext createQueryHandlerContext(
+	    QueryHandler parentHandler) throws RepositoryConfigurationException {
+	QueryHandlerContext context = new QueryHandlerContext(itemMgr,
+		indexingTree, nodeTypeDataManager, nsReg, parentHandler, config
+			.getIndexDir()
+			+ "_" + INDEX_DIR_SUFFIX, extractor, changesLogBuffer
+			.size() > 0
+			&& !isStarted);
+	return context;
+    }
 }
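
The rewritten start() above builds the IndexingTree for the system search directly from the jcr:system node, with an empty exclusion list, instead of juggling the old indexingRoot/excludedPaths fields. For the regular per-workspace SearchManager the same constructor would presumably be used the other way round: root the tree at the workspace root node and exclude the jcr:system subtree. A minimal sketch under that assumption (rootNodeUuid is a hypothetical identifier, not taken from this commit):

   // Sketch only: an IndexingTree for a default workspace that leaves
   // /jcr:system to the SystemSearchManager above.
   List<QPath> excludedPaths = new ArrayList<QPath>();
   excludedPaths.add(Constants.JCR_SYSTEM_PATH);
   NodeData workspaceRoot = (NodeData) itemMgr.getItemData(rootNodeUuid); // hypothetical uuid
   IndexingTree workspaceTree = new IndexingTree(workspaceRoot, excludedPaths);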

Modified: jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/MultiIndex.java
===================================================================
--- jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/MultiIndex.java	2009-10-01 15:37:53 UTC (rev 193)
+++ jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/MultiIndex.java	2009-10-02 08:25:49 UTC (rev 194)
@@ -16,6 +16,19 @@
  */
 package org.exoplatform.services.jcr.impl.core.query.lucene;
 
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.store.Directory;
+import org.exoplatform.services.jcr.dataflow.ItemDataConsumer;
+import org.exoplatform.services.jcr.datamodel.ItemData;
+import org.exoplatform.services.jcr.datamodel.NodeData;
+import org.exoplatform.services.jcr.impl.Constants;
+import org.exoplatform.services.jcr.impl.core.query.IndexingTree;
+import org.exoplatform.services.jcr.impl.core.query.lucene.directory.DirectoryManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -33,37 +46,25 @@
 import javax.jcr.ItemNotFoundException;
 import javax.jcr.RepositoryException;
 
-import org.apache.lucene.document.Document;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.store.Directory;
-
-import org.exoplatform.services.jcr.dataflow.ItemDataConsumer;
-import org.exoplatform.services.jcr.datamodel.ItemData;
-import org.exoplatform.services.jcr.datamodel.NodeData;
-import org.exoplatform.services.jcr.impl.Constants;
-import org.exoplatform.services.jcr.impl.core.query.lucene.directory.DirectoryManager;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * A <code>MultiIndex</code> consists of a {@link VolatileIndex} and multiple
  * {@link PersistentIndex}es. The goal is to keep most parts of the index open
- * with index readers and write new index data to the volatile index. When
- * the volatile index reaches a certain size (see {@link SearchIndex#setMinMergeDocs(int)})
- * a new persistent index is created with the index data from the volatile index,
- * the same happens when the volatile index has been idle for some time (see
- * {@link SearchIndex#setVolatileIdleTime(int)}).
- * The new persistent index is then added to the list of already existing
- * persistent indexes. Further operations on the new persistent index will
- * however only require an <code>IndexReader</code> which serves for queries
- * but also for delete operations on the index.
+ * with index readers and write new index data to the volatile index. When the
+ * volatile index reaches a certain size (see
+ * {@link SearchIndex#setMinMergeDocs(int)}) a new persistent index is created
+ * with the index data from the volatile index; the same happens when the
+ * volatile index has been idle for some time (see
+ * {@link SearchIndex#setVolatileIdleTime(int)}). The new persistent index is
+ * then added to the list of already existing persistent indexes. Further
+ * operations on the new persistent index will however only require an
+ * <code>IndexReader</code> which serves for queries but also for delete
+ * operations on the index.
  * <p/>
- * The persistent indexes are merged from time to time. The merge behaviour
- * is configurable using the methods: {@link SearchIndex#setMaxMergeDocs(int)},
- * {@link SearchIndex#setMergeFactor(int)} and {@link SearchIndex#setMinMergeDocs(int)}.
- * For detailed description of the configuration parameters see also the lucene
- * <code>IndexWriter</code> class.
+ * The persistent indexes are merged from time to time. The merge behaviour is
+ * configurable using the methods: {@link SearchIndex#setMaxMergeDocs(int)},
+ * {@link SearchIndex#setMergeFactor(int)} and
+ * {@link SearchIndex#setMinMergeDocs(int)}. For a detailed description of the
+ * configuration parameters, see also the Lucene <code>IndexWriter</code> class.
  * <p/>
  * This class is thread-safe.
  * <p/>
@@ -72,2175 +73,2024 @@
  * thread and reader threads is done using {@link #updateMonitor} and
  * {@link #updateInProgress}.
  */
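
The thresholds mentioned in this javadoc are plain setters on SearchIndex, so tuning when the volatile index is persisted and how persistent segments are merged comes down to a few configuration calls. A hedged sketch, assuming a SearchIndex instance named searchIndex obtained from the query handler configuration; the values are illustrative, not recommendations from this commit:

   // Illustrative tuning of the knobs referenced in the class javadoc above.
   searchIndex.setMinMergeDocs(100);     // volatile index size that triggers a new persistent index
   searchIndex.setVolatileIdleTime(3);   // idle time after which the volatile index is persisted anyway
   searchIndex.setMaxMergeDocs(100000);  // upper bound on the size of merged segments
   searchIndex.setMergeFactor(10);       // how many segments take part in a merge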
-public class MultiIndex
-{
+public class MultiIndex {
 
-   /**
-    * The logger instance for this class
-    */
-   private static final Logger log = LoggerFactory.getLogger(MultiIndex.class);
+    /**
+     * The logger instance for this class
+     */
+    private static final Logger log = LoggerFactory.getLogger(MultiIndex.class);
 
-   //    /**
-   //     * A path factory.
-   //     */
-   //    private static final PathFactory PATH_FACTORY = PathFactoryImpl.getInstance();
+    /**
+     * Names of active persistent index directories.
+     */
+    private final IndexInfos indexNames = new IndexInfos("indexes");
 
-   /**
-    * Names of active persistent index directories.
-    */
-   private final IndexInfos indexNames = new IndexInfos("indexes");
+    /**
+     * Names of index directories that can be deleted.
+     */
+    private final Set deletable = new HashSet();
 
-   /**
-    * Names of index directories that can be deleted.
-    */
-   private final Set deletable = new HashSet();
+    /**
+     * List of open persistent indexes. This list may also contain an open
+     * PersistentIndex owned by the IndexMerger daemon. Such an index is not
+     * registered with indexNames and <b>must not</b> be used in regular index
+     * operations (delete node, etc.)!
+     */
+    private final List indexes = new ArrayList();
 
-   /**
-    * List of open persistent indexes. This list may also contain an open
-    * PersistentIndex owned by the IndexMerger daemon. Such an index is not
-    * registered with indexNames and <b>must not</b> be used in regular index
-    * operations (delete node, etc.)!
-    */
-   private final List indexes = new ArrayList();
+    /**
+     * The internal namespace mappings of the query manager.
+     */
+    private final NamespaceMappings nsMappings;
 
-   /**
-    * The internal namespace mappings of the query manager.
-    */
-   private final NamespaceMappings nsMappings;
+    /**
+     * The directory manager.
+     */
+    private final DirectoryManager directoryManager;
 
-   /**
-    * The directory manager.
-    */
-   private final DirectoryManager directoryManager;
+    /**
+     * The base directory to store the index.
+     */
+    private final Directory indexDir;
 
-   /**
-    * The base directory to store the index.
-    */
-   private final Directory indexDir;
+    /**
+     * The query handler
+     */
+    private final SearchIndex handler;
 
-   /**
-    * The query handler
-    */
-   private final SearchIndex handler;
+    /**
+     * The volatile index.
+     */
+    private VolatileIndex volatileIndex;
 
-   /**
-    * The volatile index.
-    */
-   private VolatileIndex volatileIndex;
+    /**
+     * Flag indicating whether an update operation is in progress.
+     */
+    private boolean updateInProgress = false;
 
-   /**
-    * Flag indicating whether an update operation is in progress.
-    */
-   private boolean updateInProgress = false;
+    /**
+     * If not <code>null</code> points to a valid <code>IndexReader</code> that
+     * reads from all indexes, including volatile and persistent indexes.
+     */
+    private CachingMultiIndexReader multiReader;
 
-   /**
-    * If not <code>null</code> points to a valid <code>IndexReader</code> that
-    * reads from all indexes, including volatile and persistent indexes.
-    */
-   private CachingMultiIndexReader multiReader;
+    /**
+     * Shared document number cache across all persistent indexes.
+     */
+    private final DocNumberCache cache;
 
-   /**
-    * Shared document number cache across all persistent indexes.
-    */
-   private final DocNumberCache cache;
+    /**
+     * Monitor to use to synchronize access to {@link #multiReader} and
+     * {@link #updateInProgress}.
+     */
+    private final Object updateMonitor = new Object();
 
-   /**
-    * Monitor to use to synchronize access to {@link #multiReader} and
-    * {@link #updateInProgress}.
-    */
-   private final Object updateMonitor = new Object();
+    /**
+     * <code>true</code> if the redo log contained entries on startup.
+     */
+    private boolean redoLogApplied = false;
 
-   /**
-    * <code>true</code> if the redo log contained entries on startup.
-    */
-   private boolean redoLogApplied = false;
+    /**
+     * The time this index was last flushed or a transaction was committed.
+     */
+    private long lastFlushTime;
 
-   /**
-    * The time this index was last flushed or a transaction was committed.
-    */
-   private long lastFlushTime;
+    /**
+     * The <code>IndexMerger</code> for this <code>MultiIndex</code>.
+     */
+    private final IndexMerger merger;
 
-   /**
-    * The <code>IndexMerger</code> for this <code>MultiIndex</code>.
-    */
-   private final IndexMerger merger;
+    /**
+     * Timer to schedule flushes of this index after some idle time.
+     */
+    private static final Timer FLUSH_TIMER = new Timer(true);
 
-   /**
-    * Timer to schedule flushes of this index after some idle time.
-    */
-   private static final Timer FLUSH_TIMER = new Timer(true);
+    /**
+     * Task that is periodically called by {@link #FLUSH_TIMER} and checks if
+     * index should be flushed.
+     */
+    private final TimerTask flushTask;
 
-   /**
-    * Task that is periodically called by {@link #FLUSH_TIMER} and checks
-    * if index should be flushed.
-    */
-   private final TimerTask flushTask;
+    /**
+     * The RedoLog of this <code>MultiIndex</code>.
+     */
+    private final RedoLog redoLog;
 
-   /**
-    * The RedoLog of this <code>MultiIndex</code>.
-    */
-   private final RedoLog redoLog;
+    /**
+     * The indexing queue with pending text extraction jobs.
+     */
+    private IndexingQueue indexingQueue;
 
-   /**
-    * The indexing queue with pending text extraction jobs.
-    */
-   private IndexingQueue indexingQueue;
+    /**
+     * The indexing tree that defines the indexing root and the subtrees
+     * excluded from indexing.
+     */
+    private final IndexingTree indexingTree;
 
-   /**
-    * Set&lt;NodeId> of uuids that should not be indexed.
-    */
-   private final Set excludedIDs;
+    /**
+     * The next transaction id.
+     */
+    private long nextTransactionId = 0;
 
-   /**
-    * The next transaction id.
-    */
-   private long nextTransactionId = 0;
+    /**
+     * The current transaction id.
+     */
+    private long currentTransactionId = -1;
 
-   /**
-    * The current transaction id.
-    */
-   private long currentTransactionId = -1;
+    /**
+     * Flag indicating whether re-indexing is running.
+     */
+    private boolean reindexing = false;
 
-   /**
-    * Flag indicating whether re-indexing is running.
-    */
-   private boolean reindexing = false;
+    /**
+     * The index format version of this multi index.
+     */
+    private final IndexFormatVersion version;
 
-   /**
-    * The index format version of this multi index.
-    */
-   private final IndexFormatVersion version;
+    /**
+     * Creates a new MultiIndex.
+     * 
+     * @param handler
+     *            the search handler
+     * @param indexingTree
+     *            the indexing tree that defines the indexing root and the
+     *            subtrees that should neither be indexed nor traversed.
+     * @throws IOException
+     *             if an error occurs
+     */
+    MultiIndex(SearchIndex handler, IndexingTree indexingTree)
+	    throws IOException {
+	this.directoryManager = handler.getDirectoryManager();
+	this.indexDir = directoryManager.getDirectory(".");
+	this.handler = handler;
+	this.cache = new DocNumberCache(handler.getCacheSize());
+	this.redoLog = new RedoLog(indexDir);
+	this.indexingTree = indexingTree;
+	this.nsMappings = handler.getNamespaceMappings();
 
-   /**
-    * Creates a new MultiIndex.
-    *
-    * @param handler the search handler
-    * @param excludedIDs   Set&lt;NodeId> that contains uuids that should not
-    *                      be indexed nor further traversed.
-    * @throws IOException if an error occurs
-    */
-   MultiIndex(SearchIndex handler, Set excludedIDs) throws IOException
-   {
-      this.directoryManager = handler.getDirectoryManager();
-      this.indexDir = directoryManager.getDirectory(".");
-      this.handler = handler;
-      this.cache = new DocNumberCache(handler.getCacheSize());
-      this.redoLog = new RedoLog(indexDir);
-      this.excludedIDs = new HashSet(excludedIDs);
-      this.nsMappings = handler.getNamespaceMappings();
+	if (indexNames.exists(indexDir)) {
+	    indexNames.read(indexDir);
+	}
 
-      if (indexNames.exists(indexDir))
-      {
-         indexNames.read(indexDir);
-      }
+	// as of 1.5 deletable file is not used anymore
+	removeDeletable();
 
-      // as of 1.5 deletable file is not used anymore
-      removeDeletable();
+	// initialize IndexMerger
+	merger = new IndexMerger(this);
+	merger.setMaxMergeDocs(handler.getMaxMergeDocs());
+	merger.setMergeFactor(handler.getMergeFactor());
+	merger.setMinMergeDocs(handler.getMinMergeDocs());
 
-      // initialize IndexMerger
-      merger = new IndexMerger(this);
-      merger.setMaxMergeDocs(handler.getMaxMergeDocs());
-      merger.setMergeFactor(handler.getMergeFactor());
-      merger.setMinMergeDocs(handler.getMinMergeDocs());
+	IndexingQueueStore store = new IndexingQueueStore(indexDir);
 
-      IndexingQueueStore store = new IndexingQueueStore(indexDir);
+	// initialize indexing queue
+	this.indexingQueue = new IndexingQueue(store);
 
-      // initialize indexing queue
-      this.indexingQueue = new IndexingQueue(store);
+	// open persistent indexes
+	for (int i = 0; i < indexNames.size(); i++) {
+	    String name = indexNames.getName(i);
+	    // only open if it still exists
+	    // it is possible that indexNames still contains a name for
+	    // an index that has been deleted, but indexNames has not been
+	    // written to disk.
+	    if (!directoryManager.hasDirectory(name)) {
+		log.debug("index does not exist anymore: " + name);
+		// move on to next index
+		continue;
+	    }
+	    PersistentIndex index = new PersistentIndex(name, handler
+		    .getTextAnalyzer(), handler.getSimilarity(), cache,
+		    indexingQueue, directoryManager);
+	    index.setMaxFieldLength(handler.getMaxFieldLength());
+	    index.setUseCompoundFile(handler.getUseCompoundFile());
+	    index.setTermInfosIndexDivisor(handler.getTermInfosIndexDivisor());
+	    indexes.add(index);
+	    merger.indexAdded(index.getName(), index.getNumDocuments());
+	}
 
-      // open persistent indexes
-      for (int i = 0; i < indexNames.size(); i++)
-      {
-         String name = indexNames.getName(i);
-         // only open if it still exists
-         // it is possible that indexNames still contains a name for
-         // an index that has been deleted, but indexNames has not been
-         // written to disk.
-         if (!directoryManager.hasDirectory(name))
-         {
-            log.debug("index does not exist anymore: " + name);
-            // move on to next index
-            continue;
-         }
-         PersistentIndex index =
-            new PersistentIndex(name, handler.getTextAnalyzer(), handler.getSimilarity(), cache, indexingQueue,
-               directoryManager);
-         index.setMaxFieldLength(handler.getMaxFieldLength());
-         index.setUseCompoundFile(handler.getUseCompoundFile());
-         index.setTermInfosIndexDivisor(handler.getTermInfosIndexDivisor());
-         indexes.add(index);
-         merger.indexAdded(index.getName(), index.getNumDocuments());
-      }
+	// init volatile index
+	resetVolatileIndex();
 
-      // init volatile index
-      resetVolatileIndex();
+	// set index format version and at the same time
+	// initialize hierarchy cache if requested.
+	CachingMultiIndexReader reader = getIndexReader(handler
+		.isInitializeHierarchyCache());
+	try {
+	    version = IndexFormatVersion.getVersion(reader);
+	} finally {
+	    reader.release();
+	}
 
-      // set index format version and at the same time
-      // initialize hierarchy cache if requested.
-      CachingMultiIndexReader reader = getIndexReader(handler.isInitializeHierarchyCache());
-      try
-      {
-         version = IndexFormatVersion.getVersion(reader);
-      }
-      finally
-      {
-         reader.release();
-      }
+	indexingQueue.initialize(this);
 
-      indexingQueue.initialize(this);
+	redoLogApplied = redoLog.hasEntries();
 
-      redoLogApplied = redoLog.hasEntries();
+	// run recovery
+	Recovery.run(this, redoLog);
 
-      // run recovery
-      Recovery.run(this, redoLog);
+	// enqueue unused segments for deletion
+	enqueueUnusedSegments();
+	attemptDelete();
 
-      // enqueue unused segments for deletion
-      enqueueUnusedSegments();
-      attemptDelete();
+	// now that we are ready, start index merger
+	merger.start();
 
-      // now that we are ready, start index merger
-      merger.start();
+	if (redoLogApplied) {
+	    // wait for the index merge to finish pending jobs
+	    try {
+		merger.waitUntilIdle();
+	    } catch (InterruptedException e) {
+		// move on
+	    }
+	    flush();
+	}
 
-      if (redoLogApplied)
-      {
-         // wait for the index merge to finish pending jobs
-         try
-         {
-            merger.waitUntilIdle();
-         }
-         catch (InterruptedException e)
-         {
-            // move on
-         }
-         flush();
-      }
+	flushTask = new TimerTask() {
+	    public void run() {
+		// check if there are any indexing jobs finished
+		checkIndexingQueue();
+		// check if volatile index should be flushed
+		checkFlush();
+	    }
+	};
 
-      flushTask = new TimerTask()
-      {
-         public void run()
-         {
-            // check if there are any indexing jobs finished
-            checkIndexingQueue();
-            // check if volatile index should be flushed
-            checkFlush();
-         }
-      };
+	if (indexNames.size() > 0) {
+	    scheduleFlushTask();
+	}
+    }
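
The constructor only re-opens whatever segments already exist on disk; populating an empty index is a separate step. The SearchIndex side is not part of this hunk, so the following start-up sketch is an assumption about how the two calls fit together (searchIndex, indexingTree and context are placeholders):

   // Assumed start-up hand-off: open the multi index, then build it from the
   // indexing root if no documents have been indexed yet.
   MultiIndex index = new MultiIndex(searchIndex, indexingTree);
   if (index.numDocs() == 0) {
       index.createInitialIndex(context.getItemStateManager());
   }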
 
-      if (indexNames.size() > 0)
-      {
-         scheduleFlushTask();
-      }
-   }
+    /**
+     * Returns the number of documents in this index.
+     * 
+     * @return the number of documents in this index.
+     * @throws IOException
+     *             if an error occurs while reading from the index.
+     */
+    int numDocs() throws IOException {
+	if (indexNames.size() == 0) {
+	    return volatileIndex.getNumDocuments();
+	} else {
+	    CachingMultiIndexReader reader = getIndexReader();
+	    try {
+		return reader.numDocs();
+	    } finally {
+		reader.release();
+	    }
+	}
+    }
 
-   /**
-    * Returns the number of documents in this index.
-    *
-    * @return the number of documents in this index.
-    * @throws IOException if an error occurs while reading from the index.
-    */
-   int numDocs() throws IOException
-   {
-      if (indexNames.size() == 0)
-      {
-         return volatileIndex.getNumDocuments();
-      }
-      else
-      {
-         CachingMultiIndexReader reader = getIndexReader();
-         try
-         {
-            return reader.numDocs();
-         }
-         finally
-         {
-            reader.release();
-         }
-      }
-   }
+    /**
+     * @return the index format version for this multi index.
+     */
+    IndexFormatVersion getIndexFormatVersion() {
+	return version;
+    }
 
-   /**
-    * @return the index format version for this multi index.
-    */
-   IndexFormatVersion getIndexFormatVersion()
-   {
-      return version;
-   }
+    /**
+     * Creates an initial index by traversing the node hierarchy starting at the
+     * indexing root of the {@link IndexingTree}.
+     * 
+     * @param stateMgr
+     *            the item state manager.
+     * @throws IOException
+     *             if an error occurs while indexing the workspace.
+     * @throws IllegalStateException
+     *             if this index is not empty.
+     */
+    void createInitialIndex(ItemDataConsumer stateMgr) throws IOException {
+	// only do an initial index if there are no indexes at all
+	if (indexNames.size() == 0) {
+	    reindexing = true;
+	    try {
+		long count = 0;
+		// traverse and index workspace
+		executeAndLog(new Start(Action.INTERNAL_TRANSACTION));
+		// NodeData rootState = (NodeData) stateMgr.getItemData(rootId);
+		count = createIndex(indexingTree.getIndexingRoot(), stateMgr,
+			count);
+		executeAndLog(new Commit(getTransactionId()));
+		log.info("Created initial index for {} nodes", new Long(count));
+		releaseMultiReader();
+		scheduleFlushTask();
+	    } catch (Exception e) {
+		String msg = "Error indexing workspace";
+		IOException ex = new IOException(msg);
+		ex.initCause(e);
+		throw ex;
+	    } finally {
+		reindexing = false;
+	    }
+	} else {
+	    throw new IllegalStateException("Index already present");
+	}
+    }
 
-   /**
-    * Creates an initial index by traversing the node hierarchy starting at the
-    * node with <code>rootId</code>.
-    *
-    * @param stateMgr the item state manager.
-    * @param rootId   the id of the node from where to start.
-    * @param rootPath the path of the node from where to start.
-    * @throws IOException           if an error occurs while indexing the
-    *                               workspace.
-    * @throws IllegalStateException if this index is not empty.
-    */
-   void createInitialIndex(ItemDataConsumer stateMgr, String rootId) throws IOException
-   {
-      // only do an initial index if there are no indexes at all
-      if (indexNames.size() == 0)
-      {
-         reindexing = true;
-         try
-         {
-            long count = 0;
-            // traverse and index workspace
-            executeAndLog(new Start(Action.INTERNAL_TRANSACTION));
-            NodeData rootState = (NodeData)stateMgr.getItemData(rootId);
-            count = createIndex(rootState, stateMgr, count);
-            executeAndLog(new Commit(getTransactionId()));
-            log.info("Created initial index for {} nodes", new Long(count));
-            releaseMultiReader();
-            scheduleFlushTask();
-         }
-         catch (Exception e)
-         {
-            String msg = "Error indexing workspace";
-            IOException ex = new IOException(msg);
-            ex.initCause(e);
-            throw ex;
-         }
-         finally
-         {
-            reindexing = false;
-         }
-      }
-      else
-      {
-         throw new IllegalStateException("Index already present");
-      }
-   }
+    /**
+     * Atomically updates the index by removing some documents and adding
+     * others.
+     * 
+     * @param remove
+     *            collection of <code>UUID</code>s that identify documents to
+     *            remove
+     * @param add
+     *            collection of <code>Document</code>s to add. Some of the
+     *            elements in this collection may be <code>null</code>, to
+     *            indicate that a node could not be indexed successfully.
+     * @throws IOException
+     *             if an error occurs while updating the index.
+     */
+    synchronized void update(Collection remove, Collection add)
+	    throws IOException {
+	// make sure a reader is available during long updates
+	if (add.size() > handler.getBufferSize()) {
+	    try {
+		getIndexReader().release();
+	    } catch (IOException e) {
+		// do not fail if an exception is thrown here
+		log.warn("unable to prepare index reader "
+			+ "for queries during update", e);
+	    }
+	}
 
-   /**
-    * Atomically updates the index by removing some documents and adding
-    * others.
-    *
-    * @param remove collection of <code>UUID</code>s that identify documents to
-    *               remove
-    * @param add    collection of <code>Document</code>s to add. Some of the
-    *               elements in this collection may be <code>null</code>, to
-    *               indicate that a node could not be indexed successfully.
-    * @throws IOException if an error occurs while updating the index.
-    */
-   synchronized void update(Collection remove, Collection add) throws IOException
-   {
-      // make sure a reader is available during long updates
-      if (add.size() > handler.getBufferSize())
-      {
-         try
-         {
-            getIndexReader().release();
-         }
-         catch (IOException e)
-         {
-            // do not fail if an exception is thrown here
-            log.warn("unable to prepare index reader for queries during update", e);
-         }
-      }
+	synchronized (updateMonitor) {
+	    updateInProgress = true;
+	}
+	try {
+	    long transactionId = nextTransactionId++;
+	    executeAndLog(new Start(transactionId));
 
-      synchronized (updateMonitor)
-      {
-         updateInProgress = true;
-      }
-      try
-      {
-         long transactionId = nextTransactionId++;
-         executeAndLog(new Start(transactionId));
+	    boolean flush = false;
+	    for (Iterator it = remove.iterator(); it.hasNext();) {
+		executeAndLog(new DeleteNode(transactionId, (String) it.next()));
+	    }
+	    for (Iterator it = add.iterator(); it.hasNext();) {
+		Document doc = (Document) it.next();
+		if (doc != null) {
+		    executeAndLog(new AddNode(transactionId, doc));
+		    // commit volatile index if needed
+		    flush |= checkVolatileCommit();
+		}
+	    }
+	    executeAndLog(new Commit(transactionId));
 
-         boolean flush = false;
-         for (Iterator it = remove.iterator(); it.hasNext();)
-         {
-            executeAndLog(new DeleteNode(transactionId, (String)it.next()));
-         }
-         for (Iterator it = add.iterator(); it.hasNext();)
-         {
-            Document doc = (Document)it.next();
-            if (doc != null)
-            {
-               executeAndLog(new AddNode(transactionId, doc));
-               // commit volatile index if needed
-               flush |= checkVolatileCommit();
-            }
-         }
-         executeAndLog(new Commit(transactionId));
+	    // flush the whole index when the volatile index has been committed.
+	    if (flush) {
+		flush();
+	    }
+	} finally {
+	    synchronized (updateMonitor) {
+		updateInProgress = false;
+		updateMonitor.notifyAll();
+		releaseMultiReader();
+	    }
+	}
+    }
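
addDocument() and removeDocument() below are thin wrappers around this method, and a caller inside the package could batch both sides of a change itself. A hedged sketch (nodeData is a hypothetical NodeData whose getIdentifier() is assumed to return the node uuid):

   // Sketch: atomically replace the indexed document of a single node.
   String uuid = nodeData.getIdentifier();              // assumed accessor
   Document doc = multiIndex.createDocument(nodeData);  // mapping delegated to the handler
   multiIndex.update(Arrays.asList(new String[]{uuid}),
           Arrays.asList(new Document[]{doc}));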
 
-         // flush whole index when volatile index has been commited.
-         if (flush)
-         {
-            flush();
-         }
-      }
-      finally
-      {
-         synchronized (updateMonitor)
-         {
-            updateInProgress = false;
-            updateMonitor.notifyAll();
-            releaseMultiReader();
-         }
-      }
-   }
+    /**
+     * Adds a document to the index.
+     * 
+     * @param doc
+     *            the document to add.
+     * @throws IOException
+     *             if an error occurs while adding the document to the index.
+     */
+    void addDocument(Document doc) throws IOException {
+	update(Collections.EMPTY_LIST, Arrays.asList(new Document[] { doc }));
+    }
 
-   /**
-    * Adds a document to the index.
-    *
-    * @param doc the document to add.
-    * @throws IOException if an error occurs while adding the document to the
-    *                     index.
-    */
-   void addDocument(Document doc) throws IOException
-   {
-      update(Collections.EMPTY_LIST, Arrays.asList(new Document[]{doc}));
-   }
+    /**
+     * Deletes the first document that matches the <code>uuid</code>.
+     * 
+     * @param uuid
+     *            the document that matches this <code>uuid</code> will be deleted.
+     * @throws IOException
+     *             if an error occurs while deleting the document.
+     */
+    void removeDocument(String uuid) throws IOException {
+	update(Arrays.asList(new String[] { uuid }), Collections.EMPTY_LIST);
+    }
 
-   /**
-    * Deletes the first document that matches the <code>uuid</code>.
-    *
-    * @param uuid document that match this <code>uuid</code> will be deleted.
-    * @throws IOException if an error occurs while deleting the document.
-    */
-   void removeDocument(String uuid) throws IOException
-   {
-      update(Arrays.asList(new String[]{uuid}), Collections.EMPTY_LIST);
-   }
+    /**
+     * Deletes all documents that match the <code>uuid</code>.
+     * 
+     * @param uuid
+     *            documents that match this <code>uuid</code> will be deleted.
+     * @return the number of deleted documents.
+     * @throws IOException
+     *             if an error occurs while deleting documents.
+     */
+    synchronized int removeAllDocuments(String uuid) throws IOException {
+	synchronized (updateMonitor) {
+	    updateInProgress = true;
+	}
+	int num;
+	try {
+	    Term idTerm = new Term(FieldNames.UUID, uuid.toString());
+	    executeAndLog(new Start(Action.INTERNAL_TRANSACTION));
+	    num = volatileIndex.removeDocument(idTerm);
+	    if (num > 0) {
+		redoLog.append(new DeleteNode(getTransactionId(), uuid));
+	    }
+	    for (int i = 0; i < indexes.size(); i++) {
+		PersistentIndex index = (PersistentIndex) indexes.get(i);
+		// only remove documents from registered indexes
+		if (indexNames.contains(index.getName())) {
+		    int removed = index.removeDocument(idTerm);
+		    if (removed > 0) {
+			redoLog
+				.append(new DeleteNode(getTransactionId(), uuid));
+		    }
+		    num += removed;
+		}
+	    }
+	    executeAndLog(new Commit(getTransactionId()));
+	} finally {
+	    synchronized (updateMonitor) {
+		updateInProgress = false;
+		updateMonitor.notifyAll();
+		releaseMultiReader();
+	    }
+	}
+	return num;
+    }
 
-   /**
-    * Deletes all documents that match the <code>uuid</code>.
-    *
-    * @param uuid documents that match this <code>uuid</code> will be deleted.
-    * @return the number of deleted documents.
-    * @throws IOException if an error occurs while deleting documents.
-    */
-   synchronized int removeAllDocuments(String uuid) throws IOException
-   {
-      synchronized (updateMonitor)
-      {
-         updateInProgress = true;
-      }
-      int num;
-      try
-      {
-         Term idTerm = new Term(FieldNames.UUID, uuid.toString());
-         executeAndLog(new Start(Action.INTERNAL_TRANSACTION));
-         num = volatileIndex.removeDocument(idTerm);
-         if (num > 0)
-         {
-            redoLog.append(new DeleteNode(getTransactionId(), uuid));
-         }
-         for (int i = 0; i < indexes.size(); i++)
-         {
-            PersistentIndex index = (PersistentIndex)indexes.get(i);
-            // only remove documents from registered indexes
-            if (indexNames.contains(index.getName()))
-            {
-               int removed = index.removeDocument(idTerm);
-               if (removed > 0)
-               {
-                  redoLog.append(new DeleteNode(getTransactionId(), uuid));
-               }
-               num += removed;
-            }
-         }
-         executeAndLog(new Commit(getTransactionId()));
-      }
-      finally
-      {
-         synchronized (updateMonitor)
-         {
-            updateInProgress = false;
-            updateMonitor.notifyAll();
-            releaseMultiReader();
-         }
-      }
-      return num;
-   }
+    /**
+     * Returns <code>IndexReader</code>s for the indexes named
+     * <code>indexNames</code>. An <code>IndexListener</code> is registered and
+     * notified when documents are deleted from one of the indexes in
+     * <code>indexNames</code>.
+     * <p/>
+     * Note: the number of <code>IndexReaders</code> returned by this method is
+     * not necessarily the same as the number of index names passed. An index
+     * might have been deleted and is not reachable anymore.
+     * 
+     * @param indexNames
+     *            the names of the indexes for which to obtain readers.
+     * @param listener
+     *            the listener to notify when documents are deleted.
+     * @return the <code>IndexReaders</code>.
+     * @throws IOException
+     *             if an error occurs acquiring the index readers.
+     */
+    synchronized IndexReader[] getIndexReaders(String[] indexNames,
+	    IndexListener listener) throws IOException {
+	Set names = new HashSet(Arrays.asList(indexNames));
+	Map indexReaders = new HashMap();
 
-   /**
-    * Returns <code>IndexReader</code>s for the indexes named
-    * <code>indexNames</code>. An <code>IndexListener</code> is registered and
-    * notified when documents are deleted from one of the indexes in
-    * <code>indexNames</code>.
-    * <p/>
-    * Note: the number of <code>IndexReaders</code> returned by this method is
-    * not necessarily the same as the number of index names passed. An index
-    * might have been deleted and is not reachable anymore.
-    *
-    * @param indexNames the names of the indexes for which to obtain readers.
-    * @param listener   the listener to notify when documents are deleted.
-    * @return the <code>IndexReaders</code>.
-    * @throws IOException if an error occurs acquiring the index readers.
-    */
-   synchronized IndexReader[] getIndexReaders(String[] indexNames, IndexListener listener) throws IOException
-   {
-      Set names = new HashSet(Arrays.asList(indexNames));
-      Map indexReaders = new HashMap();
+	try {
+	    for (Iterator it = indexes.iterator(); it.hasNext();) {
+		PersistentIndex index = (PersistentIndex) it.next();
+		if (names.contains(index.getName())) {
+		    indexReaders.put(index.getReadOnlyIndexReader(listener),
+			    index);
+		}
+	    }
+	} catch (IOException e) {
+	    // release readers obtained so far
+	    for (Iterator it = indexReaders.entrySet().iterator(); it.hasNext();) {
+		Map.Entry entry = (Map.Entry) it.next();
+		ReadOnlyIndexReader reader = (ReadOnlyIndexReader) entry
+			.getKey();
+		try {
+		    reader.release();
+		} catch (IOException ex) {
+		    log.warn("Exception releasing index reader: " + ex);
+		}
+		((PersistentIndex) entry.getValue()).resetListener();
+	    }
+	    throw e;
+	}
 
-      try
-      {
-         for (Iterator it = indexes.iterator(); it.hasNext();)
-         {
-            PersistentIndex index = (PersistentIndex)it.next();
-            if (names.contains(index.getName()))
-            {
-               indexReaders.put(index.getReadOnlyIndexReader(listener), index);
-            }
-         }
-      }
-      catch (IOException e)
-      {
-         // release readers obtained so far
-         for (Iterator it = indexReaders.entrySet().iterator(); it.hasNext();)
-         {
-            Map.Entry entry = (Map.Entry)it.next();
-            ReadOnlyIndexReader reader = (ReadOnlyIndexReader)entry.getKey();
-            try
-            {
-               reader.release();
-            }
-            catch (IOException ex)
-            {
-               log.warn("Exception releasing index reader: " + ex);
-            }
-            ((PersistentIndex)entry.getValue()).resetListener();
-         }
-         throw e;
-      }
+	return (IndexReader[]) indexReaders.keySet().toArray(
+		new IndexReader[indexReaders.size()]);
+    }
 
-      return (IndexReader[])indexReaders.keySet().toArray(new IndexReader[indexReaders.size()]);
-   }
+    /**
+     * Creates a new Persistent index. The new index is not registered with this
+     * <code>MultiIndex</code>.
+     * 
+     * @param indexName
+     *            the name of the index to open, or <code>null</code> if an
+     *            index with a new name should be created.
+     * @return a new <code>PersistentIndex</code>.
+     * @throws IOException
+     *             if a new index cannot be created.
+     */
+    synchronized PersistentIndex getOrCreateIndex(String indexName)
+	    throws IOException {
+	// check existing
+	for (Iterator it = indexes.iterator(); it.hasNext();) {
+	    PersistentIndex idx = (PersistentIndex) it.next();
+	    if (idx.getName().equals(indexName)) {
+		return idx;
+	    }
+	}
 
-   /**
-    * Creates a new Persistent index. The new index is not registered with this
-    * <code>MultiIndex</code>.
-    *
-    * @param indexName the name of the index to open, or <code>null</code> if
-    *                  an index with a new name should be created.
-    * @return a new <code>PersistentIndex</code>.
-    * @throws IOException if a new index cannot be created.
-    */
-   synchronized PersistentIndex getOrCreateIndex(String indexName) throws IOException
-   {
-      // check existing
-      for (Iterator it = indexes.iterator(); it.hasNext();)
-      {
-         PersistentIndex idx = (PersistentIndex)it.next();
-         if (idx.getName().equals(indexName))
-         {
-            return idx;
-         }
-      }
+	// otherwise open / create it
+	if (indexName == null) {
+	    do {
+		indexName = indexNames.newName();
+	    } while (directoryManager.hasDirectory(indexName));
+	}
+	PersistentIndex index;
+	try {
+	    index = new PersistentIndex(indexName, handler.getTextAnalyzer(),
+		    handler.getSimilarity(), cache, indexingQueue,
+		    directoryManager);
+	} catch (IOException e) {
+	    // do some clean up
+	    if (!directoryManager.delete(indexName)) {
+		deletable.add(indexName);
+	    }
+	    throw e;
+	}
+	index.setMaxFieldLength(handler.getMaxFieldLength());
+	index.setUseCompoundFile(handler.getUseCompoundFile());
+	index.setTermInfosIndexDivisor(handler.getTermInfosIndexDivisor());
 
-      // otherwise open / create it
-      if (indexName == null)
-      {
-         do
-         {
-            indexName = indexNames.newName();
-         }
-         while (directoryManager.hasDirectory(indexName));
-      }
-      PersistentIndex index;
-      try
-      {
-         index =
-            new PersistentIndex(indexName, handler.getTextAnalyzer(), handler.getSimilarity(), cache, indexingQueue,
-               directoryManager);
-      }
-      catch (IOException e)
-      {
-         // do some clean up
-         if (!directoryManager.delete(indexName))
-         {
-            deletable.add(indexName);
-         }
-         throw e;
-      }
-      index.setMaxFieldLength(handler.getMaxFieldLength());
-      index.setUseCompoundFile(handler.getUseCompoundFile());
-      index.setTermInfosIndexDivisor(handler.getTermInfosIndexDivisor());
+	// add to list of open indexes and return it
+	indexes.add(index);
+	return index;
+    }
 
-      // add to list of open indexes and return it
-      indexes.add(index);
-      return index;
-   }
+    /**
+     * Returns <code>true</code> if this multi index has an index segment with
+     * the given name. This method even returns <code>true</code> if an index
+     * segment has not yet been loaded / initialized but exists on disk.
+     * 
+     * @param indexName
+     *            the name of the index segment.
+     * @return <code>true</code> if it exists; otherwise <code>false</code>.
+     * @throws IOException
+     *             if an error occurs while checking existence of directory.
+     */
+    synchronized boolean hasIndex(String indexName) throws IOException {
+	// check existing
+	for (Iterator it = indexes.iterator(); it.hasNext();) {
+	    PersistentIndex idx = (PersistentIndex) it.next();
+	    if (idx.getName().equals(indexName)) {
+		return true;
+	    }
+	}
+	// check if it exists on disk
+	return directoryManager.hasDirectory(indexName);
+    }
 
-   /**
-    * Returns <code>true</code> if this multi index has an index segment with
-    * the given name. This method even returns <code>true</code> if an index
-    * segments has not yet been loaded / initialized but exists on disk.
-    *
-    * @param indexName the name of the index segment.
-    * @return <code>true</code> if it exists; otherwise <code>false</code>.
-    * @throws IOException if an error occurs while checking existence of
-    *          directory.
-    */
-   synchronized boolean hasIndex(String indexName) throws IOException
-   {
-      // check existing
-      for (Iterator it = indexes.iterator(); it.hasNext();)
-      {
-         PersistentIndex idx = (PersistentIndex)it.next();
-         if (idx.getName().equals(indexName))
-         {
-            return true;
-         }
-      }
-      // check if it exists on disk
-      return directoryManager.hasDirectory(indexName);
-   }
+    /**
+     * Replaces the indexes named <code>obsoleteIndexes</code> with
+     * <code>index</code>. Documents that must be deleted in <code>index</code>
+     * can be identified with <code>Term</code>s in <code>deleted</code>.
+     * 
+     * @param obsoleteIndexes
+     *            the names of the indexes to replace.
+     * @param index
+     *            the new index that is the result of a merge of the indexes to
+     *            replace.
+     * @param deleted
+     *            <code>Term</code>s that identify documents that must be
+     *            deleted in <code>index</code>.
+     * @throws IOException
+     *             if an exception occurs while replacing the indexes.
+     */
+    void replaceIndexes(String[] obsoleteIndexes, PersistentIndex index,
+	    Collection deleted) throws IOException {
 
-   /**
-    * Replaces the indexes with names <code>obsoleteIndexes</code> with
-    * <code>index</code>. Documents that must be deleted in <code>index</code>
-    * can be identified with <code>Term</code>s in <code>deleted</code>.
-    *
-    * @param obsoleteIndexes the names of the indexes to replace.
-    * @param index      the new index that is the result of a merge of the
-    *                   indexes to replace.
-    * @param deleted    <code>Term</code>s that identify documents that must be
-    *                   deleted in <code>index</code>.
-    * @throws IOException if an exception occurs while replacing the indexes.
-    */
-   void replaceIndexes(String[] obsoleteIndexes, PersistentIndex index, Collection deleted) throws IOException
-   {
+	if (handler.isInitializeHierarchyCache()) {
+	    // force initializing of caches
+	    long time = System.currentTimeMillis();
+	    index.getReadOnlyIndexReader(true).release();
+	    time = System.currentTimeMillis() - time;
+	    log.debug("hierarchy cache initialized in {} ms", new Long(time));
+	}
 
-      if (handler.isInitializeHierarchyCache())
-      {
-         // force initializing of caches
-         long time = System.currentTimeMillis();
-         index.getReadOnlyIndexReader(true).release();
-         time = System.currentTimeMillis() - time;
-         log.debug("hierarchy cache initialized in {} ms", new Long(time));
-      }
+	synchronized (this) {
+	    synchronized (updateMonitor) {
+		updateInProgress = true;
+	    }
+	    try {
+		// if we are reindexing there is already an active transaction
+		if (!reindexing) {
+		    executeAndLog(new Start(Action.INTERNAL_TRANS_REPL_INDEXES));
+		}
+		// delete obsolete indexes
+		Set names = new HashSet(Arrays.asList(obsoleteIndexes));
+		for (Iterator it = names.iterator(); it.hasNext();) {
+		    // do not try to delete indexes that are already gone
+		    String indexName = (String) it.next();
+		    if (indexNames.contains(indexName)) {
+			executeAndLog(new DeleteIndex(getTransactionId(),
+				indexName));
+		    }
+		}
 
-      synchronized (this)
-      {
-         synchronized (updateMonitor)
-         {
-            updateInProgress = true;
-         }
-         try
-         {
-            // if we are reindexing there is already an active transaction
-            if (!reindexing)
-            {
-               executeAndLog(new Start(Action.INTERNAL_TRANS_REPL_INDEXES));
-            }
-            // delete obsolete indexes
-            Set names = new HashSet(Arrays.asList(obsoleteIndexes));
-            for (Iterator it = names.iterator(); it.hasNext();)
-            {
-               // do not try to delete indexes that are already gone
-               String indexName = (String)it.next();
-               if (indexNames.contains(indexName))
-               {
-                  executeAndLog(new DeleteIndex(getTransactionId(), indexName));
-               }
-            }
+		// Index merger does not log an action when it creates the
+		// target index of the merge. We have to do this here.
+		executeAndLog(new CreateIndex(getTransactionId(), index
+			.getName()));
 
-            // Index merger does not log an action when it creates the target
-            // index of the merge. We have to do this here.
-            executeAndLog(new CreateIndex(getTransactionId(), index.getName()));
+		executeAndLog(new AddIndex(getTransactionId(), index.getName()));
 
-            executeAndLog(new AddIndex(getTransactionId(), index.getName()));
+		// delete documents in index
+		for (Iterator it = deleted.iterator(); it.hasNext();) {
+		    Term id = (Term) it.next();
+		    index.removeDocument(id);
+		}
+		index.commit();
 
-            // delete documents in index
-            for (Iterator it = deleted.iterator(); it.hasNext();)
-            {
-               Term id = (Term)it.next();
-               index.removeDocument(id);
-            }
-            index.commit();
+		if (!reindexing) {
+		    // only commit if we are not reindexing
+		    // when reindexing the final commit is done at the very end
+		    executeAndLog(new Commit(getTransactionId()));
+		}
+	    } finally {
+		synchronized (updateMonitor) {
+		    updateInProgress = false;
+		    updateMonitor.notifyAll();
+		    releaseMultiReader();
+		}
+	    }
+	}
+	if (reindexing) {
+	    // do some cleanup right away when reindexing
+	    attemptDelete();
+	}
+    }
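
To make the contract above concrete, a hedged sketch of the merger-side call; the segment names, mergedIndex and deletedUuid are hypothetical and not taken from IndexMerger:

   // Sketch: swap two merged-away segments for the merge result and drop a
   // document that was deleted while the merge was running.
   Collection deleted = Arrays.asList(new Term[]{new Term(FieldNames.UUID, deletedUuid)});
   multiIndex.replaceIndexes(new String[]{"index_1", "index_2"}, mergedIndex, deleted);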
 
-            if (!reindexing)
-            {
-               // only commit if we are not reindexing
-               // when reindexing the final commit is done at the very end
-               executeAndLog(new Commit(getTransactionId()));
-            }
-         }
-         finally
-         {
-            synchronized (updateMonitor)
-            {
-               updateInProgress = false;
-               updateMonitor.notifyAll();
-               releaseMultiReader();
-            }
-         }
-      }
-      if (reindexing)
-      {
-         // do some cleanup right away when reindexing
-         attemptDelete();
-      }
-   }
+    /**
+     * Returns a read-only <code>IndexReader</code> that spans all indexes of
+     * this <code>MultiIndex</code>.
+     * 
+     * @return an <code>IndexReader</code>.
+     * @throws IOException
+     *             if an error occurs constructing the <code>IndexReader</code>.
+     */
+    public CachingMultiIndexReader getIndexReader() throws IOException {
+	return getIndexReader(false);
+    }
 
-   /**
-    * Returns an read-only <code>IndexReader</code> that spans alls indexes of this
-    * <code>MultiIndex</code>.
-    *
-    * @return an <code>IndexReader</code>.
-    * @throws IOException if an error occurs constructing the <code>IndexReader</code>.
-    */
-   public CachingMultiIndexReader getIndexReader() throws IOException
-   {
-      return getIndexReader(false);
-   }
+    /**
+     * Returns a read-only <code>IndexReader</code> that spans all indexes of
+     * this <code>MultiIndex</code>.
+     * 
+     * @param initCache
+     *            when set <code>true</code> the hierarchy cache is completely
+     *            initialized before this call returns.
+     * @return an <code>IndexReader</code>.
+     * @throws IOException
+     *             if an error occurs constructing the <code>IndexReader</code>.
+     */
+    public synchronized CachingMultiIndexReader getIndexReader(boolean initCache)
+	    throws IOException {
+	synchronized (updateMonitor) {
+	    if (multiReader != null) {
+		multiReader.acquire();
+		return multiReader;
+	    }
+	    // no reader available
+	    // wait until no update is in progress
+	    while (updateInProgress) {
+		try {
+		    updateMonitor.wait();
+		} catch (InterruptedException e) {
+		    throw new IOException(
+			    "Interrupted while waiting to acquire reader");
+		}
+	    }
+	    // some other read thread might have created the reader in the
+	    // meantime -> check again
+	    if (multiReader == null) {
+		List readerList = new ArrayList();
+		for (int i = 0; i < indexes.size(); i++) {
+		    PersistentIndex pIdx = (PersistentIndex) indexes.get(i);
+		    if (indexNames.contains(pIdx.getName())) {
+			readerList.add(pIdx.getReadOnlyIndexReader(initCache));
+		    }
+		}
+		readerList.add(volatileIndex.getReadOnlyIndexReader());
+		ReadOnlyIndexReader[] readers = (ReadOnlyIndexReader[]) readerList
+			.toArray(new ReadOnlyIndexReader[readerList.size()]);
+		multiReader = new CachingMultiIndexReader(readers, cache);
+	    }
+	    multiReader.acquire();
+	    return multiReader;
+	}
+    }
 
-   /**
-    * Returns an read-only <code>IndexReader</code> that spans alls indexes of this
-    * <code>MultiIndex</code>.
-    *
-    * @param initCache when set <code>true</code> the hierarchy cache is
-    *                  completely initialized before this call returns.
-    * @return an <code>IndexReader</code>.
-    * @throws IOException if an error occurs constructing the <code>IndexReader</code>.
-    */
-   public synchronized CachingMultiIndexReader getIndexReader(boolean initCache) throws IOException
-   {
-      synchronized (updateMonitor)
-      {
-         if (multiReader != null)
-         {
-            multiReader.acquire();
-            return multiReader;
-         }
-         // no reader available
-         // wait until no update is in progress
-         while (updateInProgress)
-         {
-            try
-            {
-               updateMonitor.wait();
-            }
-            catch (InterruptedException e)
-            {
-               throw new IOException("Interrupted while waiting to aquire reader");
-            }
-         }
-         // some other read thread might have created the reader in the
-         // meantime -> check again
-         if (multiReader == null)
-         {
-            List readerList = new ArrayList();
-            for (int i = 0; i < indexes.size(); i++)
-            {
-               PersistentIndex pIdx = (PersistentIndex)indexes.get(i);
-               if (indexNames.contains(pIdx.getName()))
-               {
-                  readerList.add(pIdx.getReadOnlyIndexReader(initCache));
-               }
-            }
-            readerList.add(volatileIndex.getReadOnlyIndexReader());
-            ReadOnlyIndexReader[] readers =
-               (ReadOnlyIndexReader[])readerList.toArray(new ReadOnlyIndexReader[readerList.size()]);
-            multiReader = new CachingMultiIndexReader(readers, cache);
-         }
-         multiReader.acquire();
-         return multiReader;
-      }
-   }
+    /**
+     * Returns the volatile index.
+     * 
+     * @return the volatile index.
+     */
+    VolatileIndex getVolatileIndex() {
+	return volatileIndex;
+    }
 
-   /**
-    * Returns the volatile index.
-    *
-    * @return the volatile index.
-    */
-   VolatileIndex getVolatileIndex()
-   {
-      return volatileIndex;
-   }
+    /**
+     * Closes this <code>MultiIndex</code>.
+     */
+    void close() {
 
-   /**
-    * Closes this <code>MultiIndex</code>.
-    */
-   void close()
-   {
+	// stop index merger
+	// when calling this method we must not lock this MultiIndex, otherwise
+	// a deadlock might occur
+	merger.dispose();
 
-      // stop index merger
-      // when calling this method we must not lock this MultiIndex, otherwise
-      // a deadlock might occur
-      merger.dispose();
+	synchronized (this) {
+	    // stop timer
+	    flushTask.cancel();
 
-      synchronized (this)
-      {
-         // stop timer
-         flushTask.cancel();
+	    // commit / close indexes
+	    try {
+		releaseMultiReader();
+	    } catch (IOException e) {
+		log.error("Exception while closing search index.", e);
+	    }
+	    try {
+		flush();
+	    } catch (IOException e) {
+		log.error("Exception while closing search index.", e);
+	    }
+	    volatileIndex.close();
+	    for (int i = 0; i < indexes.size(); i++) {
+		((PersistentIndex) indexes.get(i)).close();
+	    }
 
-         // commit / close indexes
-         try
-         {
-            releaseMultiReader();
-         }
-         catch (IOException e)
-         {
-            log.error("Exception while closing search index.", e);
-         }
-         try
-         {
-            flush();
-         }
-         catch (IOException e)
-         {
-            log.error("Exception while closing search index.", e);
-         }
-         volatileIndex.close();
-         for (int i = 0; i < indexes.size(); i++)
-         {
-            ((PersistentIndex)indexes.get(i)).close();
-         }
+	    // close indexing queue
+	    indexingQueue.close();
 
-         // close indexing queue
-         indexingQueue.close();
+	    // finally close directory
+	    try {
+		indexDir.close();
+	    } catch (IOException e) {
+		log.error("Exception while closing directory.", e);
+	    }
+	}
+    }
 
-         // finally close directory
-         try
-         {
-            indexDir.close();
-         }
-         catch (IOException e)
-         {
-            log.error("Exception while closing directory.", e);
-         }
-      }
-   }
+    /**
+     * Returns the namespace mappings of this search index.
+     * 
+     * @return the namespace mappings of this search index.
+     */
+    NamespaceMappings getNamespaceMappings() {
+	return nsMappings;
+    }
 
-   /**
-    * Returns the namespace mappings of this search index.
-    * @return the namespace mappings of this search index.
-    */
-   NamespaceMappings getNamespaceMappings()
-   {
-      return nsMappings;
-   }
+    /**
+     * Returns the indexing queue for this multi index.
+     * 
+     * @return the indexing queue for this multi index.
+     */
+    public IndexingQueue getIndexingQueue() {
+	return indexingQueue;
+    }
 
-   /**
-    * Returns the indexing queue for this multi index.
-    * @return the indexing queue for this multi index.
-    */
-   public IndexingQueue getIndexingQueue()
-   {
-      return indexingQueue;
-   }
+    /**
+     * Returns a lucene Document for the <code>node</code>.
+     * 
+     * @param node
+     *            the node to index.
+     * @return the index document.
+     * @throws RepositoryException
+     *             if an error occurs while reading from the workspace.
+     */
+    Document createDocument(NodeData node) throws RepositoryException {
+	return handler.createDocument(node, nsMappings, version);
+    }
 
-   /**
-    * Returns a lucene Document for the <code>node</code>.
-    *
-    * @param node the node to index.
-    * @return the index document.
-    * @throws RepositoryException if an error occurs while reading from the
-    *                             workspace.
-    */
-   Document createDocument(NodeData node) throws RepositoryException
-   {
-      return handler.createDocument(node, nsMappings, version);
-   }
+    /**
+     * Returns a lucene Document for the Node with <code>id</code>.
+     * 
+     * @param id
+     *            the id of the node to index.
+     * @return the index document.
+     * @throws RepositoryException
+     *             if an error occurs while reading from the workspace or if
+     *             there is no node with <code>id</code>.
+     */
+    Document createDocument(String id) throws RepositoryException {
+	ItemData data = handler.getContext().getItemStateManager().getItemData(
+		id);
+	if (data == null)
+	    throw new ItemNotFoundException("Item id=" + id + " not found");
+	if (!data.isNode())
+	    throw new RepositoryException("Item with id " + id
+		    + " is not a node");
+	return createDocument((NodeData) data);
 
-   /**
-    * Returns a lucene Document for the Node with <code>id</code>.
-    *
-    * @param id the id of the node to index.
-    * @return the index document.
-    * @throws RepositoryException if an error occurs while reading from the
-    *                             workspace or if there is no node with
-    *                             <code>id</code>.
-    */
-   Document createDocument(String id) throws RepositoryException
-   {
-      ItemData data = handler.getContext().getItemStateManager().getItemData(id);
-      if (data == null)
-         throw new ItemNotFoundException("Item id=" + id + " not found");
-      if (!data.isNode())
-         throw new RepositoryException("Item with id " + id + " is not a node");
-      return createDocument((NodeData)data);
+    }
 
-   }
+    /**
+     * Returns <code>true</code> if the redo log contained entries while this
+     * index was instantiated; <code>false</code> otherwise.
+     * 
+     * @return <code>true</code> if the redo log contained entries.
+     */
+    boolean getRedoLogApplied() {
+	return redoLogApplied;
+    }
 
-   /**
-    * Returns <code>true</code> if the redo log contained entries while
-    * this index was instantiated; <code>false</code> otherwise.
-    * @return <code>true</code> if the redo log contained entries.
-    */
-   boolean getRedoLogApplied()
-   {
-      return redoLogApplied;
-   }
+    /**
+     * Removes the <code>index</code> from the list of active sub indexes. The
+     * index is not actually deleted right away, but postponed until the
+     * transaction commit.
+     * <p/>
+     * This method does not close the index, but rather expects that the index
+     * has already been closed.
+     * 
+     * @param index
+     *            the index to delete.
+     */
+    synchronized void deleteIndex(PersistentIndex index) {
+	// remove it from the lists if index is registered
+	indexes.remove(index);
+	indexNames.removeName(index.getName());
+	synchronized (deletable) {
+	    log.debug("Moved " + index.getName() + " to deletable");
+	    deletable.add(index.getName());
+	}
+    }
 
-   /**
-    * Removes the <code>index</code> from the list of active sub indexes. The
-    * index is not actually deleted right away, but postponed until the
-    * transaction commit.
-    * <p/>
-    * This method does not close the index, but rather expects that the index
-    * has already been closed.
-    *
-    * @param index the index to delete.
-    */
-   synchronized void deleteIndex(PersistentIndex index)
-   {
-      // remove it from the lists if index is registered
-      indexes.remove(index);
-      indexNames.removeName(index.getName());
-      synchronized (deletable)
-      {
-         log.debug("Moved " + index.getName() + " to deletable");
-         deletable.add(index.getName());
-      }
-   }
+    /**
+     * Flushes this <code>MultiIndex</code>. Persists all pending changes and
+     * resets the redo log.
+     * 
+     * @throws IOException
+     *             if the flush fails.
+     */
+    public void flush() throws IOException {
+	synchronized (this) {
+	    // commit volatile index
+	    executeAndLog(new Start(Action.INTERNAL_TRANSACTION));
+	    commitVolatileIndex();
 
-   /**
-    * Flushes this <code>MultiIndex</code>. Persists all pending changes and
-    * resets the redo log.
-    *
-    * @throws IOException if the flush fails.
-    */
-   public void flush() throws IOException
-   {
-      synchronized (this)
-      {
-         // commit volatile index
-         executeAndLog(new Start(Action.INTERNAL_TRANSACTION));
-         commitVolatileIndex();
+	    // commit persistent indexes
+	    for (int i = indexes.size() - 1; i >= 0; i--) {
+		PersistentIndex index = (PersistentIndex) indexes.get(i);
+		// only commit indexes we own
+		// index merger also places PersistentIndex instances in
+		// indexes,
+		// but does not make them public by registering the name in
+		// indexNames
+		if (indexNames.contains(index.getName())) {
+		    index.commit();
+		    // check if index still contains documents
+		    if (index.getNumDocuments() == 0) {
+			executeAndLog(new DeleteIndex(getTransactionId(), index
+				.getName()));
+		    }
+		}
+	    }
+	    executeAndLog(new Commit(getTransactionId()));
 
-         // commit persistent indexes
-         for (int i = indexes.size() - 1; i >= 0; i--)
-         {
-            PersistentIndex index = (PersistentIndex)indexes.get(i);
-            // only commit indexes we own
-            // index merger also places PersistentIndex instances in indexes,
-            // but does not make them public by registering the name in indexNames
-            if (indexNames.contains(index.getName()))
-            {
-               index.commit();
-               // check if index still contains documents
-               if (index.getNumDocuments() == 0)
-               {
-                  executeAndLog(new DeleteIndex(getTransactionId(), index.getName()));
-               }
-            }
-         }
-         executeAndLog(new Commit(getTransactionId()));
+	    indexNames.write(indexDir);
 
-         indexNames.write(indexDir);
+	    // reset redo log
+	    redoLog.clear();
 
-         // reset redo log
-         redoLog.clear();
+	    lastFlushTime = System.currentTimeMillis();
+	}
 
-         lastFlushTime = System.currentTimeMillis();
-      }
+	// delete obsolete indexes
+	attemptDelete();
+    }
 
-      // delete obsolete indexes
-      attemptDelete();
-   }
+    /**
+     * Releases the {@link #multiReader} and sets it to <code>null</code>. If the
+     * reader is already <code>null</code> this method does nothing. When this
+     * method returns {@link #multiReader} is guaranteed to be <code>null</code>
+     * even if an exception is thrown.
+     * <p/>
+     * Please note that this method does not take care of any synchronization. A
+     * caller must ensure that it is the only thread operating on this multi
+     * index, or that it holds the {@link #updateMonitor}.
+     * 
+     * @throws IOException
+     *             if an error occurs while releasing the reader.
+     */
+    void releaseMultiReader() throws IOException {
+	if (multiReader != null) {
+	    try {
+		multiReader.release();
+	    } finally {
+		multiReader = null;
+	    }
+	}
+    }
 
-   /**
-    * Releases the {@link #multiReader} and sets it to <code>null</code>. If the
-    * reader is already <code>null</code> this method does nothing. When this
-    * method returns {@link #multiReader} is guaranteed to be <code>null</code>
-    * even if an exception is thrown.
-    * <p/>
-    * Please note that this method does not take care of any synchronization.
-    * A caller must ensure that it is the only thread operating on this multi
-    * index, or that it holds the {@link #updateMonitor}.
-    *
-    * @throws IOException if an error occurs while releasing the reader.
-    */
-   void releaseMultiReader() throws IOException
-   {
-      if (multiReader != null)
-      {
-         try
-         {
-            multiReader.release();
-         }
-         finally
-         {
-            multiReader = null;
-         }
-      }
-   }
+    // -------------------------< internal >-------------------------------------
 
-   //-------------------------< internal >-------------------------------------
+    /**
+     * Enqueues unused segments for deletion in {@link #deletable}. This method
+     * does not synchronize on {@link #deletable}! A caller must ensure that it
+     * is the only one acting on the {@link #deletable} collection.
+     * 
+     * @throws IOException
+     *             if an error occurs while reading directories.
+     */
+    private void enqueueUnusedSegments() throws IOException {
+	// walk through index segments
+	String[] dirNames = directoryManager.getDirectoryNames();
+	for (int i = 0; i < dirNames.length; i++) {
+	    if (dirNames[i].startsWith("_")
+		    && !indexNames.contains(dirNames[i])) {
+		deletable.add(dirNames[i]);
+	    }
+	}
+    }
 
-   /**
-    * Enqueues unused segments for deletion in {@link #deletable}. This method
-    * does not synchronize on {@link #deletable}! A caller must ensure that it
-    * is the only one acting on the {@link #deletable} collection.
-    *
-    * @throws IOException if an error occurs while reading directories.
-    */
-   private void enqueueUnusedSegments() throws IOException
-   {
-      // walk through index segments
-      String[] dirNames = directoryManager.getDirectoryNames();
-      for (int i = 0; i < dirNames.length; i++)
-      {
-         if (dirNames[i].startsWith("_") && !indexNames.contains(dirNames[i]))
-         {
-            deletable.add(dirNames[i]);
-         }
-      }
-   }
+    private void scheduleFlushTask() {
+	lastFlushTime = System.currentTimeMillis();
+	FLUSH_TIMER.schedule(flushTask, 0, 1000);
+    }
 
-   private void scheduleFlushTask()
-   {
-      lastFlushTime = System.currentTimeMillis();
-      FLUSH_TIMER.schedule(flushTask, 0, 1000);
-   }
+    /**
+     * Resets the volatile index to a new instance.
+     */
+    private void resetVolatileIndex() throws IOException {
+	volatileIndex = new VolatileIndex(handler.getTextAnalyzer(), handler
+		.getSimilarity(), indexingQueue);
+	volatileIndex.setUseCompoundFile(handler.getUseCompoundFile());
+	volatileIndex.setMaxFieldLength(handler.getMaxFieldLength());
+	volatileIndex.setBufferSize(handler.getBufferSize());
+    }
 
-   /**
-    * Resets the volatile index to a new instance.
-    */
-   private void resetVolatileIndex() throws IOException
-   {
-      volatileIndex = new VolatileIndex(handler.getTextAnalyzer(), handler.getSimilarity(), indexingQueue);
-      volatileIndex.setUseCompoundFile(handler.getUseCompoundFile());
-      volatileIndex.setMaxFieldLength(handler.getMaxFieldLength());
-      volatileIndex.setBufferSize(handler.getBufferSize());
-   }
+    /**
+     * Returns the current transaction id.
+     * 
+     * @return the current transaction id.
+     */
+    private long getTransactionId() {
+	return currentTransactionId;
+    }
 
-   /**
-    * Returns the current transaction id.
-    *
-    * @return the current transaction id.
-    */
-   private long getTransactionId()
-   {
-      return currentTransactionId;
-   }
+    /**
+     * Executes action <code>a</code> and appends the action to the redo log if
+     * successful.
+     * 
+     * @param a
+     *            the <code>Action</code> to execute.
+     * @return the executed action.
+     * @throws IOException
+     *             if an error occurs while executing the action or appending
+     *             the action to the redo log.
+     */
+    private Action executeAndLog(Action a) throws IOException {
+	a.execute(this);
+	redoLog.append(a);
+	// please note that flushing the redo log is only required on
+	// commit, but we also want to keep track of new indexes for sure.
+	// otherwise it might happen that unused index folders are orphaned
+	// after a crash.
+	if (a.getType() == Action.TYPE_COMMIT
+		|| a.getType() == Action.TYPE_ADD_INDEX) {
+	    redoLog.flush();
+	}
+	return a;
+    }
 
-   /**
-    * Executes action <code>a</code> and appends the action to the redo log if
-    * successful.
-    *
-    * @param a the <code>Action</code> to execute.
-    * @return the executed action.
-    * @throws IOException         if an error occurs while executing the action
-    *                             or appending the action to the redo log.
-    */
-   private Action executeAndLog(Action a) throws IOException
-   {
-      a.execute(this);
-      redoLog.append(a);
-      // please note that flushing the redo log is only required on
-      // commit, but we also want to keep track of new indexes for sure.
-      // otherwise it might happen that unused index folders are orphaned
-      // after a crash.
-      if (a.getType() == Action.TYPE_COMMIT || a.getType() == Action.TYPE_ADD_INDEX)
-      {
-         redoLog.flush();
-      }
-      return a;
-   }
+    /**
+     * Checks whether the volatile index needs to be committed according to
+     * {@link SearchIndex#getMaxVolatileIndexSize()}.
+     * 
+     * @return <code>true</code> if the volatile index has been committed,
+     *         <code>false</code> otherwise.
+     * @throws IOException
+     *             if an error occurs while committing the volatile index.
+     */
+    private boolean checkVolatileCommit() throws IOException {
+	if (volatileIndex.getRamSizeInBytes() >= handler
+		.getMaxVolatileIndexSize()) {
+	    commitVolatileIndex();
+	    return true;
+	}
+	return false;
+    }
 
-   /**
-    * Checks whether the volatile index needs to be committed according to {@link
-    * SearchIndex#getMaxVolatileIndexSize()}.
-    *
-    * @return <code>true</code> if the volatile index has been committed,
-    *         <code>false</code> otherwise.
-    * @throws IOException if an error occurs while committing the volatile
-    *                     index.
-    */
-   private boolean checkVolatileCommit() throws IOException
-   {
-      if (volatileIndex.getRamSizeInBytes() >= handler.getMaxVolatileIndexSize())
-      {
-         commitVolatileIndex();
-         return true;
-      }
-      return false;
-   }
+    /**
+     * Commits the volatile index to a persistent index. The new persistent
+     * index is added to the list of indexes but not written to disk. When this
+     * method returns a new volatile index has been created.
+     * 
+     * @throws IOException
+     *             if an error occurs while writing the volatile index to disk.
+     */
+    private void commitVolatileIndex() throws IOException {
 
-   /**
-    * Commits the volatile index to a persistent index. The new persistent
-    * index is added to the list of indexes but not written to disk. When this
-    * method returns a new volatile index has been created.
-    *
-    * @throws IOException if an error occurs while writing the volatile index
-    *                     to disk.
-    */
-   private void commitVolatileIndex() throws IOException
-   {
+	// check if volatile index contains documents at all
+	if (volatileIndex.getNumDocuments() > 0) {
 
-      // check if volatile index contains documents at all
-      if (volatileIndex.getNumDocuments() > 0)
-      {
+	    long time = System.currentTimeMillis();
+	    // create index
+	    CreateIndex create = new CreateIndex(getTransactionId(), null);
+	    executeAndLog(create);
 
-         long time = System.currentTimeMillis();
-         // create index
-         CreateIndex create = new CreateIndex(getTransactionId(), null);
-         executeAndLog(create);
+	    // commit volatile index
+	    executeAndLog(new VolatileCommit(getTransactionId(), create
+		    .getIndexName()));
 
-         // commit volatile index
-         executeAndLog(new VolatileCommit(getTransactionId(), create.getIndexName()));
+	    // add new index
+	    AddIndex add = new AddIndex(getTransactionId(), create
+		    .getIndexName());
+	    executeAndLog(add);
 
-         // add new index
-         AddIndex add = new AddIndex(getTransactionId(), create.getIndexName());
-         executeAndLog(add);
+	    // create new volatile index
+	    resetVolatileIndex();
 
-         // create new volatile index
-         resetVolatileIndex();
+	    time = System.currentTimeMillis() - time;
+	    log.debug("Committed in-memory index in " + time + "ms.");
+	}
+    }
 
-         time = System.currentTimeMillis() - time;
-         log.debug("Committed in-memory index in " + time + "ms.");
-      }
-   }
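
To make the logged sequence above concrete: a single volatile-index commit is recorded as three consecutive redo-log entries within one transaction, in the order create index, volatile commit, add index. The standalone sketch below only prints fabricated sample lines (transaction id 7 and segment name "_5" are invented); the CRE_IDX and ADD_IDX shapes follow the toString() implementations further down in this diff, and the VOL_COM argument layout is assumed to be analogous.

// Illustrative only: fabricated transaction id (7) and segment name ("_5").
// Not part of this commit; it merely shows the shape of the logged sequence.
public class VolatileCommitLogSample {
    public static void main(String[] args) {
        System.out.println("7 CRE_IDX _5");  // CreateIndex: a new persistent segment is created
        System.out.println("7 VOL_COM _5");  // VolatileCommit: in-memory documents are copied into it (argument layout assumed)
        System.out.println("7 ADD_IDX _5");  // AddIndex: the segment is registered in the active list
    }
}
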
+    /**
+     * Recursively creates an index starting with the node
+     * <code>node</code>.
+     * 
+     * @param node
+     *            the current <code>NodeData</code>.
+     * @param stateMgr
+     *            the shared item state manager.
+     * @param count
+     *            the number of nodes already indexed.
+     * @return the number of nodes indexed so far.
+     * @throws IOException
+     *             if an error occurs while writing to the index.
+     * @throws RepositoryException
+     *             if a node state cannot be found or any other error occurs.
+     */
+    private long createIndex(NodeData node, ItemDataConsumer stateMgr,
+	    long count) throws IOException, RepositoryException {
+	// NodeId id = node.getNodeId();
 
-   /**
-    * Recursively creates an index starting with the NodeState
-    * <code>node</code>.
-    *
-    * @param node     the current NodeState.
-    * @param path     the path of the current node.
-    * @param stateMgr the shared item state manager.
-    * @param count    the number of nodes already indexed.
-    * @return the number of nodes indexed so far.
-    * @throws IOException         if an error occurs while writing to the
-    *                             index.
-    * @throws ItemStateException  if a node state cannot be found.
-    * @throws RepositoryException if any other error occurs
-    */
-   private long createIndex(NodeData node, ItemDataConsumer stateMgr, long count) throws IOException,
-      RepositoryException
-   {
-      //NodeId id = node.getNodeId();
-      if (excludedIDs.contains(node.getIdentifier()))
-      {
-         return count;
-      }
-      executeAndLog(new AddNode(getTransactionId(), node.getIdentifier()));
-      if (++count % 100 == 0)
-      {
+	if (indexingTree.isExcluded(node)) {
+	    return count;
+	}
+	executeAndLog(new AddNode(getTransactionId(), node.getIdentifier()));
+	if (++count % 100 == 0) {
 
-         log.info("indexing... {} ({})", node.getQPath().getAsString(), new Long(count));
-      }
-      if (count % 10 == 0)
-      {
-         checkIndexingQueue(true);
-      }
-      checkVolatileCommit();
-      List<NodeData> children = stateMgr.getChildNodesData(node);
-      for (NodeData nodeData : children)
-      {
+	    log.info("indexing... {} ({})", node.getQPath().getAsString(),
+		    new Long(count));
+	}
+	if (count % 10 == 0) {
+	    checkIndexingQueue(true);
+	}
+	checkVolatileCommit();
+	List<NodeData> children = stateMgr.getChildNodesData(node);
+	for (NodeData nodeData : children) {
 
-         NodeData childState = (NodeData)stateMgr.getItemData(nodeData.getIdentifier());
-         if (childState == null)
-         {
-            handler.getOnWorkspaceInconsistencyHandler().handleMissingChildNode(new ItemNotFoundException("Child not found "), handler, nodeData.getQPath(), node, nodeData);
-         }
+	    NodeData childState = (NodeData) stateMgr.getItemData(nodeData
+		    .getIdentifier());
+	    if (childState == null) {
+		handler.getOnWorkspaceInconsistencyHandler()
+			.handleMissingChildNode(
+				new ItemNotFoundException("Child not found "),
+				handler, nodeData.getQPath(), node, nodeData);
+	    }
 
-         if (nodeData != null)
-         {
-            count = createIndex(nodeData, stateMgr, count);
-         }
-      }
+	    if (nodeData != null) {
+		count = createIndex(nodeData, stateMgr, count);
+	    }
+	}
 
-      return count;
-   }
+	return count;
+    }
 
-   /**
-    * Attempts to delete all files recorded in {@link #deletable}.
-    */
-   private void attemptDelete()
-   {
-      synchronized (deletable)
-      {
-         for (Iterator it = deletable.iterator(); it.hasNext();)
-         {
-            String indexName = (String)it.next();
-            if (directoryManager.delete(indexName))
-            {
-               it.remove();
-            }
-            else
-            {
-               log.info("Unable to delete obsolete index: " + indexName);
-            }
-         }
-      }
-   }
+    /**
+     * Attempts to delete all files recorded in {@link #deletable}.
+     */
+    private void attemptDelete() {
+	synchronized (deletable) {
+	    for (Iterator it = deletable.iterator(); it.hasNext();) {
+		String indexName = (String) it.next();
+		if (directoryManager.delete(indexName)) {
+		    it.remove();
+		} else {
+		    log.info("Unable to delete obsolete index: " + indexName);
+		}
+	    }
+	}
+    }
 
-   /**
-    * Removes the deletable file if it exists. The file is not used anymore
-    * in Jackrabbit versions >= 1.5.
-    */
-   private void removeDeletable()
-   {
-      String fileName = "deletable";
-      try
-      {
-         if (indexDir.fileExists(fileName))
-         {
-            indexDir.deleteFile(fileName);
-         }
-      }
-      catch (IOException e)
-      {
-         log.warn("Unable to remove file 'deletable'.", e);
-      }
-   }
+    /**
+     * Removes the deletable file if it exists. The file is not used anymore in
+     * Jackrabbit versions >= 1.5.
+     */
+    private void removeDeletable() {
+	String fileName = "deletable";
+	try {
+	    if (indexDir.fileExists(fileName)) {
+		indexDir.deleteFile(fileName);
+	    }
+	} catch (IOException e) {
+	    log.warn("Unable to remove file 'deletable'.", e);
+	}
+    }
 
-   /**
-    * Checks the duration between the last commit to this index and the
-    * current time and flushes the index (if there are changes at all)
-    * if the duration (idle time) is more than {@link SearchIndex#getVolatileIdleTime()}
-    * seconds.
-    */
-   private synchronized void checkFlush()
-   {
-      long idleTime = System.currentTimeMillis() - lastFlushTime;
-      // do not flush if volatileIdleTime is zero or negative
-      if (handler.getVolatileIdleTime() > 0 && idleTime > handler.getVolatileIdleTime() * 1000)
-      {
-         try
-         {
-            if (redoLog.hasEntries())
-            {
-               log.debug("Flushing index after being idle for " + idleTime + " ms.");
-               synchronized (updateMonitor)
-               {
-                  updateInProgress = true;
-               }
-               try
-               {
-                  flush();
-               }
-               finally
-               {
-                  synchronized (updateMonitor)
-                  {
-                     updateInProgress = false;
-                     updateMonitor.notifyAll();
-                     releaseMultiReader();
-                  }
-               }
-            }
-         }
-         catch (IOException e)
-         {
-            log.error("Unable to commit volatile index", e);
-         }
-      }
-   }
+    /**
+     * Checks the duration between the last commit to this index and the current
+     * time and flushes the index (if there are changes at all) if the duration
+     * (idle time) is more than {@link SearchIndex#getVolatileIdleTime()}
+     * seconds.
+     */
+    private synchronized void checkFlush() {
+	long idleTime = System.currentTimeMillis() - lastFlushTime;
+	// do not flush if volatileIdleTime is zero or negative
+	if (handler.getVolatileIdleTime() > 0
+		&& idleTime > handler.getVolatileIdleTime() * 1000) {
+	    try {
+		if (redoLog.hasEntries()) {
+		    log.debug("Flushing index after being idle for " + idleTime
+			    + " ms.");
+		    synchronized (updateMonitor) {
+			updateInProgress = true;
+		    }
+		    try {
+			flush();
+		    } finally {
+			synchronized (updateMonitor) {
+			    updateInProgress = false;
+			    updateMonitor.notifyAll();
+			    releaseMultiReader();
+			}
+		    }
+		}
+	    } catch (IOException e) {
+		log.error("Unable to commit volatile index", e);
+	    }
+	}
+    }
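
As a worked example of the idle check above (the value 3 is an assumption for illustration, not a documented default): the idle time setting is configured in seconds while the timestamps are milliseconds, so with a setting of 3 the index is only flushed once more than 3000 ms have passed since the last flush and the redo log has entries.

// Hypothetical values only; mirrors the seconds-to-milliseconds comparison in checkFlush().
public class IdleFlushCheckSample {
    public static void main(String[] args) {
        int volatileIdleTimeSeconds = 3;                         // assumed configuration value
        long lastFlushTime = System.currentTimeMillis() - 4500;  // pretend the last flush was 4.5 s ago
        long idleTime = System.currentTimeMillis() - lastFlushTime;
        boolean flushDue = volatileIdleTimeSeconds > 0
                && idleTime > volatileIdleTimeSeconds * 1000L;
        System.out.println("idleTime=" + idleTime + " ms, flush due: " + flushDue);  // flush due: true
    }
}
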
 
-   /**
-    * Checks the indexing queue for finished text extractor jobs and updates the
-    * index accordingly if there are any new ones. This method is synchronized
-    * and should only be called by the timer task that periodically checks if
-    * there are documents ready in the indexing queue. A new transaction is
-    * used when documents are transferred from the indexing queue to the index.
-    */
-   private synchronized void checkIndexingQueue()
-   {
-      checkIndexingQueue(false);
-   }
+    /**
+     * Checks the indexing queue for finished text extractor jobs and updates the
+     * index accordingly if there are any new ones. This method is synchronized
+     * and should only be called by the timer task that periodically checks if
+     * there are documents ready in the indexing queue. A new transaction is
+     * used when documents are transferred from the indexing queue to the index.
+     */
+    private synchronized void checkIndexingQueue() {
+	checkIndexingQueue(false);
+    }
 
-   /**
-    * Checks the indexing queue for finished text extractor jobs and updates the
-    * index accordingly if there are any new ones.
-    *
-    * @param transactionPresent whether a transaction is in progress and the
-    *                           current {@link #getTransactionId()} should be
-    *                           used. If <code>false</code> a new transaction
-    *                           is created when documents are transferred from
-    *                           the indexing queue to the index.
-    */
-   private void checkIndexingQueue(boolean transactionPresent)
-   {
-      Document[] docs = indexingQueue.getFinishedDocuments();
-      Map finished = new HashMap();
-      for (int i = 0; i < docs.length; i++)
-      {
-         String uuid = docs[i].get(FieldNames.UUID);
-         finished.put(uuid, docs[i]);
-      }
+    /**
+     * Checks the indexing queue for finished text extractor jobs and updates the
+     * index accordingly if there are any new ones.
+     * 
+     * @param transactionPresent
+     *            whether a transaction is in progress and the current
+     *            {@link #getTransactionId()} should be used. If
+     *            <code>false</code> a new transaction is created when documents
+     *            are transferred from the indexing queue to the index.
+     */
+    private void checkIndexingQueue(boolean transactionPresent) {
+	Document[] docs = indexingQueue.getFinishedDocuments();
+	Map finished = new HashMap();
+	for (int i = 0; i < docs.length; i++) {
+	    String uuid = docs[i].get(FieldNames.UUID);
+	    finished.put(uuid, docs[i]);
+	}
 
-      // now update index with the remaining ones if there are any
-      if (!finished.isEmpty())
-      {
-         log.info("updating index with {} nodes from indexing queue.", new Long(finished.size()));
+	// now update index with the remaining ones if there are any
+	if (!finished.isEmpty()) {
+	    log.info("updating index with {} nodes from indexing queue.",
+		    new Long(finished.size()));
 
-         // remove documents from the queue
-         for (Iterator it = finished.keySet().iterator(); it.hasNext();)
-         {
-            indexingQueue.removeDocument(it.next().toString());
-         }
+	    // remove documents from the queue
+	    for (Iterator it = finished.keySet().iterator(); it.hasNext();) {
+		indexingQueue.removeDocument(it.next().toString());
+	    }
 
-         try
-         {
-            if (transactionPresent)
-            {
-               for (Iterator it = finished.keySet().iterator(); it.hasNext();)
-               {
-                  executeAndLog(new DeleteNode(getTransactionId(), (String)it.next()));
-               }
-               for (Iterator it = finished.values().iterator(); it.hasNext();)
-               {
-                  executeAndLog(new AddNode(getTransactionId(), (Document)it.next()));
-               }
-            }
-            else
-            {
-               update(finished.keySet(), finished.values());
-            }
-         }
-         catch (IOException e)
-         {
-            // update failed
-            log.warn("Failed to update index with deferred text extraction", e);
-         }
-      }
-   }
+	    try {
+		if (transactionPresent) {
+		    for (Iterator it = finished.keySet().iterator(); it
+			    .hasNext();) {
+			executeAndLog(new DeleteNode(getTransactionId(),
+				(String) it.next()));
+		    }
+		    for (Iterator it = finished.values().iterator(); it
+			    .hasNext();) {
+			executeAndLog(new AddNode(getTransactionId(),
+				(Document) it.next()));
+		    }
+		} else {
+		    update(finished.keySet(), finished.values());
+		}
+	    } catch (IOException e) {
+		// update failed
+		log.warn(
+			"Failed to update index with deferred text extraction",
+			e);
+	    }
+	}
+    }
 
-   //------------------------< Actions >---------------------------------------
+    // ------------------------< Actions >---------------------------------------
 
-   /**
-    * Defines an action on a <code>MultiIndex</code>.
-    */
-   public abstract static class Action
-   {
+    /**
+     * Defines an action on a <code>MultiIndex</code>.
+     */
+    public abstract static class Action {
 
-      /**
-       * Action identifier in redo log for transaction start action.
-       */
-      static final String START = "STR";
+	/**
+	 * Action identifier in redo log for transaction start action.
+	 */
+	static final String START = "STR";
 
-      /**
-       * Action type for start action.
-       */
-      public static final int TYPE_START = 0;
+	/**
+	 * Action type for start action.
+	 */
+	public static final int TYPE_START = 0;
 
-      /**
-       * Action identifier in redo log for add node action.
-       */
-      static final String ADD_NODE = "ADD";
+	/**
+	 * Action identifier in redo log for add node action.
+	 */
+	static final String ADD_NODE = "ADD";
 
-      /**
-       * Action type for add node action.
-       */
-      public static final int TYPE_ADD_NODE = 1;
+	/**
+	 * Action type for add node action.
+	 */
+	public static final int TYPE_ADD_NODE = 1;
 
-      /**
-       * Action identifier in redo log for node delete action.
-       */
-      static final String DELETE_NODE = "DEL";
+	/**
+	 * Action identifier in redo log for node delete action.
+	 */
+	static final String DELETE_NODE = "DEL";
 
-      /**
-       * Action type for delete node action.
-       */
-      public static final int TYPE_DELETE_NODE = 2;
+	/**
+	 * Action type for delete node action.
+	 */
+	public static final int TYPE_DELETE_NODE = 2;
 
-      /**
-       * Action identifier in redo log for transaction commit action.
-       */
-      static final String COMMIT = "COM";
+	/**
+	 * Action identifier in redo log for transaction commit action.
+	 */
+	static final String COMMIT = "COM";
 
-      /**
-       * Action type for commit action.
-       */
-      public static final int TYPE_COMMIT = 3;
+	/**
+	 * Action type for commit action.
+	 */
+	public static final int TYPE_COMMIT = 3;
 
-      /**
-       * Action identifier in redo log for volatile index commit action.
-       */
-      static final String VOLATILE_COMMIT = "VOL_COM";
+	/**
+	 * Action identifier in redo log for volatile index commit action.
+	 */
+	static final String VOLATILE_COMMIT = "VOL_COM";
 
-      /**
-       * Action type for volatile index commit action.
-       */
-      public static final int TYPE_VOLATILE_COMMIT = 4;
+	/**
+	 * Action type for volatile index commit action.
+	 */
+	public static final int TYPE_VOLATILE_COMMIT = 4;
 
-      /**
-       * Action identifier in redo log for index create action.
-       */
-      static final String CREATE_INDEX = "CRE_IDX";
+	/**
+	 * Action identifier in redo log for index create action.
+	 */
+	static final String CREATE_INDEX = "CRE_IDX";
 
-      /**
-       * Action type for create index action.
-       */
-      public static final int TYPE_CREATE_INDEX = 5;
+	/**
+	 * Action type for create index action.
+	 */
+	public static final int TYPE_CREATE_INDEX = 5;
 
-      /**
-       * Action identifier in redo log for index add action.
-       */
-      static final String ADD_INDEX = "ADD_IDX";
+	/**
+	 * Action identifier in redo log for index add action.
+	 */
+	static final String ADD_INDEX = "ADD_IDX";
 
-      /**
-       * Action type for add index action.
-       */
-      public static final int TYPE_ADD_INDEX = 6;
+	/**
+	 * Action type for add index action.
+	 */
+	public static final int TYPE_ADD_INDEX = 6;
 
-      /**
-       * Action identifier in redo log for delete index action.
-       */
-      static final String DELETE_INDEX = "DEL_IDX";
+	/**
+	 * Action identifier in redo log for delete index action.
+	 */
+	static final String DELETE_INDEX = "DEL_IDX";
 
-      /**
-       * Action type for delete index action.
-       */
-      public static final int TYPE_DELETE_INDEX = 7;
+	/**
+	 * Action type for delete index action.
+	 */
+	public static final int TYPE_DELETE_INDEX = 7;
 
-      /**
-       * Transaction identifier for internal actions like volatile index
-       * commit triggered by timer thread.
-       */
-      static final long INTERNAL_TRANSACTION = -1;
+	/**
+	 * Transaction identifier for internal actions like volatile index
+	 * commit triggered by timer thread.
+	 */
+	static final long INTERNAL_TRANSACTION = -1;
 
-      /**
-       * Transaction identifier for the internal action that replaces indexes.
-       */
-      static final long INTERNAL_TRANS_REPL_INDEXES = -2;
+	/**
+	 * Transaction identifier for the internal action that replaces indexes.
+	 */
+	static final long INTERNAL_TRANS_REPL_INDEXES = -2;
 
-      /**
-       * The id of the transaction that executed this action.
-       */
-      private final long transactionId;
+	/**
+	 * The id of the transaction that executed this action.
+	 */
+	private final long transactionId;
 
-      /**
-       * The action type.
-       */
-      private final int type;
+	/**
+	 * The action type.
+	 */
+	private final int type;
 
-      /**
-       * Creates a new <code>Action</code>.
-       *
-       * @param transactionId the id of the transaction that executed this
-       *                      action.
-       * @param type          the action type.
-       */
-      Action(long transactionId, int type)
-      {
-         this.transactionId = transactionId;
-         this.type = type;
-      }
+	/**
+	 * Creates a new <code>Action</code>.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executed this action.
+	 * @param type
+	 *            the action type.
+	 */
+	Action(long transactionId, int type) {
+	    this.transactionId = transactionId;
+	    this.type = type;
+	}
 
-      /**
-       * Returns the transaction id for this <code>Action</code>.
-       *
-       * @return the transaction id for this <code>Action</code>.
-       */
-      long getTransactionId()
-      {
-         return transactionId;
-      }
+	/**
+	 * Returns the transaction id for this <code>Action</code>.
+	 * 
+	 * @return the transaction id for this <code>Action</code>.
+	 */
+	long getTransactionId() {
+	    return transactionId;
+	}
 
-      /**
-       * Returns the action type.
-       *
-       * @return the action type.
-       */
-      int getType()
-      {
-         return type;
-      }
+	/**
+	 * Returns the action type.
+	 * 
+	 * @return the action type.
+	 */
+	int getType() {
+	    return type;
+	}
 
-      /**
-       * Executes this action on the <code>index</code>.
-       *
-       * @param index the index where to execute the action.
-       * @throws IOException         if the action fails due to some I/O error in
-       *                             the index or some other error.
-       */
-      public abstract void execute(MultiIndex index) throws IOException;
+	/**
+	 * Executes this action on the <code>index</code>.
+	 * 
+	 * @param index
+	 *            the index where to execute the action.
+	 * @throws IOException
+	 *             if the action fails due to some I/O error in the index or
+	 *             some other error.
+	 */
+	public abstract void execute(MultiIndex index) throws IOException;
 
-      /**
-       * Executes the inverse operation of this action. That is, does an undo
-       * of this action. This default implementation does nothing, but returns
-       * silently.
-       *
-       * @param index the index where to undo the action.
-       * @throws IOException if the action cannot be undone.
-       */
-      public void undo(MultiIndex index) throws IOException
-      {
-      }
+	/**
+	 * Executes the inverse operation of this action. That is, does an undo
+	 * of this action. This default implementation does nothing, but returns
+	 * silently.
+	 * 
+	 * @param index
+	 *            the index where to undo the action.
+	 * @throws IOException
+	 *             if the action cannot be undone.
+	 */
+	public void undo(MultiIndex index) throws IOException {
+	}
 
-      /**
-       * Returns a <code>String</code> representation of this action that can be
-       * written to the {@link RedoLog}.
-       *
-       * @return a <code>String</code> representation of this action.
-       */
-      public abstract String toString();
+	/**
+	 * Returns a <code>String</code> representation of this action that can
+	 * be written to the {@link RedoLog}.
+	 * 
+	 * @return a <code>String</code> representation of this action.
+	 */
+	public abstract String toString();
 
-      /**
-       * Parses a line in the redo log and creates an {@link Action}.
-       *
-       * @param line the line from the redo log.
-       * @return an <code>Action</code>.
-       * @throws IllegalArgumentException if the line is malformed.
-       */
-      static Action fromString(String line) throws IllegalArgumentException
-      {
-         int endTransIdx = line.indexOf(' ');
-         if (endTransIdx == -1)
-         {
-            throw new IllegalArgumentException(line);
-         }
-         long transactionId;
-         try
-         {
-            transactionId = Long.parseLong(line.substring(0, endTransIdx));
-         }
-         catch (NumberFormatException e)
-         {
-            throw new IllegalArgumentException(line);
-         }
-         int endActionIdx = line.indexOf(' ', endTransIdx + 1);
-         if (endActionIdx == -1)
-         {
-            // action does not have arguments
-            endActionIdx = line.length();
-         }
-         String actionLabel = line.substring(endTransIdx + 1, endActionIdx);
-         String arguments = "";
-         if (endActionIdx + 1 <= line.length())
-         {
-            arguments = line.substring(endActionIdx + 1);
-         }
-         Action a;
-         if (actionLabel.equals(Action.ADD_NODE))
-         {
-            a = AddNode.fromString(transactionId, arguments);
-         }
-         else if (actionLabel.equals(Action.ADD_INDEX))
-         {
-            a = AddIndex.fromString(transactionId, arguments);
-         }
-         else if (actionLabel.equals(Action.COMMIT))
-         {
-            a = Commit.fromString(transactionId, arguments);
-         }
-         else if (actionLabel.equals(Action.CREATE_INDEX))
-         {
-            a = CreateIndex.fromString(transactionId, arguments);
-         }
-         else if (actionLabel.equals(Action.DELETE_INDEX))
-         {
-            a = DeleteIndex.fromString(transactionId, arguments);
-         }
-         else if (actionLabel.equals(Action.DELETE_NODE))
-         {
-            a = DeleteNode.fromString(transactionId, arguments);
-         }
-         else if (actionLabel.equals(Action.START))
-         {
-            a = Start.fromString(transactionId, arguments);
-         }
-         else if (actionLabel.equals(Action.VOLATILE_COMMIT))
-         {
-            a = VolatileCommit.fromString(transactionId, arguments);
-         }
-         else
-         {
-            throw new IllegalArgumentException(line);
-         }
-         return a;
-      }
-   }
+	/**
+	 * Parses a line in the redo log and creates an {@link Action}.
+	 * 
+	 * @param line
+	 *            the line from the redo log.
+	 * @return an <code>Action</code>.
+	 * @throws IllegalArgumentException
+	 *             if the line is malformed.
+	 */
+	static Action fromString(String line) throws IllegalArgumentException {
+	    int endTransIdx = line.indexOf(' ');
+	    if (endTransIdx == -1) {
+		throw new IllegalArgumentException(line);
+	    }
+	    long transactionId;
+	    try {
+		transactionId = Long.parseLong(line.substring(0, endTransIdx));
+	    } catch (NumberFormatException e) {
+		throw new IllegalArgumentException(line);
+	    }
+	    int endActionIdx = line.indexOf(' ', endTransIdx + 1);
+	    if (endActionIdx == -1) {
+		// action does not have arguments
+		endActionIdx = line.length();
+	    }
+	    String actionLabel = line.substring(endTransIdx + 1, endActionIdx);
+	    String arguments = "";
+	    if (endActionIdx + 1 <= line.length()) {
+		arguments = line.substring(endActionIdx + 1);
+	    }
+	    Action a;
+	    if (actionLabel.equals(Action.ADD_NODE)) {
+		a = AddNode.fromString(transactionId, arguments);
+	    } else if (actionLabel.equals(Action.ADD_INDEX)) {
+		a = AddIndex.fromString(transactionId, arguments);
+	    } else if (actionLabel.equals(Action.COMMIT)) {
+		a = Commit.fromString(transactionId, arguments);
+	    } else if (actionLabel.equals(Action.CREATE_INDEX)) {
+		a = CreateIndex.fromString(transactionId, arguments);
+	    } else if (actionLabel.equals(Action.DELETE_INDEX)) {
+		a = DeleteIndex.fromString(transactionId, arguments);
+	    } else if (actionLabel.equals(Action.DELETE_NODE)) {
+		a = DeleteNode.fromString(transactionId, arguments);
+	    } else if (actionLabel.equals(Action.START)) {
+		a = Start.fromString(transactionId, arguments);
+	    } else if (actionLabel.equals(Action.VOLATILE_COMMIT)) {
+		a = VolatileCommit.fromString(transactionId, arguments);
+	    } else {
+		throw new IllegalArgumentException(line);
+	    }
+	    return a;
+	}
+    }
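
For readers following fromString() above: every redo-log entry is a single whitespace-separated line of the form "<transactionId> <label> [arguments]". The standalone sketch below (class and method names are invented for illustration and are not part of this commit) shows that round trip in isolation; it is a simplified stand-in, not the committed parser.

// Simplified illustration of the "<transactionId> <label> [arguments]" layout
// handled by Action.fromString() and the Action.toString() implementations.
public class RedoLogLineSample {
    public static void main(String[] args) {
        String line = format(42, "ADD_IDX", "_3");   // "42 ADD_IDX _3"
        System.out.println(parse(line));             // "tx=42 label=ADD_IDX args=_3"
    }

    static String format(long transactionId, String label, String arguments) {
        return transactionId + " " + label + (arguments.isEmpty() ? "" : " " + arguments);
    }

    static String parse(String line) {
        int endTrans = line.indexOf(' ');                       // first token: transaction id
        long tx = Long.parseLong(line.substring(0, endTrans));
        int endLabel = line.indexOf(' ', endTrans + 1);         // second token: action label
        if (endLabel == -1) {
            endLabel = line.length();                           // no arguments present
        }
        String label = line.substring(endTrans + 1, endLabel);
        String rest = endLabel < line.length() ? line.substring(endLabel + 1) : "";
        return "tx=" + tx + " label=" + label + " args=" + rest;
    }
}
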
 
-   /**
-    * Adds an index to the MultiIndex's active persistent index list.
-    */
-   private static class AddIndex extends Action
-   {
+    /**
+     * Adds an index to the MultiIndex's active persistent index list.
+     */
+    private static class AddIndex extends Action {
 
-      /**
-       * The name of the index to add.
-       */
-      private String indexName;
+	/**
+	 * The name of the index to add.
+	 */
+	private String indexName;
 
-      /**
-       * Creates a new AddIndex action.
-       *
-       * @param transactionId the id of the transaction that executes this
-       *                      action.
-       * @param indexName     the name of the index to add, or <code>null</code>
-       *                      if an index with a new name should be created.
-       */
-      AddIndex(long transactionId, String indexName)
-      {
-         super(transactionId, Action.TYPE_ADD_INDEX);
-         this.indexName = indexName;
-      }
+	/**
+	 * Creates a new AddIndex action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param indexName
+	 *            the name of the index to add, or <code>null</code> if an
+	 *            index with a new name should be created.
+	 */
+	AddIndex(long transactionId, String indexName) {
+	    super(transactionId, Action.TYPE_ADD_INDEX);
+	    this.indexName = indexName;
+	}
 
-      /**
-       * Creates a new AddIndex action.
-       *
-       * @param transactionId the id of the transaction that executes this
-       *                      action.
-       * @param arguments     the name of the index to add.
-       * @return the AddIndex action.
-       * @throws IllegalArgumentException if the arguments are malformed.
-       */
-      static AddIndex fromString(long transactionId, String arguments)
-      {
-         return new AddIndex(transactionId, arguments);
-      }
+	/**
+	 * Creates a new AddIndex action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param arguments
+	 *            the name of the index to add.
+	 * @return the AddIndex action.
+	 * @throws IllegalArgumentException
+	 *             if the arguments are malformed.
+	 */
+	static AddIndex fromString(long transactionId, String arguments) {
+	    return new AddIndex(transactionId, arguments);
+	}
 
-      /**
-       * Adds a sub index to <code>index</code>.
-       *
-       * @inheritDoc
-       */
-      public void execute(MultiIndex index) throws IOException
-      {
-         PersistentIndex idx = index.getOrCreateIndex(indexName);
-         if (!index.indexNames.contains(indexName))
-         {
-            index.indexNames.addName(indexName);
-            // now that the index is in the active list let the merger know about it
-            index.merger.indexAdded(indexName, idx.getNumDocuments());
-         }
-      }
+	/**
+	 * Adds a sub index to <code>index</code>.
+	 * 
+	 * @inheritDoc
+	 */
+	public void execute(MultiIndex index) throws IOException {
+	    PersistentIndex idx = index.getOrCreateIndex(indexName);
+	    if (!index.indexNames.contains(indexName)) {
+		index.indexNames.addName(indexName);
+		// now that the index is in the active list let the merger know
+		// about it
+		index.merger.indexAdded(indexName, idx.getNumDocuments());
+	    }
+	}
 
-      /**
-       * @inheritDoc
-       */
-      public String toString()
-      {
-         StringBuffer logLine = new StringBuffer();
-         logLine.append(Long.toString(getTransactionId()));
-         logLine.append(' ');
-         logLine.append(Action.ADD_INDEX);
-         logLine.append(' ');
-         logLine.append(indexName);
-         return logLine.toString();
-      }
-   }
+	/**
+	 * @inheritDoc
+	 */
+	public String toString() {
+	    StringBuffer logLine = new StringBuffer();
+	    logLine.append(Long.toString(getTransactionId()));
+	    logLine.append(' ');
+	    logLine.append(Action.ADD_INDEX);
+	    logLine.append(' ');
+	    logLine.append(indexName);
+	    return logLine.toString();
+	}
+    }
 
-   /**
-    * Adds a node to the index.
-    */
-   private static class AddNode extends Action
-   {
+    /**
+     * Adds a node to the index.
+     */
+    private static class AddNode extends Action {
 
-      /**
-       * The maximum length of an AddNode String.
-       */
-      private static final int ENTRY_LENGTH =
-         Long.toString(Long.MAX_VALUE).length() + Action.ADD_NODE.length() + Constants.UUID_FORMATTED_LENGTH + 2;
+	/**
+	 * The maximum length of an AddNode String.
+	 */
+	private static final int ENTRY_LENGTH = Long.toString(Long.MAX_VALUE)
+		.length()
+		+ Action.ADD_NODE.length()
+		+ Constants.UUID_FORMATTED_LENGTH
+		+ 2;
 
-      /**
-       * The uuid of the node to add.
-       */
-      private final String uuid;
+	/**
+	 * The uuid of the node to add.
+	 */
+	private final String uuid;
 
-      /**
-       * The document to add to the index, or <code>null</code> if not available.
-       */
-      private Document doc;
+	/**
+	 * The document to add to the index, or <code>null</code> if not
+	 * available.
+	 */
+	private Document doc;
 
-      /**
-       * Creates a new AddNode action.
-       *
-       * @param transactionId the id of the transaction that executes this action.
-       * @param uuid the uuid of the node to add.
-       */
-      AddNode(long transactionId, String uuid)
-      {
-         super(transactionId, Action.TYPE_ADD_NODE);
-         this.uuid = uuid;
-      }
+	/**
+	 * Creates a new AddNode action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param uuid
+	 *            the uuid of the node to add.
+	 */
+	AddNode(long transactionId, String uuid) {
+	    super(transactionId, Action.TYPE_ADD_NODE);
+	    this.uuid = uuid;
+	}
 
-      /**
-       * Creates a new AddNode action.
-       *
-       * @param transactionId the id of the transaction that executes this action.
-       * @param doc the document to add.
-       */
-      AddNode(long transactionId, Document doc)
-      {
-         this(transactionId, doc.get(FieldNames.UUID));
-         this.doc = doc;
-      }
+	/**
+	 * Creates a new AddNode action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param doc
+	 *            the document to add.
+	 */
+	AddNode(long transactionId, Document doc) {
+	    this(transactionId, doc.get(FieldNames.UUID));
+	    this.doc = doc;
+	}
 
-      /**
-       * Creates a new AddNode action.
-       *
-       * @param transactionId the id of the transaction that executes this
-       *                      action.
-       * @param arguments     the arguments to this action: the uuid of the node
-       *                      to add.
-       * @return the AddNode action.
-       * @throws IllegalArgumentException if the arguments are malformed (not a
-       *                                  UUID).
-       */
-      static AddNode fromString(long transactionId, String arguments) throws IllegalArgumentException
-      {
-         // simple length check
-         if (arguments.length() != Constants.UUID_FORMATTED_LENGTH)
-         {
-            throw new IllegalArgumentException("arguments is not a uuid");
-         }
-         return new AddNode(transactionId, arguments);
-      }
+	/**
+	 * Creates a new AddNode action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param arguments
+	 *            the arguments to this action: the uuid of the node to add.
+	 * @return the AddNode action.
+	 * @throws IllegalArgumentException
+	 *             if the arguments are malformed (not a UUID).
+	 */
+	static AddNode fromString(long transactionId, String arguments)
+		throws IllegalArgumentException {
+	    // simple length check
+	    if (arguments.length() != Constants.UUID_FORMATTED_LENGTH) {
+		throw new IllegalArgumentException("arguments is not a uuid");
+	    }
+	    return new AddNode(transactionId, arguments);
+	}
 
-      /**
-       * Adds a node to the index.
-       *
-       * @inheritDoc
-       */
-      public void execute(MultiIndex index) throws IOException
-      {
-         if (doc == null)
-         {
-            try
-            {
-               doc = index.createDocument(uuid);
-            }
-            catch (RepositoryException e)
-            {
-               // node does not exist anymore
-               log.debug(e.getMessage());
-            }
-         }
-         if (doc != null)
-         {
-            index.volatileIndex.addDocuments(new Document[]{doc});
-         }
-      }
+	/**
+	 * Adds a node to the index.
+	 * 
+	 * @inheritDoc
+	 */
+	public void execute(MultiIndex index) throws IOException {
+	    if (doc == null) {
+		try {
+		    doc = index.createDocument(uuid);
+		} catch (RepositoryException e) {
+		    // node does not exist anymore
+		    log.debug(e.getMessage());
+		}
+	    }
+	    if (doc != null) {
+		index.volatileIndex.addDocuments(new Document[] { doc });
+	    }
+	}
 
-      /**
-       * @inheritDoc
-       */
-      public String toString()
-      {
-         StringBuffer logLine = new StringBuffer(ENTRY_LENGTH);
-         logLine.append(Long.toString(getTransactionId()));
-         logLine.append(' ');
-         logLine.append(Action.ADD_NODE);
-         logLine.append(' ');
-         logLine.append(uuid);
-         return logLine.toString();
-      }
-   }
+	/**
+	 * @inheritDoc
+	 */
+	public String toString() {
+	    StringBuffer logLine = new StringBuffer(ENTRY_LENGTH);
+	    logLine.append(Long.toString(getTransactionId()));
+	    logLine.append(' ');
+	    logLine.append(Action.ADD_NODE);
+	    logLine.append(' ');
+	    logLine.append(uuid);
+	    return logLine.toString();
+	}
+    }
 
-   /**
-    * Commits a transaction.
-    */
-   private static class Commit extends Action
-   {
+    /**
+     * Commits a transaction.
+     */
+    private static class Commit extends Action {
 
-      /**
-       * Creates a new Commit action.
-       *
-       * @param transactionId the id of the transaction that is committed.
-       */
-      Commit(long transactionId)
-      {
-         super(transactionId, Action.TYPE_COMMIT);
-      }
+	/**
+	 * Creates a new Commit action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that is committed.
+	 */
+	Commit(long transactionId) {
+	    super(transactionId, Action.TYPE_COMMIT);
+	}
 
-      /**
-       * Creates a new Commit action.
-       *
-       * @param transactionId the id of the transaction that executes this
-       *                      action.
-       * @param arguments     ignored by this method.
-       * @return the Commit action.
-       */
-      static Commit fromString(long transactionId, String arguments)
-      {
-         return new Commit(transactionId);
-      }
+	/**
+	 * Creates a new Commit action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param arguments
+	 *            ignored by this method.
+	 * @return the Commit action.
+	 */
+	static Commit fromString(long transactionId, String arguments) {
+	    return new Commit(transactionId);
+	}
 
-      /**
-       * Touches the last flush time (sets it to the current time).
-       *
-       * @inheritDoc
-       */
-      public void execute(MultiIndex index) throws IOException
-      {
-         index.lastFlushTime = System.currentTimeMillis();
-      }
+	/**
+	 * Touches the last flush time (sets it to the current time).
+	 * 
+	 * @inheritDoc
+	 */
+	public void execute(MultiIndex index) throws IOException {
+	    index.lastFlushTime = System.currentTimeMillis();
+	}
 
-      /**
-       * @inheritDoc
-       */
-      public String toString()
-      {
-         return Long.toString(getTransactionId()) + ' ' + Action.COMMIT;
-      }
-   }
+	/**
+	 * @inheritDoc
+	 */
+	public String toString() {
+	    return Long.toString(getTransactionId()) + ' ' + Action.COMMIT;
+	}
+    }
 
-   /**
-    * Creates a new sub index but does not add it to the active persistent index
-    * list.
-    */
-   private static class CreateIndex extends Action
-   {
+    /**
+     * Creates a new sub index but does not add it to the active persistent
+     * index list.
+     */
+    private static class CreateIndex extends Action {
 
-      /**
-       * The name of the index to add.
-       */
-      private String indexName;
+	/**
+	 * The name of the index to add.
+	 */
+	private String indexName;
 
-      /**
-       * Creates a new CreateIndex action.
-       *
-       * @param transactionId the id of the transaction that executes this
-       *                      action.
-       * @param indexName     the name of the index to add, or <code>null</code>
-       *                      if an index with a new name should be created.
-       */
-      CreateIndex(long transactionId, String indexName)
-      {
-         super(transactionId, Action.TYPE_CREATE_INDEX);
-         this.indexName = indexName;
-      }
+	/**
+	 * Creates a new CreateIndex action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param indexName
+	 *            the name of the index to add, or <code>null</code> if an
+	 *            index with a new name should be created.
+	 */
+	CreateIndex(long transactionId, String indexName) {
+	    super(transactionId, Action.TYPE_CREATE_INDEX);
+	    this.indexName = indexName;
+	}
 
-      /**
-       * Creates a new CreateIndex action.
-       *
-       * @param transactionId the id of the transaction that executes this
-       *                      action.
-       * @param arguments     the name of the index to create.
-       * @return the CreateIndex action.
-       * @throws IllegalArgumentException if the arguments are malformed.
-       */
-      static CreateIndex fromString(long transactionId, String arguments)
-      {
-         // when created from String, this action is executed as redo action
-         return new CreateIndex(transactionId, arguments);
-      }
+	/**
+	 * Creates a new CreateIndex action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param arguments
+	 *            the name of the index to create.
+	 * @return the CreateIndex action.
+	 * @throws IllegalArgumentException
+	 *             if the arguments are malformed.
+	 */
+	static CreateIndex fromString(long transactionId, String arguments) {
+	    // when created from String, this action is executed as redo action
+	    return new CreateIndex(transactionId, arguments);
+	}
 
-      /**
-       * Creates a new index.
-       *
-       * @inheritDoc
-       */
-      public void execute(MultiIndex index) throws IOException
-      {
-         PersistentIndex idx = index.getOrCreateIndex(indexName);
-         indexName = idx.getName();
-      }
+	/**
+	 * Creates a new index.
+	 * 
+	 * @inheritDoc
+	 */
+	public void execute(MultiIndex index) throws IOException {
+	    PersistentIndex idx = index.getOrCreateIndex(indexName);
+	    indexName = idx.getName();
+	}
 
-      /**
-       * @inheritDoc
-       */
-      public void undo(MultiIndex index) throws IOException
-      {
-         if (index.hasIndex(indexName))
-         {
-            PersistentIndex idx = index.getOrCreateIndex(indexName);
-            idx.close();
-            index.deleteIndex(idx);
-         }
-      }
+	/**
+	 * @inheritDoc
+	 */
+	public void undo(MultiIndex index) throws IOException {
+	    if (index.hasIndex(indexName)) {
+		PersistentIndex idx = index.getOrCreateIndex(indexName);
+		idx.close();
+		index.deleteIndex(idx);
+	    }
+	}
 
-      /**
-       * @inheritDoc
-       */
-      public String toString()
-      {
-         StringBuffer logLine = new StringBuffer();
-         logLine.append(Long.toString(getTransactionId()));
-         logLine.append(' ');
-         logLine.append(Action.CREATE_INDEX);
-         logLine.append(' ');
-         logLine.append(indexName);
-         return logLine.toString();
-      }
+	/**
+	 * @inheritDoc
+	 */
+	public String toString() {
+	    StringBuffer logLine = new StringBuffer();
+	    logLine.append(Long.toString(getTransactionId()));
+	    logLine.append(' ');
+	    logLine.append(Action.CREATE_INDEX);
+	    logLine.append(' ');
+	    logLine.append(indexName);
+	    return logLine.toString();
+	}
 
-      /**
-       * Returns the index name that has been created. If this method is called
-       * before {@link #execute(MultiIndex)} it will return <code>null</code>.
-       *
-       * @return the name of the index that has been created.
-       */
-      String getIndexName()
-      {
-         return indexName;
-      }
-   }
+	/**
+	 * Returns the index name that has been created. If this method is
+	 * called before {@link #execute(MultiIndex)} it will return
+	 * <code>null</code>.
+	 * 
+	 * @return the name of the index that has been created.
+	 */
+	String getIndexName() {
+	    return indexName;
+	}
+    }
 
-   /**
-    * Closes and deletes an index that is no longer in use.
-    */
-   private static class DeleteIndex extends Action
-   {
+    /**
+     * Closes and deletes an index that is no longer in use.
+     */
+    private static class DeleteIndex extends Action {
 
-      /**
-       * The name of the index to add.
-       */
-      private String indexName;
+	/**
+	 * The name of the index to delete.
+	 */
+	private String indexName;
 
-      /**
-       * Creates a new DeleteIndex action.
-       *
-       * @param transactionId the id of the transaction that executes this
-       *                      action.
-       * @param indexName     the name of the index to delete.
-       */
-      DeleteIndex(long transactionId, String indexName)
-      {
-         super(transactionId, Action.TYPE_DELETE_INDEX);
-         this.indexName = indexName;
-      }
+	/**
+	 * Creates a new DeleteIndex action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param indexName
+	 *            the name of the index to delete.
+	 */
+	DeleteIndex(long transactionId, String indexName) {
+	    super(transactionId, Action.TYPE_DELETE_INDEX);
+	    this.indexName = indexName;
+	}
 
-      /**
-       * Creates a new DeleteIndex action.
-       *
-       * @param transactionId the id of the transaction that executes this
-       *                      action.
-       * @param arguments     the name of the index to delete.
-       * @return the DeleteIndex action.
-       * @throws IllegalArgumentException if the arguments are malformed.
-       */
-      static DeleteIndex fromString(long transactionId, String arguments)
-      {
-         return new DeleteIndex(transactionId, arguments);
-      }
+	/**
+	 * Creates a new DeleteIndex action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param arguments
+	 *            the name of the index to delete.
+	 * @return the DeleteIndex action.
+	 * @throws IllegalArgumentException
+	 *             if the arguments are malformed.
+	 */
+	static DeleteIndex fromString(long transactionId, String arguments) {
+	    return new DeleteIndex(transactionId, arguments);
+	}
 
-      /**
-       * Removes a sub index from <code>index</code>.
-       *
-       * @inheritDoc
-       */
-      public void execute(MultiIndex index) throws IOException
-      {
-         // get index if it exists
-         for (Iterator it = index.indexes.iterator(); it.hasNext();)
-         {
-            PersistentIndex idx = (PersistentIndex)it.next();
-            if (idx.getName().equals(indexName))
-            {
-               idx.close();
-               index.deleteIndex(idx);
-               break;
-            }
-         }
-      }
+	/**
+	 * Removes a sub index from <code>index</code>.
+	 * 
+	 * @inheritDoc
+	 */
+	public void execute(MultiIndex index) throws IOException {
+	    // get index if it exists
+	    for (Iterator it = index.indexes.iterator(); it.hasNext();) {
+		PersistentIndex idx = (PersistentIndex) it.next();
+		if (idx.getName().equals(indexName)) {
+		    idx.close();
+		    index.deleteIndex(idx);
+		    break;
+		}
+	    }
+	}
 
-      /**
-       * @inheritDoc
-       */
-      public String toString()
-      {
-         StringBuffer logLine = new StringBuffer();
-         logLine.append(Long.toString(getTransactionId()));
-         logLine.append(' ');
-         logLine.append(Action.DELETE_INDEX);
-         logLine.append(' ');
-         logLine.append(indexName);
-         return logLine.toString();
-      }
-   }
+	/**
+	 * @inheritDoc
+	 */
+	public String toString() {
+	    StringBuffer logLine = new StringBuffer();
+	    logLine.append(Long.toString(getTransactionId()));
+	    logLine.append(' ');
+	    logLine.append(Action.DELETE_INDEX);
+	    logLine.append(' ');
+	    logLine.append(indexName);
+	    return logLine.toString();
+	}
+    }
 
-   /**
-    * Deletes a node from the index.
-    */
-   private static class DeleteNode extends Action
-   {
+    /**
+     * Deletes a node from the index.
+     */
+    private static class DeleteNode extends Action {
 
-      /**
-       * The maximum length of a DeleteNode String.
-       */
-      private static final int ENTRY_LENGTH =
-         Long.toString(Long.MAX_VALUE).length() + Action.DELETE_NODE.length() + Constants.UUID_FORMATTED_LENGTH + 2;
+	/**
+	 * The maximum length of a DeleteNode String.
+	 */
+	private static final int ENTRY_LENGTH = Long.toString(Long.MAX_VALUE)
+		.length()
+		+ Action.DELETE_NODE.length()
+		+ Constants.UUID_FORMATTED_LENGTH
+		+ 2;
 
-      /**
-       * The uuid of the node to remove.
-       */
-      private final String uuid;
+	/**
+	 * The uuid of the node to remove.
+	 */
+	private final String uuid;
 
-      /**
-       * Creates a new DeleteNode action.
-       *
-       * @param transactionId the id of the transaction that executes this action.
-       * @param uuid the uuid of the node to delete.
-       */
-      DeleteNode(long transactionId, String uuid)
-      {
-         super(transactionId, Action.TYPE_DELETE_NODE);
-         this.uuid = uuid;
-      }
+	/**
+	 * Creates a new DeleteNode action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param uuid
+	 *            the uuid of the node to delete.
+	 */
+	DeleteNode(long transactionId, String uuid) {
+	    super(transactionId, Action.TYPE_DELETE_NODE);
+	    this.uuid = uuid;
+	}
 
-      /**
-       * Creates a new DeleteNode action.
-       *
-       * @param transactionId the id of the transaction that executes this
-       *                      action.
-       * @param arguments     the uuid of the node to delete.
-       * @return the DeleteNode action.
-       * @throws IllegalArgumentException if the arguments are malformed. Not a
-       *                                  UUID.
-       */
-      static DeleteNode fromString(long transactionId, String arguments)
-      {
-         // simple length check
-         if (arguments.length() != Constants.UUID_FORMATTED_LENGTH)
-         {
-            throw new IllegalArgumentException("arguments is not a uuid");
-         }
-         return new DeleteNode(transactionId, arguments);
-      }
+	/**
+	 * Creates a new DeleteNode action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param arguments
+	 *            the uuid of the node to delete.
+	 * @return the DeleteNode action.
+	 * @throws IllegalArgumentException
+	 *             if the arguments are malformed. Not a UUID.
+	 */
+	static DeleteNode fromString(long transactionId, String arguments) {
+	    // simple length check
+	    if (arguments.length() != Constants.UUID_FORMATTED_LENGTH) {
+		throw new IllegalArgumentException("arguments is not a uuid");
+	    }
+	    return new DeleteNode(transactionId, arguments);
+	}
 
-      /**
-       * Deletes a node from the index.
-       *
-       * @inheritDoc
-       */
-      public void execute(MultiIndex index) throws IOException
-      {
-         String uuidString = uuid.toString();
-         // check if indexing queue is still working on
-         // this node from a previous update
-         Document doc = index.indexingQueue.removeDocument(uuidString);
-         if (doc != null)
-         {
-            Util.disposeDocument(doc);
-         }
-         Term idTerm = new Term(FieldNames.UUID, uuidString);
-         // if the document cannot be deleted from the volatile index
-         // delete it from one of the persistent indexes.
-         int num = index.volatileIndex.removeDocument(idTerm);
-         if (num == 0)
-         {
-            for (int i = index.indexes.size() - 1; i >= 0; i--)
-            {
-               // only look in registered indexes
-               PersistentIndex idx = (PersistentIndex)index.indexes.get(i);
-               if (index.indexNames.contains(idx.getName()))
-               {
-                  num = idx.removeDocument(idTerm);
-                  if (num > 0)
-                  {
-                     return;
-                  }
-               }
-            }
-         }
-      }
+	/**
+	 * Deletes a node from the index.
+	 * 
+	 * @inheritDoc
+	 */
+	public void execute(MultiIndex index) throws IOException {
+	    String uuidString = uuid.toString();
+	    // check if indexing queue is still working on
+	    // this node from a previous update
+	    Document doc = index.indexingQueue.removeDocument(uuidString);
+	    if (doc != null) {
+		Util.disposeDocument(doc);
+	    }
+	    Term idTerm = new Term(FieldNames.UUID, uuidString);
+	    // if the document cannot be deleted from the volatile index
+	    // delete it from one of the persistent indexes.
+	    int num = index.volatileIndex.removeDocument(idTerm);
+	    if (num == 0) {
+		for (int i = index.indexes.size() - 1; i >= 0; i--) {
+		    // only look in registered indexes
+		    PersistentIndex idx = (PersistentIndex) index.indexes
+			    .get(i);
+		    if (index.indexNames.contains(idx.getName())) {
+			num = idx.removeDocument(idTerm);
+			if (num > 0) {
+			    return;
+			}
+		    }
+		}
+	    }
+	}
 
-      /**
-       * @inheritDoc
-       */
-      public String toString()
-      {
-         StringBuffer logLine = new StringBuffer(ENTRY_LENGTH);
-         logLine.append(Long.toString(getTransactionId()));
-         logLine.append(' ');
-         logLine.append(Action.DELETE_NODE);
-         logLine.append(' ');
-         logLine.append(uuid);
-         return logLine.toString();
-      }
-   }
+	/**
+	 * @inheritDoc
+	 */
+	public String toString() {
+	    StringBuffer logLine = new StringBuffer(ENTRY_LENGTH);
+	    logLine.append(Long.toString(getTransactionId()));
+	    logLine.append(' ');
+	    logLine.append(Action.DELETE_NODE);
+	    logLine.append(' ');
+	    logLine.append(uuid);
+	    return logLine.toString();
+	}
+    }
 
-   /**
-    * Starts a transaction.
-    */
-   private static class Start extends Action
-   {
+    /**
+     * Starts a transaction.
+     */
+    private static class Start extends Action {
 
-      /**
-       * Creates a new Start transaction action.
-       *
-       * @param transactionId the id of the transaction that started.
-       */
-      Start(long transactionId)
-      {
-         super(transactionId, Action.TYPE_START);
-      }
+	/**
+	 * Creates a new Start transaction action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that started.
+	 */
+	Start(long transactionId) {
+	    super(transactionId, Action.TYPE_START);
+	}
 
-      /**
-       * Creates a new Start action.
-       *
-       * @param transactionId the id of the transaction that executes this
-       *                      action.
-       * @param arguments     ignored by this method.
-       * @return the Start action.
-       */
-      static Start fromString(long transactionId, String arguments)
-      {
-         return new Start(transactionId);
-      }
+	/**
+	 * Creates a new Start action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param arguments
+	 *            ignored by this method.
+	 * @return the Start action.
+	 */
+	static Start fromString(long transactionId, String arguments) {
+	    return new Start(transactionId);
+	}
 
-      /**
-       * Sets the current transaction id on <code>index</code>.
-       *
-       * @inheritDoc
-       */
-      public void execute(MultiIndex index) throws IOException
-      {
-         index.currentTransactionId = getTransactionId();
-      }
+	/**
+	 * Sets the current transaction id on <code>index</code>.
+	 * 
+	 * @inheritDoc
+	 */
+	public void execute(MultiIndex index) throws IOException {
+	    index.currentTransactionId = getTransactionId();
+	}
 
-      /**
-       * @inheritDoc
-       */
-      public String toString()
-      {
-         return Long.toString(getTransactionId()) + ' ' + Action.START;
-      }
-   }
+	/**
+	 * @inheritDoc
+	 */
+	public String toString() {
+	    return Long.toString(getTransactionId()) + ' ' + Action.START;
+	}
+    }
 
-   /**
-    * Commits the volatile index to disk.
-    */
-   private static class VolatileCommit extends Action
-   {
+    /**
+     * Commits the volatile index to disk.
+     */
+    private static class VolatileCommit extends Action {
 
-      /**
-       * The name of the target index to commit to.
-       */
-      private final String targetIndex;
+	/**
+	 * The name of the target index to commit to.
+	 */
+	private final String targetIndex;
 
-      /**
-       * Creates a new VolatileCommit action.
-       *
-       * @param transactionId the id of the transaction that executes this action.
-       */
-      VolatileCommit(long transactionId, String targetIndex)
-      {
-         super(transactionId, Action.TYPE_VOLATILE_COMMIT);
-         this.targetIndex = targetIndex;
-      }
+	/**
+	 * Creates a new VolatileCommit action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param targetIndex
+	 *            the name of the index the volatile index is committed to.
+	 */
+	VolatileCommit(long transactionId, String targetIndex) {
+	    super(transactionId, Action.TYPE_VOLATILE_COMMIT);
+	    this.targetIndex = targetIndex;
+	}
 
-      /**
-       * Creates a new VolatileCommit action.
-       *
-       * @param transactionId the id of the transaction that executes this
-       *                      action.
-       * @param arguments     ignored by this implementation.
-       * @return the VolatileCommit action.
-       */
-      static VolatileCommit fromString(long transactionId, String arguments)
-      {
-         return new VolatileCommit(transactionId, arguments);
-      }
+	/**
+	 * Creates a new VolatileCommit action.
+	 * 
+	 * @param transactionId
+	 *            the id of the transaction that executes this action.
+	 * @param arguments
+	 *            the name of the target index to commit to.
+	 * @return the VolatileCommit action.
+	 */
+	static VolatileCommit fromString(long transactionId, String arguments) {
+	    return new VolatileCommit(transactionId, arguments);
+	}
 
-      /**
-       * Commits the volatile index to disk.
-       *
-       * @inheritDoc
-       */
-      public void execute(MultiIndex index) throws IOException
-      {
-         VolatileIndex volatileIndex = index.getVolatileIndex();
-         PersistentIndex persistentIndex = index.getOrCreateIndex(targetIndex);
-         persistentIndex.copyIndex(volatileIndex);
-         index.resetVolatileIndex();
-      }
+	/**
+	 * Commits the volatile index to disk.
+	 * 
+	 * @inheritDoc
+	 */
+	public void execute(MultiIndex index) throws IOException {
+	    VolatileIndex volatileIndex = index.getVolatileIndex();
+	    PersistentIndex persistentIndex = index
+		    .getOrCreateIndex(targetIndex);
+	    persistentIndex.copyIndex(volatileIndex);
+	    index.resetVolatileIndex();
+	}
 
-      /**
-       * @inheritDoc
-       */
-      public String toString()
-      {
-         StringBuffer logLine = new StringBuffer();
-         logLine.append(Long.toString(getTransactionId()));
-         logLine.append(' ');
-         logLine.append(Action.VOLATILE_COMMIT);
-         logLine.append(' ');
-         logLine.append(targetIndex);
-         return logLine.toString();
-      }
-   }
+	/**
+	 * @inheritDoc
+	 */
+	public String toString() {
+	    StringBuffer logLine = new StringBuffer();
+	    logLine.append(Long.toString(getTransactionId()));
+	    logLine.append(' ');
+	    logLine.append(Action.VOLATILE_COMMIT);
+	    logLine.append(' ');
+	    logLine.append(targetIndex);
+	    return logLine.toString();
+	}
+    }
 }
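
A note on the redo-log actions reformatted above: each Action serializes itself
as "<transactionId> <ACTION> [arguments]" via toString() and is rebuilt by the
matching fromString(transactionId, arguments) factory when the log is replayed.
Below is a minimal, self-contained sketch of that line layout; the class name
and the action token ("DELETE_NODE") are illustrative placeholders, not the
literal constants defined in Action.

    // Illustrative sketch only -- mirrors the "<transactionId> <ACTION> [arguments]"
    // layout produced by Action.toString(); the token strings are made up.
    public class RedoLineSketch {

        static final class ParsedLine {
            final long transactionId;
            final String action;    // e.g. "DELETE_NODE" (placeholder token)
            final String arguments; // empty for actions without arguments

            ParsedLine(long transactionId, String action, String arguments) {
                this.transactionId = transactionId;
                this.action = action;
                this.arguments = arguments;
            }
        }

        static ParsedLine parse(String line) {
            int first = line.indexOf(' ');             // end of the transaction id
            long txId = Long.parseLong(line.substring(0, first));
            int second = line.indexOf(' ', first + 1); // end of the action token, -1 if no arguments
            String action = second == -1 ? line.substring(first + 1) : line.substring(first + 1, second);
            String args = second == -1 ? "" : line.substring(second + 1);
            return new ParsedLine(txId, action, args);
        }

        public static void main(String[] args) {
            ParsedLine p = parse("42 DELETE_NODE 00000000-0000-0000-0000-000000000000");
            System.out.println(p.transactionId + " / " + p.action + " / " + p.arguments);
        }
    }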

Modified: jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/SearchIndex.java
===================================================================
--- jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/SearchIndex.java	2009-10-01 15:37:53 UTC (rev 193)
+++ jcr/trunk/component/core/src/main/java/org/exoplatform/services/jcr/impl/core/query/lucene/SearchIndex.java	2009-10-02 08:25:49 UTC (rev 194)
@@ -38,7 +38,7 @@
 import org.exoplatform.container.configuration.ConfigurationManager;
 import org.exoplatform.services.document.DocumentReaderService;
 import org.exoplatform.services.jcr.config.QueryHandlerEntry;
-import org.exoplatform.services.jcr.config.QueryHandlerEntryWrapper;
+import org.exoplatform.services.jcr.config.RepositoryConfigurationException;
 import org.exoplatform.services.jcr.dataflow.ItemDataConsumer;
 import org.exoplatform.services.jcr.datamodel.ItemData;
 import org.exoplatform.services.jcr.datamodel.NodeData;
@@ -54,6 +54,7 @@
 import org.exoplatform.services.jcr.impl.core.query.ExecutableQuery;
 import org.exoplatform.services.jcr.impl.core.query.QueryHandler;
 import org.exoplatform.services.jcr.impl.core.query.QueryHandlerContext;
+import org.exoplatform.services.jcr.impl.core.query.SearchIndexConfigurationHelper;
 import org.exoplatform.services.jcr.impl.core.query.lucene.directory.DirectoryManager;
 import org.exoplatform.services.jcr.impl.core.query.lucene.directory.FSDirectoryManager;
 import org.slf4j.Logger;
@@ -379,7 +380,7 @@
      */
     private SynonymProvider synProvider;
 
-    private File indexDirectory;
+    // private File indexDirectory;
 
     /**
      * The configuration path for the synonym provider.
@@ -450,14 +451,14 @@
      */
     private boolean closed = false;
 
-    private QueryHandlerContext context;
+    // private QueryHandlerContext context;
 
     /**
      * Text extractor for extracting text content of binary properties.
      */
     private DocumentReaderService extractor;
 
-    private final QueryHandlerEntryWrapper queryHandlerConfig;
+    // private final QueryHandlerEntryWrapper queryHandlerConfig;
 
     /**
      * The ErrorLog of this <code>MultiIndex</code>. All changes that must be in
@@ -469,13 +470,20 @@
 
     /**
      * Working constructor.
+     * 
+     * @throws RepositoryConfigurationException
+     * @throws IOException
      */
     public SearchIndex(QueryHandlerEntry queryHandlerConfig,
-	    ConfigurationManager cfm) {
+	    ConfigurationManager cfm) throws IOException,
+	    RepositoryConfigurationException {
 	this.analyzer = new JcrStandartAnalyzer();
-	this.queryHandlerConfig = new QueryHandlerEntryWrapper(
-		queryHandlerConfig);
+	// this.queryHandlerConfig = new QueryHandlerEntryWrapper(
+	// queryHandlerConfig);
 	this.cfm = cfm;
+	SearchIndexConfigurationHelper searchIndexConfigurationHelper = new SearchIndexConfigurationHelper(
+		this);
+	searchIndexConfigurationHelper.init(queryHandlerConfig);
     }
 
     /**
@@ -483,23 +491,11 @@
      */
     public SearchIndex() {
 	this.analyzer = new JcrStandartAnalyzer();
-	this.queryHandlerConfig = null;
+	// this.queryHandlerConfig = null;
 	this.cfm = null;
     }
 
     /**
-     * Initializes this query handler by setting all properties in this class
-     * with appropriate parameter values.
-     * 
-     * @param context
-     *            the context for this query handler.
-     */
-    public final void setContext(QueryHandlerContext queryHandlerContext)
-	    throws IOException {
-	this.context = queryHandlerContext;
-    }
-
-    /**
      * Initializes this <code>QueryHandler</code>. This implementation requires
      * that a path parameter is set in the configuration. If this condition is
      * not met, a <code>IOException</code> is thrown.
@@ -510,25 +506,25 @@
      */
     public void doInit() throws IOException, RepositoryException {
 	QueryHandlerContext context = getContext();
-	// if (path == null)
-	// {
-	// throw new
-	// IOException("SearchIndex requires 'path' parameter in configuration!");
-	// }
+	setPath(context.getIndexDirectory());
+	if (path == null) {
+	    throw new IOException(
+		    "SearchIndex requires 'path' parameter in configuration!");
+	}
 
-	String indexDir = context.getIndexDirectory();
-	if (indexDir != null) {
-	    indexDir = indexDir.replace("${java.io.tmpdir}", System
-		    .getProperty("java.io.tmpdir"));
-	    indexDirectory = new File(indexDir);
+	File indexDirectory;
+	if (path != null) {
+
+	    indexDirectory = new File(path);
 	    if (!indexDirectory.exists())
 		if (!indexDirectory.mkdirs())
 		    throw new RepositoryException("fail to create index dir "
-			    + indexDir);
+			    + path);
 	} else {
 	    throw new IOException(
 		    "SearchIndex requires 'path' parameter in configuration!");
 	}
+	log.info("path=" + path);
 
 	// Set excludedIDs = new HashSet();
 	// if (context.getExcludedNodeId() != null)
@@ -537,7 +533,7 @@
 	// }
 
 	extractor = context.getExtractor();
-	synProvider = queryHandlerConfig.createSynonymProvider(cfm);
+	// synProvider = queryHandlerConfig.createSynonymProvider(cfm);
 	directoryManager = createDirectoryManager();
 
 	if (context.getParentHandler() instanceof SearchIndex) {
@@ -566,22 +562,10 @@
 	indexingConfig = createIndexingConfiguration(nsMappings);
 	analyzer.setIndexingConfig(indexingConfig);
 
-	index = new MultiIndex(this, new HashSet<String>());
-	if (index.numDocs() == 0) {
-	    // Path rootPath;
-	    // if (excludedIDs.isEmpty())
-	    // {
-	    // // this is the index for jcr:system
-	    // rootPath = JCR_SYSTEM_PATH;
-	    // }
-	    // else
-	    // {
-	    // rootPath = ROOT_PATH;
-	    // }
-	    // index.createInitialIndex(context.getItemStateManager(),
-	    // context.getRootId(), rootPath);
-	    index.createInitialIndex(context.getItemStateManager(), context
-		    .getRootNodeIdentifer());
+	index = new MultiIndex(this, context.getIndexingTree());
+	if (index.numDocs() == 0 && context.isCreateInitialIndex()) {
+
+	    index.createInitialIndex(context.getItemStateManager());
 	}
 	if (consistencyCheckEnabled
 		&& (index.getRedoLogApplied() || forceConsistencyCheck)) {
@@ -1797,9 +1781,13 @@
      * 
      * @param path
      *            the location of the search index.
+     * @throws IOException
      */
     public void setPath(String path) {
-	indexDirectory = new File(path);
+
+	this.path = path.replace("${java.io.tmpdir}", System
+		.getProperty("java.io.tmpdir"));
+
     }
 
     /**
@@ -1809,7 +1797,7 @@
      * @return the location of the search index.
      */
     public String getPath() {
-	return indexDirectory.getAbsolutePath();
+	return path;
     }
 
     /**
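
The reworked setPath(String) above now expands the ${java.io.tmpdir} placeholder
itself, and doInit() fails with an IOException when no path has been configured.
A standalone sketch of that resolution logic follows; the class name, method name
and sample path are illustrative only.

    // Minimal sketch of the path resolution performed by setPath(...) above;
    // the sample path is an example value, not taken from any configuration.
    public class IndexPathSketch {

        static String resolve(String configuredPath) {
            if (configuredPath == null) {
                // doInit() reports a missing path as an IOException; a plain exception is used here.
                throw new IllegalArgumentException(
                        "SearchIndex requires 'path' parameter in configuration!");
            }
            return configuredPath.replace("${java.io.tmpdir}",
                    System.getProperty("java.io.tmpdir"));
        }

        public static void main(String[] args) {
            System.out.println(resolve("${java.io.tmpdir}/jcr/index/ws1"));
        }
    }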

Modified: jcr/trunk/component/core/src/test/java/org/exoplatform/services/jcr/api/core/query/TestAll.java
===================================================================
--- jcr/trunk/component/core/src/test/java/org/exoplatform/services/jcr/api/core/query/TestAll.java	2009-10-01 15:37:53 UTC (rev 193)
+++ jcr/trunk/component/core/src/test/java/org/exoplatform/services/jcr/api/core/query/TestAll.java	2009-10-02 08:25:49 UTC (rev 194)
@@ -25,43 +25,41 @@
 /**
  * Test suite that includes all testcases for the Search module.
  */
-public class TestAll extends TestCase
-{
+public class TestAll extends TestCase {
 
-   /**
-    * Returns a <code>Test</code> suite that executes all tests inside this
-    * package.
-    *
-    * @return a <code>Test</code> suite that executes all tests inside this
-    *         package.
-    */
-   public static Test suite()
-   {
-      TestSuite suite = new TestSuite("Search tests");
+    /**
+     * Returns a <code>Test</code> suite that executes all tests inside this
+     * package.
+     * 
+     * @return a <code>Test</code> suite that executes all tests inside this
+     *         package.
+     */
+    public static Test suite() {
+	TestSuite suite = new TestSuite("Search tests");
 
-      suite.addTestSuite(SimpleQueryTest.class);
-      suite.addTestSuite(FulltextQueryTest.class);
-      suite.addTestSuite(SelectClauseTest.class);
-      suite.addTestSuite(SQLTest.class);
-      suite.addTestSuite(OrderByTest.class);
-      suite.addTestSuite(XPathAxisTest.class);
-      suite.addTestSuite(SkipDeletedNodesTest.class);
-      suite.addTestSuite(SkipDeniedNodesTest.class);
-      suite.addTestSuite(MixinTest.class);
-      suite.addTestSuite(DerefTest.class);
-      suite.addTestSuite(VersionStoreQueryTest.class);
-      suite.addTestSuite(UpperLowerCaseQueryTest.class);
-      suite.addTestSuite(ChildAxisQueryTest.class);
-      suite.addTestSuite(QueryResultTest.class);
-      suite.addTestSuite(FnNameQueryTest.class);
-      suite.addTestSuite(PathQueryNodeTest.class);
-      suite.addTestSuite(SynonymProviderTest.class);
-      suite.addTestSuite(ArrayHitsTest.class);
-      //suite.addTestSuite(ExcerptTest.class);
-      suite.addTestSuite(IndexFormatVersionTest.class);
-      //suite.addTestSuite(IndexingRuleTest.class);
-      suite.addTestSuite(ShareableNodeTest.class);
+	suite.addTestSuite(SimpleQueryTest.class);
+	suite.addTestSuite(FulltextQueryTest.class);
+	suite.addTestSuite(SelectClauseTest.class);
+	suite.addTestSuite(SQLTest.class);
+	suite.addTestSuite(OrderByTest.class);
+	suite.addTestSuite(XPathAxisTest.class);
+	suite.addTestSuite(SkipDeletedNodesTest.class);
+	suite.addTestSuite(SkipDeniedNodesTest.class);
+	suite.addTestSuite(MixinTest.class);
+	suite.addTestSuite(DerefTest.class);
+	suite.addTestSuite(VersionStoreQueryTest.class);
+	suite.addTestSuite(UpperLowerCaseQueryTest.class);
+	suite.addTestSuite(ChildAxisQueryTest.class);
+	suite.addTestSuite(QueryResultTest.class);
+	suite.addTestSuite(FnNameQueryTest.class);
+	suite.addTestSuite(PathQueryNodeTest.class);
+	// suite.addTestSuite(SynonymProviderTest.class);
+	suite.addTestSuite(ArrayHitsTest.class);
+	// suite.addTestSuite(ExcerptTest.class);
+	suite.addTestSuite(IndexFormatVersionTest.class);
+	// suite.addTestSuite(IndexingRuleTest.class);
+	suite.addTestSuite(ShareableNodeTest.class);
 
-      return suite;
-   }
+	return suite;
+    }
 }
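
Since TestAll stays a plain JUnit 3 suite, it can still be driven from the
command line with the standard text runner; the launcher class below is an
illustrative sketch, not part of the commit.

    // Runs the suite with the JUnit 3 text runner (illustrative launcher).
    public class RunSearchTests {
        public static void main(String[] args) {
            junit.textui.TestRunner.run(
                    org.exoplatform.services.jcr.api.core.query.TestAll.suite());
        }
    }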

Modified: jcr/trunk/component/core/src/test/java/org/exoplatform/services/jcr/api/core/query/lucene/directory/DirectoryManagerTest.java
===================================================================
--- jcr/trunk/component/core/src/test/java/org/exoplatform/services/jcr/api/core/query/lucene/directory/DirectoryManagerTest.java	2009-10-01 15:37:53 UTC (rev 193)
+++ jcr/trunk/component/core/src/test/java/org/exoplatform/services/jcr/api/core/query/lucene/directory/DirectoryManagerTest.java	2009-10-02 08:25:49 UTC (rev 194)
@@ -16,19 +16,18 @@
  */
 package org.exoplatform.services.jcr.api.core.query.lucene.directory;
 
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Iterator;
-import java.io.File;
+import junit.framework.TestCase;
 
-
 import org.apache.lucene.store.Directory;
 import org.exoplatform.services.jcr.impl.core.query.lucene.SearchIndex;
 import org.exoplatform.services.jcr.impl.core.query.lucene.directory.DirectoryManager;
 import org.exoplatform.services.jcr.impl.core.query.lucene.directory.FSDirectoryManager;
 import org.exoplatform.services.jcr.impl.core.query.lucene.directory.RAMDirectoryManager;
 
-import junit.framework.TestCase;
+import java.io.File;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
 
 /**
  * <code>DirectoryManagerTest</code> performs tests on directory manager
@@ -36,8 +35,9 @@
  */
 public class DirectoryManagerTest extends TestCase {
 
-    private static final Collection IMPLEMENTATIONS = Arrays.asList(
-            new Class[]{FSDirectoryManager.class, RAMDirectoryManager.class});
+    private static final Collection IMPLEMENTATIONS = Arrays
+	    .asList(new Class[] { FSDirectoryManager.class,
+		    RAMDirectoryManager.class });
 
     private static final SearchIndex INDEX = new SearchIndex();
 
@@ -46,69 +46,75 @@
     private static final String RENAMED = "renamed";
 
     static {
-        INDEX.setPath(new File(new File("target"), "directory-factory-test").getAbsolutePath());
+	INDEX.setPath(new File(new File("target"), "directory-factory-test")
+		.getAbsolutePath());
     }
 
     protected void tearDown() throws Exception {
-        new File(INDEX.getPath(), TEST).delete();
-        new File(INDEX.getPath(), RENAMED).delete();
+	new File(INDEX.getPath(), TEST).delete();
+	new File(INDEX.getPath(), RENAMED).delete();
     }
 
     public void testHasDirectory() throws Exception {
-        execute(new Callable(){
-            public void call(DirectoryManager directoryManager) throws Exception {
-                Directory dir = directoryManager.getDirectory(TEST);
-                assertTrue(directoryManager.hasDirectory(TEST));
-                dir.close();
-            }
-        });
+	execute(new Callable() {
+	    public void call(DirectoryManager directoryManager)
+		    throws Exception {
+		Directory dir = directoryManager.getDirectory(TEST);
+		assertTrue(directoryManager.hasDirectory(TEST));
+		dir.close();
+	    }
+	});
     }
 
     public void testDelete() throws Exception {
-        execute(new Callable(){
-            public void call(DirectoryManager directoryManager) throws Exception {
-                directoryManager.getDirectory(TEST).close();
-                directoryManager.delete(TEST);
-                assertFalse(directoryManager.hasDirectory(TEST));
-            }
-        });
+	execute(new Callable() {
+	    public void call(DirectoryManager directoryManager)
+		    throws Exception {
+		directoryManager.getDirectory(TEST).close();
+		directoryManager.delete(TEST);
+		assertFalse(directoryManager.hasDirectory(TEST));
+	    }
+	});
     }
 
     public void testGetDirectoryNames() throws Exception {
-        execute(new Callable(){
-            public void call(DirectoryManager directoryManager) throws Exception {
-                directoryManager.getDirectory(TEST).close();
-                assertTrue(Arrays.asList(directoryManager.getDirectoryNames()).contains(TEST));
-            }
-        });
+	execute(new Callable() {
+	    public void call(DirectoryManager directoryManager)
+		    throws Exception {
+		directoryManager.getDirectory(TEST).close();
+		assertTrue(Arrays.asList(directoryManager.getDirectoryNames())
+			.contains(TEST));
+	    }
+	});
     }
 
     public void testRename() throws Exception {
-        execute(new Callable(){
-            public void call(DirectoryManager directoryManager) throws Exception {
-                directoryManager.getDirectory(TEST).close();
-                directoryManager.rename(TEST, RENAMED);
-                assertTrue(directoryManager.hasDirectory(RENAMED));
-                assertFalse(directoryManager.hasDirectory(TEST));
-            }
-        });
+	execute(new Callable() {
+	    public void call(DirectoryManager directoryManager)
+		    throws Exception {
+		directoryManager.getDirectory(TEST).close();
+		directoryManager.rename(TEST, RENAMED);
+		assertTrue(directoryManager.hasDirectory(RENAMED));
+		assertFalse(directoryManager.hasDirectory(TEST));
+	    }
+	});
     }
 
     private void execute(Callable callable) throws Exception {
-        for (Iterator it = IMPLEMENTATIONS.iterator(); it.hasNext(); ) {
-            Class clazz = (Class) it.next();
-            DirectoryManager dirMgr = (DirectoryManager) clazz.newInstance();
-            dirMgr.init(INDEX);
-            try {
-                callable.call(dirMgr);
-            } finally {
-                dirMgr.dispose();
-            }
-        }
+	for (Iterator it = IMPLEMENTATIONS.iterator(); it.hasNext();) {
+	    Class clazz = (Class) it.next();
+	    DirectoryManager dirMgr = (DirectoryManager) clazz.newInstance();
+	    dirMgr.init(INDEX);
+	    try {
+		callable.call(dirMgr);
+	    } finally {
+		dirMgr.dispose();
+	    }
+	}
     }
 
     private interface Callable {
 
-        public void call(DirectoryManager directoryManager) throws Exception;
+	public void call(DirectoryManager directoryManager) throws Exception;
     }
 }
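
The test above pushes every DirectoryManager implementation through the same
lifecycle: init against a SearchIndex, obtain a directory by name, then clean up.
Below is a compact sketch of that lifecycle against FSDirectoryManager, limited
to the calls exercised by the test; the directory name and path are example values.

    import java.io.File;

    import org.apache.lucene.store.Directory;
    import org.exoplatform.services.jcr.impl.core.query.lucene.SearchIndex;
    import org.exoplatform.services.jcr.impl.core.query.lucene.directory.FSDirectoryManager;

    // Lifecycle sketch mirroring DirectoryManagerTest: init -> getDirectory -> close -> dispose.
    public class DirectoryManagerUsageSketch {
        public static void main(String[] args) throws Exception {
            SearchIndex index = new SearchIndex();
            index.setPath(new File("target", "directory-usage-sketch").getAbsolutePath());

            FSDirectoryManager manager = new FSDirectoryManager();
            manager.init(index);
            try {
                Directory dir = manager.getDirectory("example"); // created on demand
                System.out.println("has 'example': " + manager.hasDirectory("example"));
                dir.close();
                manager.delete("example");
            } finally {
                manager.dispose();
            }
        }
    }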


